From 6e9547c276c09808a74e4b6ec4e4f6fc855af62d Mon Sep 17 00:00:00 2001
From: Pol Alvarez <pol.avms@gmail.com>
Date: Tue, 7 Feb 2023 19:13:09 +0100
Subject: [PATCH 01/65] initial version triggering embedding & marker heatmap
 after qc run

---
 src/api.v2/events/validateAndSubmitWork.js    |  2 +
 .../helpers/pipeline/handleQCResponse.js      |  4 +-
 .../worker/workSubmit/getExtraDependencies.js | 50 +++++++++++++++
 .../worker/workSubmit/submitEmbeddingWork.js  | 63 +++++++++++++++++++
 .../workSubmit/submitMarkerHeatmapWork.js     | 62 ++++++++++++++++++
 src/api.v2/helpers/worker/workerVersions.js   | 11 ++++
 6 files changed, 191 insertions(+), 1 deletion(-)
 create mode 100644 src/api.v2/helpers/worker/workSubmit/getExtraDependencies.js
 create mode 100644 src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
 create mode 100644 src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
 create mode 100644 src/api.v2/helpers/worker/workerVersions.js

diff --git a/src/api.v2/events/validateAndSubmitWork.js b/src/api.v2/events/validateAndSubmitWork.js
index 0d6988435..0c4200f42 100644
--- a/src/api.v2/events/validateAndSubmitWork.js
+++ b/src/api.v2/events/validateAndSubmitWork.js
@@ -6,8 +6,10 @@ const getPipelineStatus = require('../helpers/pipeline/getPipelineStatus');
 const pipelineConstants = require('../constants');
 
 const validateAndSubmitWork = async (workRequest) => {
+  console.log('validateAndSubmitWork: ', workRequest);
   const { experimentId } = workRequest;
 
+
   // Check if pipeline is runnning
   const { qc: { status: qcPipelineStatus } } = await getPipelineStatus(
     experimentId, pipelineConstants.QC_PROCESS_NAME,
diff --git a/src/api.v2/helpers/pipeline/handleQCResponse.js b/src/api.v2/helpers/pipeline/handleQCResponse.js
index 6ff08d0a3..15958d7ae 100644
--- a/src/api.v2/helpers/pipeline/handleQCResponse.js
+++ b/src/api.v2/helpers/pipeline/handleQCResponse.js
@@ -17,6 +17,8 @@ const getPipelineStatus = require('./getPipelineStatus');
 
 const Experiment = require('../../model/Experiment');
 const Plot = require('../../model/Plot');
+const submitEmbeddingWork = require('../worker/workSubmit/submitEmbeddingWork');
+const submitMarkerHeatmapWork = require('../worker/workSubmit/submitMarkerHeatmapWork');
 
 const logger = getLogger();
 
@@ -24,7 +26,7 @@ const hookRunner = new HookRunner();
 
 hookRunner.register(constants.ASSIGN_POD_TO_PIPELINE, [assignPodToPipeline]);
 hookRunner.registerAll([sendNotification]);
-hookRunner.register('configureEmbedding', [cleanupPods, updatePipelineVersion]);
+hookRunner.register('configureEmbedding', [cleanupPods, updatePipelineVersion, submitEmbeddingWork, submitMarkerHeatmapWork]);
 
 const getOutputFromS3 = async (message) => {
   const { output: { bucket, key } } = message;
diff --git a/src/api.v2/helpers/worker/workSubmit/getExtraDependencies.js b/src/api.v2/helpers/worker/workSubmit/getExtraDependencies.js
new file mode 100644
index 000000000..4caaa78c9
--- /dev/null
+++ b/src/api.v2/helpers/worker/workSubmit/getExtraDependencies.js
@@ -0,0 +1,50 @@
+// import workerVersions from 'utils/work/workerVersions';
+
+const workerVersions = require('../workerVersions');
+
+const Experiment = require('../../../model/Experiment');
+
+const getClusteringSettings = async (experimentId) => {
+  const processingConfig = await new Experiment().getProcessingConfig(experimentId);
+
+  console.log('processingConfig: ', processingConfig);
+  const { clusteringSettings } = processingConfig.configureEmbedding;
+
+
+  return clusteringSettings;
+};
+
+const dependencyGetters = {
+  ClusterCells: [],
+  GetExpressionCellSets: [],
+  GetEmbedding: [],
+  ListGenes: [],
+  DifferentialExpression: [getClusteringSettings],
+  GeneExpression: [],
+  GetBackgroundExpressedGenes: [getClusteringSettings],
+  DotPlot: [getClusteringSettings],
+  GetDoubletScore: [],
+  GetMitochondrialContent: [],
+  GetNGenes: [],
+  GetNUmis: [],
+  MarkerHeatmap: [getClusteringSettings],
+  GetTrajectoryAnalysisStartingNodes: [getClusteringSettings],
+  GetTrajectoryAnalysisPseudoTime: [getClusteringSettings],
+  GetNormalizedExpression: [getClusteringSettings],
+};
+
+const getExtraDependencies = async (experimentId, name, dispatch, getState) => {
+  const dependencies = await Promise.all(
+    dependencyGetters[name].map(
+      (dependencyGetter) => dependencyGetter(experimentId, dispatch, getState),
+    ),
+  );
+
+  if (workerVersions[name]) {
+    dependencies.push(workerVersions[name]);
+  }
+
+  return dependencies;
+};
+
+module.exports = getExtraDependencies;
diff --git a/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js b/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
new file mode 100644
index 000000000..512326c3d
--- /dev/null
+++ b/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
@@ -0,0 +1,63 @@
+const hash = require('object-hash');
+const validateAndSubmitWork = require('../../../events/validateAndSubmitWork');
+const getExperimentBackendStatus = require('../../backendStatus/getExperimentBackendStatus');
+
+const createObjectHash = (object) => hash.MD5(object);
+
+
+const submitEmbeddingWork = async (message) => {
+  console.log('payload ', message);
+
+  const {
+    experimentId, input:
+    { authJWT, config: { embeddingSettings: { method, methodSettings } } },
+  } = message;
+
+  const config = methodSettings[method];
+  // consider replacing with getPipelineStatus
+  const backendStatus = await getExperimentBackendStatus(experimentId);
+  const { pipeline: { startDate: qcPipelineStartDate } } = backendStatus;
+
+
+  console.log(`qcPipelineStartDate: ${qcPipelineStartDate} ${typeof (qcPipelineStartDate)}`);
+  const body = {
+    name: 'GetEmbedding',
+    type: method,
+    config,
+  };
+
+  const cacheUniquenessKey = null;
+
+  const extras = undefined;
+  const extraDependencies = [];
+  const workerVersion = 3;
+  const ETagBody = {
+    experimentId,
+    body,
+    qcPipelineStartDate: qcPipelineStartDate.toISOString(),
+    extras,
+    cacheUniquenessKey,
+    workerVersion,
+    extraDependencies,
+  };
+
+  console.log('embedding ETagBody: ', ETagBody);
+  const ETag = createObjectHash(ETagBody);
+  console.log('submitEmbeddingWork: embedding Etag ', ETag);
+
+  const now = new Date();
+  const timeout = 15 * 60 * 1000; // 15min in ms
+  const timeoutDate = new Date(now.getTime() + timeout);
+  const request = {
+    ETag,
+    socketId: 'randomID',
+    experimentId,
+    authJWT,
+    timeout: timeoutDate.toISOString(),
+    body,
+  };
+
+  await validateAndSubmitWork(request);
+};
+
+module.exports = submitEmbeddingWork;
diff --git a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
new file mode 100644
index 000000000..62d16299c
--- /dev/null
+++ b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
@@ -0,0 +1,62 @@
+const hash = require('object-hash');
+const validateAndSubmitWork = require('../../../events/validateAndSubmitWork');
+const getExperimentBackendStatus = require('../../backendStatus/getExperimentBackendStatus');
+const getExtraDependencies = require('./getExtraDependencies');
+
+const createObjectHash = (object) => hash.MD5(object);
+
+
+const submitMarkerHeatmapWork = async (message) => {
+  console.log('payload ', message);
+
+  const { experimentId, input: { authJWT } } = message;
+
+  // const { resolution } = methodSettings[method];
+  // consider replacing with getPipelineStatus
+  const backendStatus = await getExperimentBackendStatus(experimentId);
+  // console.log('backendStatus: ', backendStatus);
+  const { pipeline: { startDate: qcPipelineStartDate } } = backendStatus;
+  const numGenes = 5;
+  const selectedCellSet = 'louvain';
+
+  const body = {
+    name: 'MarkerHeatmap',
+    nGenes: numGenes,
+    cellSetKey: selectedCellSet,
+  };
+
+  const cacheUniquenessKey = null;
+
+  const extras = undefined;
+  const extraDependencies = await getExtraDependencies(experimentId, body.name);
+  const workerVersion = 3;
+  const ETagBody = {
+    experimentId,
+    body,
+    qcPipelineStartDate: qcPipelineStartDate.toISOString(),
+    extras,
+    cacheUniquenessKey,
+    workerVersion,
+    extraDependencies,
+  };
+
+  console.log('marker heatmap ETagBody: ', ETagBody);
+  const ETag = createObjectHash(ETagBody);
+  console.log('submitEmbeddingWork: marker heatmap Etag ', ETag);
+
+  const now = new Date();
+  const timeout = 15 * 60 * 1000; // 15min in ms
+  const timeoutDate = new Date(now.getTime() + timeout);
+  const request = {
+    ETag,
+    socketId: 'randomID',
+    experimentId,
+    authJWT,
+    timeout: timeoutDate.toISOString(),
+    body,
+  };
+
+  await validateAndSubmitWork(request);
+};
+
+module.exports = submitMarkerHeatmapWork;
diff --git a/src/api.v2/helpers/worker/workerVersions.js b/src/api.v2/helpers/worker/workerVersions.js
new file mode 100644
index 000000000..a4453e3f2
--- /dev/null
+++ b/src/api.v2/helpers/worker/workerVersions.js
@@ -0,0 +1,11 @@
+const workerVersions = {
+  ListGenes: 1,
+  MarkerHeatmap: 1,
+  GeneExpression: 1,
+  GetTrajectoryAnalysisStartingNodes: 1,
+  GetTrajectoryAnalysisPseudoTime: 1,
+  DotPlot: 1,
+  DifferentialExpression: 1,
+};
+
+module.exports = workerVersions;

From f94cd7b65386784e557bc44d0f64ebc502bd57a6 Mon Sep 17 00:00:00 2001
From: Pol Alvarez <pol.avms@gmail.com>
Date: Mon, 13 Feb 2023 09:10:54 +0100
Subject: [PATCH 02/65] writing tests

---
 .../helpers/pipeline/__mocks__/getPipelineStatus.js |  5 +++--
 .../worker/workSubmit/submitEmbeddingWork.js        | 13 +++++++++----
 .../worker/workSubmit/submitMarkerHeatmapWork.js    |  3 ++-
 src/config/default-config.js                        |  1 +
 4 files changed, 15 insertions(+), 7 deletions(-)

diff --git a/src/api.v2/helpers/pipeline/__mocks__/getPipelineStatus.js b/src/api.v2/helpers/pipeline/__mocks__/getPipelineStatus.js
index 5d1ee81d5..e19c7cee0 100644
--- a/src/api.v2/helpers/pipeline/__mocks__/getPipelineStatus.js
+++ b/src/api.v2/helpers/pipeline/__mocks__/getPipelineStatus.js
@@ -1,15 +1,16 @@
 const pipelineConstants = require('../../../constants');
 
+const date = new Date(1458619200000);
 const responseTemplates = {
   gem2s: {
     completedSteps: [],
-    startDate: null,
+    startDate: date,
     status: pipelineConstants.SUCCEEDED,
     stopDate: null,
   },
   qc: {
     completedSteps: [],
-    startDate: null,
+    startDate: date,
     status: pipelineConstants.SUCCEEDED,
     stopDate: null,
   },
diff --git a/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js b/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
index 512326c3d..47b643969 100644
--- a/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
+++ b/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
@@ -1,4 +1,5 @@
 const hash = require('object-hash');
+const config = require('../../../../config');
 const validateAndSubmitWork = require('../../../events/validateAndSubmitWork');
 const getExperimentBackendStatus = require('../../backendStatus/getExperimentBackendStatus');
 
@@ -13,7 +14,7 @@ const submitEmbeddingWork = async (message) => {
     { authJWT, config: { embeddingSettings: { method, methodSettings } } },
   } = message;
 
-  const config = methodSettings[method];
+  const embeddingConfig = methodSettings[method];
   // consider replacing with getPipelineStatus
   const backendStatus = await getExperimentBackendStatus(experimentId);
   const { pipeline: { startDate: qcPipelineStartDate } } = backendStatus;
@@ -23,14 +24,18 @@ const submitEmbeddingWork = async (message) => {
   const body = {
     name: 'GetEmbedding',
     type: method,
-    config,
+    embeddingConfig,
   };
 
+  // these values need to match explicitly the default ones defined in the UI at
+// src/utils/work/fetchWork.js when calling the function generateETag. If this file
+// or the one in the UI has any default changed, the pre-computing of embeddings/marker heatmap
+  // will stop working as the ETags will no longer match.
   const cacheUniquenessKey = null;
-
   const extras = undefined;
   const extraDependencies = [];
-  const workerVersion = 3;
+  const workerVersion = { config };
+
   const ETagBody = {
     experimentId,
     body,
diff --git a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
index 62d16299c..8e8773fb9 100644
--- a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
+++ b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
@@ -1,4 +1,5 @@
 const hash = require('object-hash');
+const config = require('../../../../config');
 const validateAndSubmitWork = require('../../../events/validateAndSubmitWork');
 const getExperimentBackendStatus = require('../../backendStatus/getExperimentBackendStatus');
 const getExtraDependencies = require('./getExtraDependencies');
@@ -29,7 +30,7 @@ const submitMarkerHeatmapWork = async (message) => {
 
   const extras = undefined;
   const extraDependencies = await getExtraDependencies(experimentId, body.name);
-  const workerVersion = 3;
+  const { workerVersion } = config;
   const ETagBody = {
     experimentId,
     body,
diff --git a/src/config/default-config.js b/src/config/default-config.js
index c685542fa..766a1d72d 100644
--- a/src/config/default-config.js
+++ b/src/config/default-config.js
@@ -74,6 +74,7 @@ const config = {
   // Used for Batch reporting
   datadogApiKey: process.env.DD_API_KEY || '',
   datadogAppKey: process.env.DD_APP_KEY || '',
+  workerVersion: 3,
 };
 
 // We are in permanent develop staging environment

From fb9ac36bbbc8549bd2e17caa1b1677723a45eaa7 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Wed, 15 Feb 2023 09:30:08 -0300
Subject: [PATCH 03/65] Working version

---
 .../helpers/pipeline/getPipelineStatus.js     | 37 ++++++-----
 .../pipeline/pipelineConstruct/index.js       | 11 +++-
 .../pipeline/pipelineConstruct/qcHelpers.js   | 61 +++++++++++++++----
 src/api.v2/helpers/s3/fileExists.js           |  3 +-
 4 files changed, 84 insertions(+), 28 deletions(-)

diff --git a/src/api.v2/helpers/pipeline/getPipelineStatus.js b/src/api.v2/helpers/pipeline/getPipelineStatus.js
index 112009231..0ffa368ad 100644
--- a/src/api.v2/helpers/pipeline/getPipelineStatus.js
+++ b/src/api.v2/helpers/pipeline/getPipelineStatus.js
@@ -9,6 +9,7 @@ const getLogger = require('../../../utils/getLogger');
 const pipelineConstants = require('../../constants');
 const { getPipelineStepNames } = require('./pipelineConstruct/skeletons');
 const shouldGem2sRerun = require('./shouldGem2sRerun');
+const { qcStepNames, stepNameToBackendStepNames } = require('./pipelineConstruct/qcHelpers');
 
 const logger = getLogger();
 
@@ -20,7 +21,8 @@ const qcPipelineSteps = [
   'NumGenesVsNumUmisFilter',
   'DoubletScoresFilter',
   'DataIntegration',
-  'ConfigureEmbedding'];
+  'ConfigureEmbedding',
+];
 
 const gem2sPipelineSteps = [
   'DownloadGem',
@@ -29,7 +31,8 @@ const gem2sPipelineSteps = [
   'DoubletScores',
   'CreateSeurat',
   'PrepareExperiment',
-  'UploadToAWS'];
+  'UploadToAWS',
+];
 
 // pipelineStepNames are the names of pipeline steps for which we
 // want to report the progress back to the user
@@ -231,11 +234,10 @@ const getPipelineStatus = async (experimentId, processName) => {
   });
 
   let execution = {};
-  let completedSteps = [];
   let error = false;
-  let response;
+  let response = null;
 
-  const { executionArn = null, lastStatusResponse } = pipelineExecution;
+  const { executionArn = null, stateMachineArn = null, lastStatusResponse } = pipelineExecution;
   const shouldRerun = await shouldGem2sRerun(experimentId);
 
   try {
@@ -247,16 +249,21 @@ const getPipelineStatus = async (experimentId, processName) => {
 
     error = checkError(events);
     const executedSteps = getStepsFromExecutionHistory(events);
-    const lastExecuted = executedSteps[executedSteps.length - 1];
-    switch (processName) {
-      case pipelineConstants.QC_PROCESS_NAME:
-        completedSteps = qcPipelineSteps.slice(0, qcPipelineSteps.indexOf(lastExecuted) + 1);
-        break;
-      case pipelineConstants.GEM2S_PROCESS_NAME:
-        completedSteps = gem2sPipelineSteps.slice(0, gem2sPipelineSteps.indexOf(lastExecuted) + 1);
-        break;
-      default:
-        logger.error(`unknown process name ${processName}`);
+
+    // console.l
+    const stateMachine = await stepFunctions.describeStateMachine({
+      stateMachineArn,
+    }).promise();
+
+    let completedSteps = executedSteps;
+
+    if (processName === 'qc') {
+      const stepFunctionSteps = Object.keys(JSON.parse(stateMachine.definition).States);
+
+      const qcStepsCompletedPreviousRuns = _.difference(qcStepNames, stepFunctionSteps)
+        .map((rawStepName) => stepNameToBackendStepNames[rawStepName]);
+
+      completedSteps = qcStepsCompletedPreviousRuns.concat(executedSteps);
     }
 
     response = buildResponse(processName, execution, shouldRerun, error, completedSteps);
diff --git a/src/api.v2/helpers/pipeline/pipelineConstruct/index.js b/src/api.v2/helpers/pipeline/pipelineConstruct/index.js
index 0a53bf717..bdd02a16d 100644
--- a/src/api.v2/helpers/pipeline/pipelineConstruct/index.js
+++ b/src/api.v2/helpers/pipeline/pipelineConstruct/index.js
@@ -26,6 +26,8 @@ const {
 } = require('./utils');
 
 const buildStateMachineDefinition = require('./constructors/buildStateMachineDefinition');
+const getPipelineStatus = require('../getPipelineStatus');
+const constants = require('../../../constants');
 
 const logger = getLogger();
 
@@ -52,6 +54,11 @@ const createQCPipeline = async (experimentId, processingConfigUpdates, authJWT,
 
   const { processingConfig, samplesOrder } = await new Experiment().findById(experimentId).first();
 
+  const {
+    // @ts-ignore
+    [constants.QC_PROCESS_NAME]: status,
+  } = await getPipelineStatus(experimentId, constants.QC_PROCESS_NAME);
+
   if (processingConfigUpdates.length) {
     processingConfigUpdates.forEach(({ name, body }) => {
       if (!processingConfig[name]) {
@@ -72,7 +79,9 @@ const createQCPipeline = async (experimentId, processingConfigUpdates, authJWT,
 
   await cancelPreviousPipelines(experimentId, previousJobId);
 
-  const qcSteps = await getQcStepsToRun(experimentId, processingConfigUpdates);
+  const qcSteps = await getQcStepsToRun(
+    experimentId, processingConfigUpdates, status.completedSteps,
+  );
 
   const runInBatch = needsBatchJob(context.podCpus, context.podMemory);
 
diff --git a/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js b/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
index 5d4ef08c0..3c287d783 100644
--- a/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
+++ b/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
@@ -1,4 +1,4 @@
-
+const _ = require('lodash');
 const { fileExists } = require('../../s3/fileExists');
 const { FILTERED_CELLS } = require('../../../../config/bucketNames');
 
@@ -12,7 +12,7 @@ const filterToStepName = {
   configureEmbedding: 'ConfigureEmbedding',
 };
 
-const stepNames = [
+const qcStepNames = [
   'ClassifierFilterMap',
   'CellSizeDistributionFilterMap',
   'MitochondrialContentFilterMap',
@@ -22,6 +22,31 @@ const stepNames = [
   'ConfigureEmbedding',
 ];
 
+// TODO: Check in the code review
+// I did this to get over the hurdle and have something working, but:
+// we need to talk about why we are using
+// ClassifierFilter vs ClassifierFilterMap for the backend status
+// can we switch it over to ClassifierFilterMap?
+// would make stuff way easier
+const backendStepNamesToStepName = {
+  ClassifierFilter: 'ClassifierFilterMap',
+  CellSizeDistributionFilter: 'CellSizeDistributionFilterMap',
+  MitochondrialContentFilter: 'MitochondrialContentFilterMap',
+  NumGenesVsNumUmisFilter: 'NumGenesVsNumUmisFilterMap',
+  DoubletScoresFilter: 'DoubletScoresFilterMap',
+  DataIntegration: 'DataIntegration',
+  ConfigureEmbedding: 'ConfigureEmbedding',
+};
+
+const stepNameToBackendStepNames = {
+  ClassifierFilterMap: 'ClassifierFilter',
+  CellSizeDistributionFilterMap: 'CellSizeDistributionFilter',
+  MitochondrialContentFilterMap: 'MitochondrialContentFilter',
+  NumGenesVsNumUmisFilterMap: 'NumGenesVsNumUmisFilter',
+  DoubletScoresFilterMap: 'DoubletScoresFilter',
+  DataIntegration: 'DataIntegration',
+  ConfigureEmbedding: 'ConfigureEmbedding',
+};
 
 const qcStepsWithFilterSettings = [
   'cellSizeDistribution',
@@ -39,21 +64,33 @@ const hasFilteredCellIdsAvailable = async (experimentId) => (
 );
 // getFirstQCStep returns which is the first step of the QC to be run
 // processingConfigUpdates is not ordered
-const getFirstQCStep = async (experimentId, processingConfigUpdates) => {
-  let earliestStep = stepNames[0]; // normally first step
+const getFirstQCStep = async (experimentId, processingConfigUpdates, backendCompletedSteps) => {
+  let earliestChangedStep;
   let earliestIdx = 9999;
   processingConfigUpdates.forEach(({ name }) => {
     const stepName = filterToStepName[name];
-    const idx = stepNames.indexOf(stepName);
+    const idx = qcStepNames.indexOf(stepName);
     if (idx < earliestIdx) {
       earliestIdx = idx;
-      earliestStep = stepName;
+      earliestChangedStep = stepName;
     }
   });
 
+  const completedSteps = backendCompletedSteps.map(
+    (currentStep) => backendStepNamesToStepName[currentStep],
+  );
+
+  const pendingSteps = _.difference(qcStepNames, completedSteps);
+
+  // Choose the earliestStep by checking:
+  // if pendingSteps includes it, then pendingSteps has the earliest step
+  // if not, earliestChangedStep is the earliest step
+  const earliestStep = (!earliestChangedStep || pendingSteps.includes(earliestChangedStep))
+    ? pendingSteps[0] : earliestChangedStep;
+
   // if the earlist step to run is the first one, just return it without
   // further checks
-  if (earliestStep === stepNames[0]) {
+  if (earliestStep === qcStepNames[0]) {
     return earliestStep;
   }
   // if the first step to run is not the first in the pipeline (stepNames[0])
@@ -67,16 +104,18 @@ const getFirstQCStep = async (experimentId, processingConfigUpdates) => {
   }
 
 
-  return stepNames[0];
+  return qcStepNames[0];
 };
 
-const getQcStepsToRun = async (experimentId, processingConfigUpdates) => {
-  const firstStep = await getFirstQCStep(experimentId, processingConfigUpdates);
-  return stepNames.slice(stepNames.indexOf(firstStep));
+const getQcStepsToRun = async (experimentId, processingConfigUpdates, completedSteps) => {
+  const firstStep = await getFirstQCStep(experimentId, processingConfigUpdates, completedSteps);
+  return qcStepNames.slice(qcStepNames.indexOf(firstStep));
 };
 
 
 module.exports = {
   getQcStepsToRun,
   qcStepsWithFilterSettings,
+  qcStepNames,
+  stepNameToBackendStepNames,
 };
diff --git a/src/api.v2/helpers/s3/fileExists.js b/src/api.v2/helpers/s3/fileExists.js
index 6479669a8..55800ca38 100644
--- a/src/api.v2/helpers/s3/fileExists.js
+++ b/src/api.v2/helpers/s3/fileExists.js
@@ -1,4 +1,5 @@
 const getS3Client = require('./getS3Client');
+const logger = require('../../../utils/getLogger')();
 
 const fileExists = async (bucket, prefix) => {
   const s3 = getS3Client();
@@ -16,7 +17,7 @@ const fileExists = async (bucket, prefix) => {
       return false;
     }
     // if there is an exception
-    console.log(`could not check whether ${bucket}/${prefix} exists: ${err}`);
+    logger.log(`could not check whether ${bucket}/${prefix} exists: ${err}`);
     return false;
   }
   return true;

From 0e27584646b780a12b377ce8790d9aaa1406b8a7 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Wed, 15 Feb 2023 10:58:28 -0300
Subject: [PATCH 04/65] Cleanup

---
 src/api.v2/helpers/pipeline/getPipelineStatus.js | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/api.v2/helpers/pipeline/getPipelineStatus.js b/src/api.v2/helpers/pipeline/getPipelineStatus.js
index 0ffa368ad..eeff3ee13 100644
--- a/src/api.v2/helpers/pipeline/getPipelineStatus.js
+++ b/src/api.v2/helpers/pipeline/getPipelineStatus.js
@@ -250,7 +250,6 @@ const getPipelineStatus = async (experimentId, processName) => {
     error = checkError(events);
     const executedSteps = getStepsFromExecutionHistory(events);
 
-    // console.l
     const stateMachine = await stepFunctions.describeStateMachine({
       stateMachineArn,
     }).promise();

From c3d61dc3080fce458ea80d5ff6f05aca69f2863d Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Wed, 15 Feb 2023 16:04:25 -0300
Subject: [PATCH 05/65] Some refactoring

---
 .../helpers/pipeline/getPipelineStatus.js     | 41 ++++++++++++-------
 1 file changed, 26 insertions(+), 15 deletions(-)

diff --git a/src/api.v2/helpers/pipeline/getPipelineStatus.js b/src/api.v2/helpers/pipeline/getPipelineStatus.js
index eeff3ee13..9730947c0 100644
--- a/src/api.v2/helpers/pipeline/getPipelineStatus.js
+++ b/src/api.v2/helpers/pipeline/getPipelineStatus.js
@@ -213,6 +213,28 @@ const getStepsFromExecutionHistory = (events) => {
   return shortestCompletedToReport || [];
 };
 
+const getCompletedSteps = async (processName, stateMachineArn, executedSteps, stepFunctions) => {
+  let completedSteps;
+
+  if (processName === 'qc') {
+    const stateMachine = await stepFunctions.describeStateMachine({
+      stateMachineArn,
+    }).promise();
+
+
+    const stepFunctionSteps = Object.keys(JSON.parse(stateMachine.definition).States);
+
+    const qcStepsCompletedPreviousRuns = _.difference(qcStepNames, stepFunctionSteps)
+      .map((rawStepName) => stepNameToBackendStepNames[rawStepName]);
+
+    completedSteps = qcStepsCompletedPreviousRuns.concat(executedSteps);
+  } if (processName === 'gem2s') {
+    completedSteps = executedSteps;
+  }
+
+  return completedSteps;
+};
+
 /*
      * Return `completedSteps` of the state machine (SM) associated to the `experimentId`'s pipeline
      * The code assumes that
@@ -246,24 +268,13 @@ const getPipelineStatus = async (experimentId, processName) => {
     }).promise();
 
     const events = await getExecutionHistory(stepFunctions, executionArn);
-
     error = checkError(events);
-    const executedSteps = getStepsFromExecutionHistory(events);
-
-    const stateMachine = await stepFunctions.describeStateMachine({
-      stateMachineArn,
-    }).promise();
-
-    let completedSteps = executedSteps;
-
-    if (processName === 'qc') {
-      const stepFunctionSteps = Object.keys(JSON.parse(stateMachine.definition).States);
 
-      const qcStepsCompletedPreviousRuns = _.difference(qcStepNames, stepFunctionSteps)
-        .map((rawStepName) => stepNameToBackendStepNames[rawStepName]);
+    const executedSteps = getStepsFromExecutionHistory(events);
 
-      completedSteps = qcStepsCompletedPreviousRuns.concat(executedSteps);
-    }
+    const completedSteps = await getCompletedSteps(
+      processName, stateMachineArn, executedSteps, stepFunctions,
+    );
 
     response = buildResponse(processName, execution, shouldRerun, error, completedSteps);
   } catch (e) {

From cc5c6163efa6ec2fde8f8a1c33dda73c2d723194 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Thu, 16 Feb 2023 09:33:38 -0300
Subject: [PATCH 06/65] Fix some tests

---
 .../__snapshots__/qcHelpers.test.js.snap      |  70 ++++++-
 .../pipelineConstruct/qcHelpers.test.js       | 192 ++++++++++--------
 2 files changed, 173 insertions(+), 89 deletions(-)

diff --git a/tests/api.v2/helpers/pipeline/pipelineConstruct/__snapshots__/qcHelpers.test.js.snap b/tests/api.v2/helpers/pipeline/pipelineConstruct/__snapshots__/qcHelpers.test.js.snap
index 35540ba01..58ef2f283 100644
--- a/tests/api.v2/helpers/pipeline/pipelineConstruct/__snapshots__/qcHelpers.test.js.snap
+++ b/tests/api.v2/helpers/pipeline/pipelineConstruct/__snapshots__/qcHelpers.test.js.snap
@@ -1,6 +1,6 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP
 
-exports[`helper functions for skeletons returns the default first step and full state machine if the config has no updates 1`] = `
+exports[`helper functions for skeletons returns from first not-completed step if the config has changes after that 1`] = `
 Object {
   "CellSizeDistributionFilterMap": Object {
     "Catch": Array [
@@ -179,3 +179,71 @@ Object {
   },
 }
 `;
+
+exports[`helper functions for skeletons returns from first not-completed step if the config has no changes 1`] = `
+Object {
+  "ConfigureEmbedding": Object {
+    "Next": "EndOfPipeline",
+    "XCatch": Array [
+      Object {
+        "ErrorEquals": Array [
+          "States.ALL",
+        ],
+        "Next": "HandleError",
+        "ResultPath": "$.errorInfo",
+      },
+    ],
+    "XConstructorArgs": Object {
+      "perSample": false,
+      "taskName": "configureEmbedding",
+    },
+    "XStepType": "create-new-step",
+  },
+  "DataIntegration": Object {
+    "Next": "ConfigureEmbedding",
+    "XCatch": Array [
+      Object {
+        "ErrorEquals": Array [
+          "States.ALL",
+        ],
+        "Next": "HandleError",
+        "ResultPath": "$.errorInfo",
+      },
+    ],
+    "XConstructorArgs": Object {
+      "perSample": false,
+      "taskName": "dataIntegration",
+      "uploadCountMatrix": true,
+    },
+    "XStepType": "create-new-step",
+  },
+  "DoubletScoresFilterMap": Object {
+    "Catch": Array [
+      Object {
+        "ErrorEquals": Array [
+          "States.ALL",
+        ],
+        "Next": "HandleError",
+        "ResultPath": "$.errorInfo",
+      },
+    ],
+    "ItemsPath": "$.samples",
+    "Iterator": Object {
+      "StartAt": "DoubletScoresFilter",
+      "States": Object {
+        "DoubletScoresFilter": Object {
+          "End": true,
+          "XConstructorArgs": Object {
+            "perSample": true,
+            "taskName": "doubletScores",
+          },
+          "XStepType": "create-new-step",
+        },
+      },
+    },
+    "Next": "DataIntegration",
+    "ResultPath": null,
+    "Type": "Map",
+  },
+}
+`;
diff --git a/tests/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.test.js b/tests/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.test.js
index f2ee96bf1..812baf087 100644
--- a/tests/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.test.js
+++ b/tests/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.test.js
@@ -7,114 +7,130 @@ jest.mock('../../../../../src/api.v2/helpers/s3/fileExists', () => ({
   fileExists: jest.fn(() => true),
 }));
 
-describe('helper functions for skeletons', () => {
-  it('returns the correct first step given a list', async () => {
-    const processingConfig = [
-      {
-        name: 'numGenesVsNumUmis',
-        body: {
-          auto: true,
-          filterSettings: {
-            regressionType: 'linear',
-            regressionTypeSettings: {
-              linear: {
-                'p.level': 0.001,
-              },
-              spline: {
-                'p.level': 0.001,
-              },
-            },
+const processingConfig = [
+  {
+    name: 'numGenesVsNumUmis',
+    body: {
+      auto: true,
+      filterSettings: {
+        regressionType: 'linear',
+        regressionTypeSettings: {
+          linear: {
+            'p.level': 0.001,
           },
-          enabled: true,
-          '8e6ffc70-14c1-425f-b1be-cef9656a55a5': {
-            auto: true,
-            filterSettings: {
-              regressionType: 'linear',
-              regressionTypeSettings: {
-                linear: {
-                  'p.level': 0.0002898551,
-                },
-                spline: {
-                  'p.level': 0.001,
-                },
-              },
-            },
-            defaultFilterSettings: {
-              regressionType: 'linear',
-              regressionTypeSettings: {
-                linear: {
-                  'p.level': 0.0002898551,
-                },
-                spline: {
-                  'p.level': 0.001,
-                },
-              },
-            },
-            api_url: 'http://host.docker.internal:3000',
-            enabled: true,
+          spline: {
+            'p.level': 0.001,
           },
         },
       },
-      {
-        name: 'cellSizeDistribution',
-        body: {
-          auto: true,
-          filterSettings: {
-            minCellSize: 1080,
-            binStep: 200,
-          },
-          enabled: false,
-          '8e6ffc70-14c1-425f-b1be-cef9656a55a5': {
-            auto: true,
-            filterSettings: {
-              minCellSize: 1136,
-              binStep: 200,
+      enabled: true,
+      '8e6ffc70-14c1-425f-b1be-cef9656a55a5': {
+        auto: true,
+        filterSettings: {
+          regressionType: 'linear',
+          regressionTypeSettings: {
+            linear: {
+              'p.level': 0.0002898551,
             },
-            defaultFilterSettings: {
-              minCellSize: 1136,
-              binStep: 200,
+            spline: {
+              'p.level': 0.001,
             },
-            api_url: 'http://host.docker.internal:3000',
-            enabled: false,
           },
         },
-      },
-      {
-        name: 'doubletScores',
-        body: {
-          auto: true,
-          filterSettings: {
-            probabilityThreshold: 0.5,
-            binStep: 0.05,
-          },
-          enabled: true,
-          '8e6ffc70-14c1-425f-b1be-cef9656a55a5': {
-            auto: true,
-            filterSettings: {
-              probabilityThreshold: 0.6506245,
-              binStep: 0.05,
+        defaultFilterSettings: {
+          regressionType: 'linear',
+          regressionTypeSettings: {
+            linear: {
+              'p.level': 0.0002898551,
             },
-            defaultFilterSettings: {
-              probabilityThreshold: 0.6506245,
-              binStep: 0.05,
+            spline: {
+              'p.level': 0.001,
             },
-            api_url: 'http://host.docker.internal:3000',
-            enabled: true,
           },
         },
+        api_url: 'http://host.docker.internal:3000',
+        enabled: true,
       },
-    ];
+    },
+  },
+  {
+    name: 'cellSizeDistribution',
+    body: {
+      auto: true,
+      filterSettings: {
+        minCellSize: 1080,
+        binStep: 200,
+      },
+      enabled: false,
+      '8e6ffc70-14c1-425f-b1be-cef9656a55a5': {
+        auto: true,
+        filterSettings: {
+          minCellSize: 1136,
+          binStep: 200,
+        },
+        defaultFilterSettings: {
+          minCellSize: 1136,
+          binStep: 200,
+        },
+        api_url: 'http://host.docker.internal:3000',
+        enabled: false,
+      },
+    },
+  },
+  {
+    name: 'doubletScores',
+    body: {
+      auto: true,
+      filterSettings: {
+        probabilityThreshold: 0.5,
+        binStep: 0.05,
+      },
+      enabled: true,
+      '8e6ffc70-14c1-425f-b1be-cef9656a55a5': {
+        auto: true,
+        filterSettings: {
+          probabilityThreshold: 0.6506245,
+          binStep: 0.05,
+        },
+        defaultFilterSettings: {
+          probabilityThreshold: 0.6506245,
+          binStep: 0.05,
+        },
+        api_url: 'http://host.docker.internal:3000',
+        enabled: true,
+      },
+    },
+  },
+];
+
+describe('helper functions for skeletons', () => {
+  it('returns the first changed step if it is before all the completed steps', async () => {
+    const completedSteps = ['ClassifierFilter'];
 
-    const qcSteps = await getQcStepsToRun(fake.EXPERIMENT_ID, processingConfig);
+    const qcSteps = await getQcStepsToRun(fake.EXPERIMENT_ID, processingConfig, completedSteps);
     expect(qcSteps[0]).toEqual('CellSizeDistributionFilterMap');
   });
 
-  it('returns the default first step and full state machine if the config has no updates', async () => {
-    const processingConfig = [];
+  it('returns from first not-completed step if the config has changes after that', async () => {
+    const completedSteps = [];
 
-    const qcSteps = await getQcStepsToRun(fake.EXPERIMENT_ID, processingConfig);
+    const qcSteps = await getQcStepsToRun(fake.EXPERIMENT_ID, processingConfig, completedSteps);
     expect(qcSteps[0]).toEqual('ClassifierFilterMap');
     const stateMachine = buildQCPipelineSteps(qcSteps);
     expect(stateMachine).toMatchSnapshot();
   });
+
+  it('returns from first not-completed step if the config has no changes', async () => {
+    const completedSteps = [
+      'ClassifierFilter',
+      'CellSizeDistributionFilter',
+      'MitochondrialContentFilter',
+      'NumGenesVsNumUmisFilter',
+    ];
+
+    const qcSteps = await getQcStepsToRun(fake.EXPERIMENT_ID, [], completedSteps);
+    expect(qcSteps[0]).toEqual('DoubletScoresFilterMap');
+    const stateMachine = buildQCPipelineSteps(qcSteps);
+    expect(stateMachine).toMatchSnapshot();
+  });
 });

From bb65e91a321aa22a2769c39815e441a16c3bd80a Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Thu, 16 Feb 2023 13:27:19 -0300
Subject: [PATCH 07/65] Update tests

---
 .../pipeline/getPipelineStatus.test.js        | 20 +++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js b/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js
index 4d2595132..8d28168c4 100644
--- a/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js
+++ b/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js
@@ -7,6 +7,7 @@ const pipelineConstants = require('../../../../src/api.v2/constants');
 const config = require('../../../../src/config');
 
 const ExperimentExecution = require('../../../../src/api.v2/model/ExperimentExecution');
+const { qcStepNames } = require('../../../../src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers');
 
 const experimentExecutionInstance = ExperimentExecution();
 
@@ -293,6 +294,11 @@ describe('pipelineStatus', () => {
     callback(null, params);
   });
 
+  const mockDescribeStateMachine = jest.fn();
+  AWSMock.mock('StepFunctions', 'describeStateMachine', (params, callback) => {
+    mockDescribeStateMachine(params, callback);
+  });
+
   AWSMock.mock('StepFunctions', 'getExecutionHistory', (params, callback) => {
     callback(null, { events: [] });
   });
@@ -498,6 +504,20 @@ describe('pipelineStatus', () => {
   });
 
   it('handles properly a qc sql record', async () => {
+    mockDescribeStateMachine.mockImplementation((params, callback) => {
+      const stateMachine = {
+        definition: JSON.stringify({
+          States: qcStepNames.reduce((acum, current) => {
+            // eslint-disable-next-line no-param-reassign
+            acum[current] = {};
+            return acum;
+          }, {}),
+        }),
+      };
+
+      callback(null, stateMachine);
+    });
+
     const status = await getPipelineStatus(SUCCEEDED_ID, QC_PROCESS_NAME);
 
     const expectedStatus = {

From 6b61a19b11612e65068c63e796e9323bae0af11e Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Thu, 16 Feb 2023 13:36:31 -0300
Subject: [PATCH 08/65] Add test for the new case

---
 .../pipeline/getPipelineStatus.test.js        | 56 ++++++++++++++++++-
 1 file changed, 55 insertions(+), 1 deletion(-)

diff --git a/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js b/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js
index 8d28168c4..80e8f992c 100644
--- a/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js
+++ b/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js
@@ -503,7 +503,7 @@ describe('pipelineStatus', () => {
     );
   });
 
-  it('handles properly a qc sql record', async () => {
+  it('returns a full qc run from sql correctly', async () => {
     mockDescribeStateMachine.mockImplementation((params, callback) => {
       const stateMachine = {
         definition: JSON.stringify({
@@ -542,6 +542,60 @@ describe('pipelineStatus', () => {
     );
   });
 
+  it('returns a partial qc run from sql correctly', async () => {
+    // If only these 3 steps were scheduled for this state machine,
+    // The not shceduled steps were already completed from a previous run
+    const scheduledSteps = [
+      'DoubletScoresFilterMap',
+      'DataIntegration',
+      'ConfigureEmbedding',
+    ];
+
+    const previousRunCompletedSteps = [
+      'ClassifierFilter',
+      'CellSizeDistributionFilter',
+      'MitochondrialContentFilter',
+      'NumGenesVsNumUmisFilter',
+    ];
+
+    mockDescribeStateMachine.mockImplementation((params, callback) => {
+      const stateMachine = {
+        definition: JSON.stringify({
+          States: scheduledSteps.reduce((acum, current) => {
+            // eslint-disable-next-line no-param-reassign
+            acum[current] = {};
+            return acum;
+          }, {}),
+        }),
+      };
+
+      callback(null, stateMachine);
+    });
+
+    const status = await getPipelineStatus(SUCCEEDED_ID, QC_PROCESS_NAME);
+
+    const expectedStatus = {
+      [QC_PROCESS_NAME]: {
+        startDate: new Date(0),
+        stopDate: new Date(0),
+        status: constants.SUCCEEDED,
+        error: false,
+        completedSteps: previousRunCompletedSteps,
+        shouldRerun: true,
+      },
+    };
+
+    expect(status).toEqual(expectedStatus);
+
+    expect(experimentExecutionInstance.find).toHaveBeenCalledWith({ experiment_id: SUCCEEDED_ID });
+
+    // sql last_status_response is updated because it differs
+    expect(experimentExecutionInstance.update).toHaveBeenCalledWith(
+      { experiment_id: SUCCEEDED_ID, pipeline_type: QC_PROCESS_NAME },
+      { last_status_response: expectedStatus },
+    );
+  });
+
 
   it('doesn\'t update sql last_status_response if it already matches', async () => {
     const expectedStatus = {

From 8fa1bec61fbc97c1134a8312f28b0072055065ec Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Thu, 16 Feb 2023 13:42:09 -0300
Subject: [PATCH 09/65] Update comments

---
 tests/api.v2/helpers/pipeline/getPipelineStatus.test.js | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js b/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js
index 80e8f992c..ede072774 100644
--- a/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js
+++ b/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js
@@ -543,14 +543,17 @@ describe('pipelineStatus', () => {
   });
 
   it('returns a partial qc run from sql correctly', async () => {
-    // If only these 3 steps were scheduled for this state machine,
-    // The not shceduled steps were already completed from a previous run
+    // Only these 3 steps were scheduled for this state machine,
+    // The not scheduled steps were already completed from a previous run
     const scheduledSteps = [
       'DoubletScoresFilterMap',
       'DataIntegration',
       'ConfigureEmbedding',
     ];
 
+    // These are the steps that were already completed,
+    // we need to check that these are marked as "completed" even
+    // if they are not completed in this state machine
     const previousRunCompletedSteps = [
       'ClassifierFilter',
       'CellSizeDistributionFilter',

From c17fa89204f774c1053459abd6fe854e128d9455 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Thu, 16 Feb 2023 15:01:56 -0300
Subject: [PATCH 10/65] Temporarily move all translations into
 qcStepNameTranslations

---
 .../helpers/pipeline/getPipelineStatus.js     |  2 +-
 .../constructors/qcStepNameTranslations.js    | 44 ++++++++++++++++
 .../pipeline/pipelineConstruct/qcHelpers.js   | 50 +------------------
 .../pipeline/getPipelineStatus.test.js        |  2 +-
 4 files changed, 47 insertions(+), 51 deletions(-)
 create mode 100644 src/api.v2/helpers/pipeline/pipelineConstruct/constructors/qcStepNameTranslations.js

diff --git a/src/api.v2/helpers/pipeline/getPipelineStatus.js b/src/api.v2/helpers/pipeline/getPipelineStatus.js
index 9730947c0..56b9ae9e2 100644
--- a/src/api.v2/helpers/pipeline/getPipelineStatus.js
+++ b/src/api.v2/helpers/pipeline/getPipelineStatus.js
@@ -9,7 +9,7 @@ const getLogger = require('../../../utils/getLogger');
 const pipelineConstants = require('../../constants');
 const { getPipelineStepNames } = require('./pipelineConstruct/skeletons');
 const shouldGem2sRerun = require('./shouldGem2sRerun');
-const { qcStepNames, stepNameToBackendStepNames } = require('./pipelineConstruct/qcHelpers');
+const { qcStepNames, stepNameToBackendStepNames } = require('./pipelineConstruct/constructors/qcStepNameTranslations');
 
 const logger = getLogger();
 
diff --git a/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/qcStepNameTranslations.js b/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/qcStepNameTranslations.js
new file mode 100644
index 000000000..9da7d0839
--- /dev/null
+++ b/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/qcStepNameTranslations.js
@@ -0,0 +1,44 @@
+
+const filterToStepName = {
+  classifier: 'ClassifierFilterMap',
+  cellSizeDistribution: 'CellSizeDistributionFilterMap',
+  mitochondrialContent: 'MitochondrialContentFilterMap',
+  numGenesVsNumUmis: 'NumGenesVsNumUmisFilterMap',
+  doubletScores: 'DoubletScoresFilterMap',
+  dataIntegration: 'DataIntegration',
+  configureEmbedding: 'ConfigureEmbedding',
+};
+
+const qcStepNames = [
+  'ClassifierFilterMap',
+  'CellSizeDistributionFilterMap',
+  'MitochondrialContentFilterMap',
+  'NumGenesVsNumUmisFilterMap',
+  'DoubletScoresFilterMap',
+  'DataIntegration',
+  'ConfigureEmbedding',
+];
+
+const backendStepNamesToStepName = {
+  ClassifierFilter: 'ClassifierFilterMap',
+  CellSizeDistributionFilter: 'CellSizeDistributionFilterMap',
+  MitochondrialContentFilter: 'MitochondrialContentFilterMap',
+  NumGenesVsNumUmisFilter: 'NumGenesVsNumUmisFilterMap',
+  DoubletScoresFilter: 'DoubletScoresFilterMap',
+  DataIntegration: 'DataIntegration',
+  ConfigureEmbedding: 'ConfigureEmbedding',
+};
+
+const stepNameToBackendStepNames = {
+  ClassifierFilterMap: 'ClassifierFilter',
+  CellSizeDistributionFilterMap: 'CellSizeDistributionFilter',
+  MitochondrialContentFilterMap: 'MitochondrialContentFilter',
+  NumGenesVsNumUmisFilterMap: 'NumGenesVsNumUmisFilter',
+  DoubletScoresFilterMap: 'DoubletScoresFilter',
+  DataIntegration: 'DataIntegration',
+  ConfigureEmbedding: 'ConfigureEmbedding',
+};
+
+module.exports = {
+  stepNameToBackendStepNames, backendStepNamesToStepName, qcStepNames, filterToStepName,
+};
diff --git a/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js b/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
index 3c287d783..e43e4b37a 100644
--- a/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
+++ b/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
@@ -1,52 +1,7 @@
 const _ = require('lodash');
 const { fileExists } = require('../../s3/fileExists');
 const { FILTERED_CELLS } = require('../../../../config/bucketNames');
-
-const filterToStepName = {
-  classifier: 'ClassifierFilterMap',
-  cellSizeDistribution: 'CellSizeDistributionFilterMap',
-  mitochondrialContent: 'MitochondrialContentFilterMap',
-  numGenesVsNumUmis: 'NumGenesVsNumUmisFilterMap',
-  doubletScores: 'DoubletScoresFilterMap',
-  dataIntegration: 'DataIntegration',
-  configureEmbedding: 'ConfigureEmbedding',
-};
-
-const qcStepNames = [
-  'ClassifierFilterMap',
-  'CellSizeDistributionFilterMap',
-  'MitochondrialContentFilterMap',
-  'NumGenesVsNumUmisFilterMap',
-  'DoubletScoresFilterMap',
-  'DataIntegration',
-  'ConfigureEmbedding',
-];
-
-// TODO: Check in the code review
-// I did to get over the hurdle and have something working but:
-// we need to talk about why we are using
-// ClassifierFilter vs ClassifierFilterMap for the backend status
-// can we switch it over to ClassifierFilterMap?
-// would make stuff way easier
-const backendStepNamesToStepName = {
-  ClassifierFilter: 'ClassifierFilterMap',
-  CellSizeDistributionFilter: 'CellSizeDistributionFilterMap',
-  MitochondrialContentFilter: 'MitochondrialContentFilterMap',
-  NumGenesVsNumUmisFilter: 'NumGenesVsNumUmisFilterMap',
-  DoubletScoresFilter: 'DoubletScoresFilterMap',
-  DataIntegration: 'DataIntegration',
-  ConfigureEmbedding: 'ConfigureEmbedding',
-};
-
-const stepNameToBackendStepNames = {
-  ClassifierFilterMap: 'ClassifierFilter',
-  CellSizeDistributionFilterMap: 'CellSizeDistributionFilter',
-  MitochondrialContentFilterMap: 'MitochondrialContentFilter',
-  NumGenesVsNumUmisFilterMap: 'NumGenesVsNumUmisFilter',
-  DoubletScoresFilterMap: 'DoubletScoresFilter',
-  DataIntegration: 'DataIntegration',
-  ConfigureEmbedding: 'ConfigureEmbedding',
-};
+const { filterToStepName, qcStepNames, backendStepNamesToStepName } = require('./constructors/qcStepNameTranslations');
 
 const qcStepsWithFilterSettings = [
   'cellSizeDistribution',
@@ -112,10 +67,7 @@ const getQcStepsToRun = async (experimentId, processingConfigUpdates, completedS
   return qcStepNames.slice(qcStepNames.indexOf(firstStep));
 };
 
-
 module.exports = {
   getQcStepsToRun,
   qcStepsWithFilterSettings,
-  qcStepNames,
-  stepNameToBackendStepNames,
 };
diff --git a/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js b/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js
index ede072774..7ea084548 100644
--- a/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js
+++ b/tests/api.v2/helpers/pipeline/getPipelineStatus.test.js
@@ -7,7 +7,7 @@ const pipelineConstants = require('../../../../src/api.v2/constants');
 const config = require('../../../../src/config');
 
 const ExperimentExecution = require('../../../../src/api.v2/model/ExperimentExecution');
-const { qcStepNames } = require('../../../../src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers');
+const { qcStepNames } = require('../../../../src/api.v2/helpers/pipeline/pipelineConstruct/constructors/qcStepNameTranslations');
 
 const experimentExecutionInstance = ExperimentExecution();
 

From 7794873d1c4bcbf47d2ae073ad7c0fb82d1a8d90 Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Fri, 17 Feb 2023 18:54:50 +0000
Subject: [PATCH 11/65] add cellsest to traj analysis work request

---
 .../WorkRequestTrajectoryAnalysisGetPseudoTime.v2.yaml       | 5 +++++
 .../WorkRequestTrajectoryAnalysisGetStartingNodes.v2.yaml    | 5 +++++
 2 files changed, 10 insertions(+)

diff --git a/src/specs/models/work-request-bodies/WorkRequestTrajectoryAnalysisGetPseudoTime.v2.yaml b/src/specs/models/work-request-bodies/WorkRequestTrajectoryAnalysisGetPseudoTime.v2.yaml
index 7988c3fb7..ba4b0b41a 100644
--- a/src/specs/models/work-request-bodies/WorkRequestTrajectoryAnalysisGetPseudoTime.v2.yaml
+++ b/src/specs/models/work-request-bodies/WorkRequestTrajectoryAnalysisGetPseudoTime.v2.yaml
@@ -34,6 +34,10 @@ properties:
         type: string
       resolution:
         type: number
+  cellSets:
+    type: array
+    items:
+      type: string
   rootNodes:
     type: array
     items:
@@ -42,5 +46,6 @@ required:
   - name
   - embedding
   - clustering
+  - cellSets
   - rootNodes
 additionalProperties: false
\ No newline at end of file
diff --git a/src/specs/models/work-request-bodies/WorkRequestTrajectoryAnalysisGetStartingNodes.v2.yaml b/src/specs/models/work-request-bodies/WorkRequestTrajectoryAnalysisGetStartingNodes.v2.yaml
index 9fce08eb0..25951bf2d 100644
--- a/src/specs/models/work-request-bodies/WorkRequestTrajectoryAnalysisGetStartingNodes.v2.yaml
+++ b/src/specs/models/work-request-bodies/WorkRequestTrajectoryAnalysisGetStartingNodes.v2.yaml
@@ -34,8 +34,13 @@ properties:
         type: string
       resolution:
         type: number
+  cellSets:
+    type: array
+    items:
+      type: string
 required:
   - name
   - embedding
   - clustering
+  - cellSets
 additionalProperties: false
\ No newline at end of file

From 027ae627bcbba931e37ea4c78667535ca78f9361 Mon Sep 17 00:00:00 2001
From: Pol Alvarez <pol.avms@gmail.com>
Date: Sat, 18 Feb 2023 14:17:15 +0800
Subject: [PATCH 12/65] implemented tests

---
 .secrets.baseline                             |  8 ++-
 .../worker/__mocks__/createObjectHash.js      |  7 ++
 .../worker/__mocks__/getWorkerStatus.js       | 21 ++++++
 src/api.v2/helpers/worker/createObjectHash.js |  5 ++
 .../worker/workSubmit/getExtraDependencies.js | 18 ++----
 .../worker/workSubmit/submitEmbeddingWork.js  | 12 ++--
 .../workSubmit/submitMarkerHeatmapWork.js     |  9 +--
 src/api.v2/model/Experiment.js                |  1 +
 src/config/default-config.js                  |  2 +-
 src/config/test-config.js                     |  3 +-
 .../submitEmbeddingWork.test.js.snap          | 24 +++++++
 .../submitMarkerHeatmapWork.test.js.snap      | 31 +++++++++
 .../worker/submitEmbeddingWork.test.js        | 63 ++++++++++++++++++
 .../worker/submitMarkerHeatmapWork.test.js    | 64 +++++++++++++++++++
 .../data/getProcessingConfigResponse.json     | 28 ++++----
 .../default-config-production.test.js.snap    |  1 +
 ...ault-config-staging-sandboxid.test.js.snap |  1 +
 .../default-config-staging.test.js.snap       |  1 +
 18 files changed, 261 insertions(+), 38 deletions(-)
 create mode 100644 src/api.v2/helpers/worker/__mocks__/createObjectHash.js
 create mode 100644 src/api.v2/helpers/worker/__mocks__/getWorkerStatus.js
 create mode 100644 src/api.v2/helpers/worker/createObjectHash.js
 create mode 100644 tests/api.v2/helpers/worker/__snapshots__/submitEmbeddingWork.test.js.snap
 create mode 100644 tests/api.v2/helpers/worker/__snapshots__/submitMarkerHeatmapWork.test.js.snap
 create mode 100644 tests/api.v2/helpers/worker/submitEmbeddingWork.test.js
 create mode 100644 tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js

diff --git a/.secrets.baseline b/.secrets.baseline
index 0539f103a..2ca39c17c 100644
--- a/.secrets.baseline
+++ b/.secrets.baseline
@@ -2,7 +2,13 @@
   "custom_plugin_paths": [],
   "exclude": {
     "lines": ".*integrity.*",
-    "files": "package-lock.json"
+    "files": [
+      "package-lock.json",
+      "default-config-production.test.js.snap",
+      "default-config-staging.test.js.snap",
+      "default-config-staging-sandboxid.test.js.snap",
+      "tests/config/__snapshots__/default-config-production.test.js.snap"
+    ]
   },
   "generated_at": "2020-10-27T12:47:37Z",
   "plugins_used": [
diff --git a/src/api.v2/helpers/worker/__mocks__/createObjectHash.js b/src/api.v2/helpers/worker/__mocks__/createObjectHash.js
new file mode 100644
index 000000000..485a4a333
--- /dev/null
+++ b/src/api.v2/helpers/worker/__mocks__/createObjectHash.js
@@ -0,0 +1,7 @@
+const hash = require('object-hash');
+
+const mockCreateObjectHash = jest.fn(
+  (object) => hash.MD5(object),
+);
+
+module.exports = mockCreateObjectHash;
diff --git a/src/api.v2/helpers/worker/__mocks__/getWorkerStatus.js b/src/api.v2/helpers/worker/__mocks__/getWorkerStatus.js
new file mode 100644
index 000000000..beef527f6
--- /dev/null
+++ b/src/api.v2/helpers/worker/__mocks__/getWorkerStatus.js
@@ -0,0 +1,21 @@
+
+const mockGetWorkerStatus = jest.fn(
+  (experimentId) => new Promise((resolve, reject) => {
+    if (experimentId === 'nonExistentId') {
+      const err = new Error('Unknown project or sample');
+      err.status = 404;
+
+      reject(err);
+    }
+
+    const response = {
+      worker: {
+        ready: true, restartCount: 0, started: true, status: 'Running',
+      },
+    };
+
+    resolve(response);
+  }),
+);
+
+module.exports = mockGetWorkerStatus;
diff --git a/src/api.v2/helpers/worker/createObjectHash.js b/src/api.v2/helpers/worker/createObjectHash.js
new file mode 100644
index 000000000..68f1d194e
--- /dev/null
+++ b/src/api.v2/helpers/worker/createObjectHash.js
@@ -0,0 +1,5 @@
+const hash = require('object-hash');
+
+const createObjectHash = (object) => hash.MD5(object);
+
+module.exports = createObjectHash;
diff --git a/src/api.v2/helpers/worker/workSubmit/getExtraDependencies.js b/src/api.v2/helpers/worker/workSubmit/getExtraDependencies.js
index 4caaa78c9..27669fa24 100644
--- a/src/api.v2/helpers/worker/workSubmit/getExtraDependencies.js
+++ b/src/api.v2/helpers/worker/workSubmit/getExtraDependencies.js
@@ -1,14 +1,8 @@
-// import workerVersions from 'utils/work/workerVersions';
-
 const workerVersions = require('../workerVersions');
 
-const Experiment = require('../../../model/Experiment');
-
-const getClusteringSettings = async (experimentId) => {
-  const processingConfig = await new Experiment().getProcessingConfig(experimentId);
-
-  console.log('processingConfig: ', processingConfig);
-  const { clusteringSettings } = processingConfig.configureEmbedding;
+const getClusteringSettings = async (message) => {
+  console.log('processingConfig: ', message);
+  const { input: { config: { clusteringSettings } } } = message;
 
 
   return clusteringSettings;
@@ -33,10 +27,12 @@ const dependencyGetters = {
   GetNormalizedExpression: [getClusteringSettings],
 };
 
-const getExtraDependencies = async (experimentId, name, dispatch, getState) => {
+// message is assumed to be the configureEmbedding payload received
+// from the pipeline containing clustering & embedding settings
+const getExtraDependencies = async (name, message) => {
   const dependencies = await Promise.all(
     dependencyGetters[name].map(
-      (dependencyGetter) => dependencyGetter(experimentId, dispatch, getState),
+      (dependencyGetter) => dependencyGetter(message),
     ),
   );
 
diff --git a/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js b/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
index 47b643969..d8ae7f31d 100644
--- a/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
+++ b/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
@@ -1,9 +1,7 @@
-const hash = require('object-hash');
 const config = require('../../../../config');
 const validateAndSubmitWork = require('../../../events/validateAndSubmitWork');
 const getExperimentBackendStatus = require('../../backendStatus/getExperimentBackendStatus');
-
-const createObjectHash = (object) => hash.MD5(object);
+const createObjectHash = require('../createObjectHash');
 
 
 const submitEmbeddingWork = async (message) => {
@@ -15,7 +13,6 @@ const submitEmbeddingWork = async (message) => {
   } = message;
 
   const embeddingConfig = methodSettings[method];
-  // consider replacing with getPipelineStatus
   const backendStatus = await getExperimentBackendStatus(experimentId);
   const { pipeline: { startDate: qcPipelineStartDate } } = backendStatus;
 
@@ -24,7 +21,7 @@ const submitEmbeddingWork = async (message) => {
   const body = {
     name: 'GetEmbedding',
     type: method,
-    embeddingConfig,
+    config: embeddingConfig,
   };
 
   // these values need to match explicitly the default ones defined in the UI at
@@ -34,7 +31,7 @@ const submitEmbeddingWork = async (message) => {
   const cacheUniquenessKey = null;
   const extras = undefined;
   const extraDependencies = [];
-  const workerVersion = { config };
+  const { workerVersion } = config;
 
   const ETagBody = {
     experimentId,
@@ -63,6 +60,9 @@ const submitEmbeddingWork = async (message) => {
   };
 
   await validateAndSubmitWork(request);
+
+  // explicitly return ETag to make it stand out more in tests and so harder to break
+  return ETag;
 };
 
 module.exports = submitEmbeddingWork;
diff --git a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
index 8e8773fb9..2f929841a 100644
--- a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
+++ b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
@@ -1,11 +1,9 @@
-const hash = require('object-hash');
+const createObjectHash = require('../createObjectHash');
 const config = require('../../../../config');
 const validateAndSubmitWork = require('../../../events/validateAndSubmitWork');
 const getExperimentBackendStatus = require('../../backendStatus/getExperimentBackendStatus');
 const getExtraDependencies = require('./getExtraDependencies');
 
-const createObjectHash = (object) => hash.MD5(object);
-
 
 const submitMarkerHeatmapWork = async (message) => {
   console.log('payload ', message);
@@ -29,7 +27,7 @@ const submitMarkerHeatmapWork = async (message) => {
   const cacheUniquenessKey = null;
 
   const extras = undefined;
-  const extraDependencies = await getExtraDependencies(experimentId, body.name);
+  const extraDependencies = await getExtraDependencies(body.name, message);
   const { workerVersion } = config;
   const ETagBody = {
     experimentId,
@@ -58,6 +56,9 @@ const submitMarkerHeatmapWork = async (message) => {
   };
 
   await validateAndSubmitWork(request);
+
+  // explicitly return ETag to make it stand out more in tests and so harder to break
+  return ETag;
 };
 
 module.exports = submitMarkerHeatmapWork;
diff --git a/src/api.v2/model/Experiment.js b/src/api.v2/model/Experiment.js
index a7df9c7eb..24908b9e2 100644
--- a/src/api.v2/model/Experiment.js
+++ b/src/api.v2/model/Experiment.js
@@ -202,6 +202,7 @@ class Experiment extends BasicModel {
     }
   }
 
+
   async getProcessingConfig(experimentId) {
     const result = await this.findOne({ id: experimentId });
     if (_.isEmpty(result)) {
diff --git a/src/config/default-config.js b/src/config/default-config.js
index 766a1d72d..39f243bc1 100644
--- a/src/config/default-config.js
+++ b/src/config/default-config.js
@@ -74,7 +74,7 @@ const config = {
   // Used for Batch reporting
   datadogApiKey: process.env.DD_API_KEY || '',
   datadogAppKey: process.env.DD_APP_KEY || '',
-  workerVersion: 3,
+  workerVersion: 3, // needs to match workerVersion in UI
 };
 
 // We are in permanent develop staging environment
diff --git a/src/config/test-config.js b/src/config/test-config.js
index cdf082ae6..18510a08a 100644
--- a/src/config/test-config.js
+++ b/src/config/test-config.js
@@ -22,6 +22,7 @@ module.exports = {
   cachingEnabled: false,
   publicApiUrl: 'test-public-api-url',
   awsBatchIgnoreSSLCertificate: false,
-  datadogApiKey: 'test-datadog-api-key',
+  datadogApiKey: 'test-datadog-api-key', // pragma: allowlist secret
   datadogAppKey: 'test-datadog-app-key',
+  workerVersion: 3, // needs to match workerVersion in UI
 };
diff --git a/tests/api.v2/helpers/worker/__snapshots__/submitEmbeddingWork.test.js.snap b/tests/api.v2/helpers/worker/__snapshots__/submitEmbeddingWork.test.js.snap
new file mode 100644
index 000000000..c87840138
--- /dev/null
+++ b/tests/api.v2/helpers/worker/__snapshots__/submitEmbeddingWork.test.js.snap
@@ -0,0 +1,24 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`submitWorkEmbedding submits the work and the ETag / params are correct 1`] = `
+Array [
+  Array [
+    Object {
+      "body": Object {
+        "config": Object {
+          "distanceMetric": "cosine",
+          "minimumDistance": 0.3,
+        },
+        "name": "GetEmbedding",
+        "type": "umap",
+      },
+      "cacheUniquenessKey": null,
+      "experimentId": "6463cb35-3e08-4e94-a181-6d155a5ca570",
+      "extraDependencies": Array [],
+      "extras": undefined,
+      "qcPipelineStartDate": "2016-03-22T04:00:00.000Z",
+      "workerVersion": 3,
+    },
+  ],
+]
+`;
diff --git a/tests/api.v2/helpers/worker/__snapshots__/submitMarkerHeatmapWork.test.js.snap b/tests/api.v2/helpers/worker/__snapshots__/submitMarkerHeatmapWork.test.js.snap
new file mode 100644
index 000000000..9501bcd70
--- /dev/null
+++ b/tests/api.v2/helpers/worker/__snapshots__/submitMarkerHeatmapWork.test.js.snap
@@ -0,0 +1,31 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`submitWorkEmbedding submits the work and the ETag / params are correct 1`] = `
+Array [
+  Array [
+    Object {
+      "body": Object {
+        "cellSetKey": "louvain",
+        "nGenes": 5,
+        "name": "MarkerHeatmap",
+      },
+      "cacheUniquenessKey": null,
+      "experimentId": "6463cb35-3e08-4e94-a181-6d155a5ca570",
+      "extraDependencies": Array [
+        Object {
+          "method": "louvain",
+          "methodSettings": Object {
+            "louvain": Object {
+              "resolution": 0.8,
+            },
+          },
+        },
+        1,
+      ],
+      "extras": undefined,
+      "qcPipelineStartDate": "2016-03-22T04:00:00.000Z",
+      "workerVersion": 3,
+    },
+  ],
+]
+`;
diff --git a/tests/api.v2/helpers/worker/submitEmbeddingWork.test.js b/tests/api.v2/helpers/worker/submitEmbeddingWork.test.js
new file mode 100644
index 000000000..01e466d6f
--- /dev/null
+++ b/tests/api.v2/helpers/worker/submitEmbeddingWork.test.js
@@ -0,0 +1,63 @@
+const createObjectHash = require('../../../../src/api.v2/helpers/worker/createObjectHash');
+const submitEmbeddingWork = require('../../../../src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork');
+const validateAndSubmitWork = require('../../../../src/api.v2/events/validateAndSubmitWork');
+
+
+jest.mock('../../../../src/api.v2/helpers/worker/createObjectHash');
+jest.mock('../../../../src/api.v2/helpers/pipeline/getPipelineStatus');
+jest.mock('../../../../src/api.v2/helpers/worker/getWorkerStatus');
+jest.mock('../../../../src/api.v2/events/validateAndSubmitWork');
+
+
+const message = {
+  experimentId: '6463cb35-3e08-4e94-a181-6d155a5ca570',
+  taskName: 'configureEmbedding',
+  input: {
+    experimentId: '6463cb35-3e08-4e94-a181-6d155a5ca570',
+    taskName: 'configureEmbedding',
+    processName: 'qc',
+    sampleUuid: '',
+    uploadCountMatrix: false,
+    authJWT: 'Bearer whatever',
+    config: {
+      embeddingSettings: {
+        method: 'umap',
+        methodSettings: {
+          tsne: { perplexity: 30, learningRate: 200 },
+          umap: { distanceMetric: 'cosine', minimumDistance: 0.3 },
+        },
+      },
+      clusteringSettings: {
+        method: 'louvain',
+        methodSettings: { louvain: { resolution: 0.8 } },
+      },
+    },
+
+  },
+  output: {
+    bucket: 'worker-results-development-000000000000',
+    key: '0eabfedf-0efe-4abf-8725-7062c54ed5e1',
+  },
+  response: { error: false },
+  pipelineVersion: 2,
+  apiUrl: null,
+};
+
+describe('submitWorkEmbedding', () => {
+  // If this test fails it means you have changed parameters upon which the
+  // embedding / marker heatmap precomputing feature depends. These parameters are
+  // duplicated in the UI / API: if you have changed them here, make sure you change
+  // them in the other repository or that feature will stop working.
+  it('submits the work and the ETag / params are correct', async () => {
+    const ETag = await submitEmbeddingWork(message);
+
+    // these are the parameters used to create the ETag and
+    // they should match exactly UI snapshot:
+    // loadEmbedding.defaultParams.test.js.snap
+    expect(createObjectHash.mock.calls).toMatchSnapshot();
+    // this ETag should match exactly the one in
+    // loadEmbedding.defaultParams.test.js
+    expect(ETag).toEqual('fb7a8a67c7806fa90c69b9cdb16dd478'); // pragma: allowlist secret
+    expect(validateAndSubmitWork).toBeCalledTimes(1);
+  });
+});
diff --git a/tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js b/tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js
new file mode 100644
index 000000000..60327796a
--- /dev/null
+++ b/tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js
@@ -0,0 +1,64 @@
+const createObjectHash = require('../../../../src/api.v2/helpers/worker/createObjectHash');
+const submitMarkerHeatmapWork = require('../../../../src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork');
+const validateAndSubmitWork = require('../../../../src/api.v2/events/validateAndSubmitWork');
+
+jest.mock('../../../../src/api.v2/helpers/worker/createObjectHash');
+jest.mock('../../../../src/api.v2/helpers/pipeline/getPipelineStatus');
+jest.mock('../../../../src/api.v2/helpers/worker/getWorkerStatus');
+jest.mock('../../../../src/api.v2/events/validateAndSubmitWork');
+
+
+const message = {
+  experimentId: '6463cb35-3e08-4e94-a181-6d155a5ca570',
+  taskName: 'configureEmbedding',
+  input: {
+    experimentId: '6463cb35-3e08-4e94-a181-6d155a5ca570',
+    taskName: 'configureEmbedding',
+    processName: 'qc',
+    sampleUuid: '',
+    uploadCountMatrix: false,
+    authJWT: 'Bearer whatever',
+    config: {
+      embeddingSettings: {
+        method: 'umap',
+        methodSettings: {
+          tsne: { perplexity: 30, learningRate: 200 },
+          umap: { distanceMetric: 'cosine', minimumDistance: 0.3 },
+        },
+      },
+      clusteringSettings: {
+        method: 'louvain',
+        methodSettings: { louvain: { resolution: 0.8 } },
+      },
+    },
+
+  },
+  output: {
+    bucket: 'worker-results-development-000000000000',
+    key: '0eabfedf-0efe-4abf-8725-7062c54ed5e1',
+  },
+  response: { error: false },
+  pipelineVersion: 2,
+  apiUrl: null,
+};
+
+describe('submitWorkEmbedding', () => {
+  // If this test fails it means you have changed parameters upon which the
+  // embedding / marker heatmap precomputing feature depends. These parameters are
+  // duplicated in the UI / API: if you have changed them here, make sure you change
+  // them in the other repository or that feature will stop working.
+  it('submits the work and the ETag / params are correct', async () => {
+    const ETag = await submitMarkerHeatmapWork(message);
+
+
+    // these are the parameters used to create the ETag and
+    // they should match exactly UI snapshot:
+    // loadMarkerGenes.defaultParams.test.js.snap
+    expect(createObjectHash.mock.calls).toMatchSnapshot();
+    // this ETag should match exactly the one in
+    // loadMarkerGenes.defaultParams.test.js
+    expect(ETag).toEqual('9db473fff00ea358446196ee3276f486'); // pragma: allowlist secret
+
+    expect(validateAndSubmitWork).toBeCalledTimes(1);
+  });
+});
diff --git a/tests/api.v2/mocks/data/getProcessingConfigResponse.json b/tests/api.v2/mocks/data/getProcessingConfigResponse.json
index 796cb1ca5..7d1b66e15 100644
--- a/tests/api.v2/mocks/data/getProcessingConfigResponse.json
+++ b/tests/api.v2/mocks/data/getProcessingConfigResponse.json
@@ -13,7 +13,7 @@
           "sample1Id": {
             "auto": true,
             "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-          
+
             "enabled": true,
             "filterSettings": {
               "binStep": 0.05,
@@ -27,7 +27,7 @@
           "sample2Id": {
             "auto": true,
             "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-          
+
             "enabled": true,
             "filterSettings": {
               "binStep": 0.05,
@@ -41,7 +41,7 @@
           "sample3Id": {
             "auto": true,
             "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-          
+
             "enabled": true,
             "filterSettings": {
               "binStep": 0.05,
@@ -55,7 +55,7 @@
         },
         "dataIntegration": {
           "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-        
+
           "dataIntegration": {
             "method": "harmony",
             "methodSettings": {
@@ -96,7 +96,7 @@
           "sample1Id": {
             "auto": true,
             "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-          
+
             "enabled": true,
             "filterSettings": {
               "regressionType": "linear",
@@ -124,7 +124,7 @@
           "sample2Id": {
             "auto": true,
             "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-          
+
             "enabled": true,
             "filterSettings": {
               "regressionType": "linear",
@@ -152,7 +152,7 @@
           "sample3Id": {
             "auto": true,
             "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-          
+
             "enabled": true,
             "filterSettings": {
               "regressionType": "linear",
@@ -180,7 +180,7 @@
         },
         "configureEmbedding": {
           "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-        
+
           "embeddingSettings": {
             "method": "umap",
             "methodSettings": {
@@ -213,7 +213,7 @@
           "sample1Id": {
             "auto": true,
             "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-          
+
             "enabled": false,
             "filterSettings": {
               "binStep": 200,
@@ -227,7 +227,7 @@
           "sample2Id": {
             "auto": true,
             "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-          
+
             "enabled": false,
             "filterSettings": {
               "binStep": 200,
@@ -241,7 +241,7 @@
           "sample3Id": {
             "auto": true,
             "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-          
+
             "enabled": false,
             "filterSettings": {
               "binStep": 200,
@@ -268,7 +268,7 @@
           "sample1Id": {
             "auto": true,
             "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-          
+
             "enabled": true,
             "filterSettings": {
               "method": "absoluteThreshold",
@@ -292,7 +292,7 @@
           "sample2Id": {
             "auto": true,
             "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-          
+
             "enabled": true,
             "filterSettings": {
               "method": "absoluteThreshold",
@@ -316,7 +316,7 @@
           "sample3Id": {
             "auto": true,
             "apiUrl": "http://api.api-default.svc.cluster.local:3000",
-          
+
             "enabled": true,
             "filterSettings": {
               "method": "absoluteThreshold",
diff --git a/tests/config/__snapshots__/default-config-production.test.js.snap b/tests/config/__snapshots__/default-config-production.test.js.snap
index eaf234fc7..5c7344e67 100644
--- a/tests/config/__snapshots__/default-config-production.test.js.snap
+++ b/tests/config/__snapshots__/default-config-production.test.js.snap
@@ -32,5 +32,6 @@ Object {
   "rdsSandboxId": "default",
   "sandboxId": "default",
   "workerNamespace": "worker-default",
+  "workerVersion": 3,
 }
 `;
diff --git a/tests/config/__snapshots__/default-config-staging-sandboxid.test.js.snap b/tests/config/__snapshots__/default-config-staging-sandboxid.test.js.snap
index e7e6699e3..c05e99694 100644
--- a/tests/config/__snapshots__/default-config-staging-sandboxid.test.js.snap
+++ b/tests/config/__snapshots__/default-config-staging-sandboxid.test.js.snap
@@ -32,5 +32,6 @@ Object {
   "rdsSandboxId": "default",
   "sandboxId": "mockedSandboxId",
   "workerNamespace": "worker-mockedSandboxId",
+  "workerVersion": 3,
 }
 `;
diff --git a/tests/config/__snapshots__/default-config-staging.test.js.snap b/tests/config/__snapshots__/default-config-staging.test.js.snap
index 9c54fc085..9eb6fdac3 100644
--- a/tests/config/__snapshots__/default-config-staging.test.js.snap
+++ b/tests/config/__snapshots__/default-config-staging.test.js.snap
@@ -32,5 +32,6 @@ Object {
   "rdsSandboxId": "default",
   "sandboxId": "default",
   "workerNamespace": "worker-default",
+  "workerVersion": 3,
 }
 `;

From 9bede8383f9eae799509ce329d2b009596ecc4cb Mon Sep 17 00:00:00 2001
From: Pol Alvarez <pol.avms@gmail.com>
Date: Sat, 18 Feb 2023 14:36:33 +0800
Subject: [PATCH 13/65] disabled detect-secrets for config snaps

---
 .secrets.baseline | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/.secrets.baseline b/.secrets.baseline
index 2ca39c17c..a30132ad6 100644
--- a/.secrets.baseline
+++ b/.secrets.baseline
@@ -1,14 +1,8 @@
 {
   "custom_plugin_paths": [],
   "exclude": {
-    "lines": ".*integrity.*",
-    "files": [
-      "package-lock.json",
-      "default-config-production.test.js.snap",
-      "default-config-staging.test.js.snap",
-      "default-config-staging-sandboxid.test.js.snap",
-      "tests/config/__snapshots__/default-config-production.test.js.snap"
-    ]
+    "lines": ".*integrity.*|\"datadogApiKey\": \"\",",
+    "files": "package-lock.json"
   },
   "generated_at": "2020-10-27T12:47:37Z",
   "plugins_used": [

From 4467cba86d3b7b938105dd871b4cf8b8714aebea Mon Sep 17 00:00:00 2001
From: Pol Alvarez <pol.avms@gmail.com>
Date: Sat, 18 Feb 2023 14:49:01 +0800
Subject: [PATCH 14/65] removed debug logs

---
 src/api.v2/events/validateAndSubmitWork.js                | 1 -
 .../helpers/worker/workSubmit/submitEmbeddingWork.js      | 6 ------
 .../helpers/worker/workSubmit/submitMarkerHeatmapWork.js  | 8 --------
 3 files changed, 15 deletions(-)

diff --git a/src/api.v2/events/validateAndSubmitWork.js b/src/api.v2/events/validateAndSubmitWork.js
index 0c4200f42..4c1fe9c41 100644
--- a/src/api.v2/events/validateAndSubmitWork.js
+++ b/src/api.v2/events/validateAndSubmitWork.js
@@ -6,7 +6,6 @@ const getPipelineStatus = require('../helpers/pipeline/getPipelineStatus');
 const pipelineConstants = require('../constants');
 
 const validateAndSubmitWork = async (workRequest) => {
-  console.log('validateAndSubmitWork: ', workRequest);
   const { experimentId } = workRequest;
 
 
diff --git a/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js b/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
index d8ae7f31d..ff1153a59 100644
--- a/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
+++ b/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
@@ -5,8 +5,6 @@ const createObjectHash = require('../createObjectHash');
 
 
 const submitEmbeddingWork = async (message) => {
-  console.log('payload ', message);
-
   const {
     experimentId, input:
     { authJWT, config: { embeddingSettings: { method, methodSettings } } },
@@ -17,7 +15,6 @@ const submitEmbeddingWork = async (message) => {
   const { pipeline: { startDate: qcPipelineStartDate } } = backendStatus;
 
 
-  console.log(`qcPipelineStartDate: ${qcPipelineStartDate} ${typeof (qcPipelineStartDate)}`);
   const body = {
     name: 'GetEmbedding',
     type: method,
@@ -43,10 +40,7 @@ const submitEmbeddingWork = async (message) => {
     extraDependencies,
   };
 
-  console.log('embedding ETagBody: ', ETagBody);
   const ETag = createObjectHash(ETagBody);
-  console.log('submitEmbeddingWork: embedding Etag ', ETag);
-
   const now = new Date();
   const timeout = 15 * 60 * 1000; // 15min in ms
   const timeoutDate = new Date(now.getTime() + timeout);
diff --git a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
index 2f929841a..e006fc57d 100644
--- a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
+++ b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
@@ -6,14 +6,9 @@ const getExtraDependencies = require('./getExtraDependencies');
 
 
 const submitMarkerHeatmapWork = async (message) => {
-  console.log('payload ', message);
-
   const { experimentId, input: { authJWT } } = message;
 
-  // const { resolution } = methodSettings[method];
-  // consider replacing with getPipelineStatus
   const backendStatus = await getExperimentBackendStatus(experimentId);
-  // console.log('backendStatus: ', backendStatus);
   const { pipeline: { startDate: qcPipelineStartDate } } = backendStatus;
   const numGenes = 5;
   const selectedCellSet = 'louvain';
@@ -39,10 +34,7 @@ const submitMarkerHeatmapWork = async (message) => {
     extraDependencies,
   };
 
-  console.log('marker heatmap ETagBody: ', ETagBody);
   const ETag = createObjectHash(ETagBody);
-  console.log('submitEmbeddingWork: marker heatmap Etag ', ETag);
-
   const now = new Date();
   const timeout = 15 * 60 * 1000; // 15min in ms
   const timeoutDate = new Date(now.getTime() + timeout);

From d0ac0cd22890ee26ffbc48079a2199bc10629b99 Mon Sep 17 00:00:00 2001
From: Pol Alvarez <pol.avms@gmail.com>
Date: Sun, 19 Feb 2023 22:11:34 +0800
Subject: [PATCH 15/65] added debug logs

---
 .../helpers/worker/workSubmit/submitMarkerHeatmapWork.js       | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
index e006fc57d..cde74f8d7 100644
--- a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
+++ b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
@@ -34,6 +34,7 @@ const submitMarkerHeatmapWork = async (message) => {
     extraDependencies,
   };
 
+
   const ETag = createObjectHash(ETagBody);
   const now = new Date();
   const timeout = 15 * 60 * 1000; // 15min in ms
@@ -49,6 +50,8 @@ const submitMarkerHeatmapWork = async (message) => {
 
   await validateAndSubmitWork(request);
 
+  console.log('markerHeatmap - body: ', ETagBody);
+  console.log('markerHeatmap - etag: ', ETag);
   // explicitly return ETag to make it stand out more in tests and so harder to break
   return ETag;
 };

From 8004a54864bcc4b3a7d9ce1bda5d829b11be18fd Mon Sep 17 00:00:00 2001
From: Pol Alvarez <pol.avms@gmail.com>
Date: Tue, 21 Feb 2023 14:48:53 +0800
Subject: [PATCH 16/65] refactored code into submitWork.js

---
 .../worker/workSubmit/submitEmbeddingWork.js  | 36 +-------------
 .../workSubmit/submitMarkerHeatmapWork.js     | 38 +-------------
 .../helpers/worker/workSubmit/submitWork.js   | 49 +++++++++++++++++++
 3 files changed, 53 insertions(+), 70 deletions(-)
 create mode 100644 src/api.v2/helpers/worker/workSubmit/submitWork.js

diff --git a/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js b/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
index ff1153a59..f2f64b9ce 100644
--- a/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
+++ b/src/api.v2/helpers/worker/workSubmit/submitEmbeddingWork.js
@@ -1,7 +1,4 @@
-const config = require('../../../../config');
-const validateAndSubmitWork = require('../../../events/validateAndSubmitWork');
-const getExperimentBackendStatus = require('../../backendStatus/getExperimentBackendStatus');
-const createObjectHash = require('../createObjectHash');
+const submitWork = require('./submitWork');
 
 
 const submitEmbeddingWork = async (message) => {
@@ -11,9 +8,6 @@ const submitEmbeddingWork = async (message) => {
   } = message;
 
   const embeddingConfig = methodSettings[method];
-  const backendStatus = await getExperimentBackendStatus(experimentId);
-  const { pipeline: { startDate: qcPipelineStartDate } } = backendStatus;
-
 
   const body = {
     name: 'GetEmbedding',
@@ -25,35 +19,9 @@ const submitEmbeddingWork = async (message) => {
   // src/utils/work/fetchWork.js when calling the function generateETag if this file
   // or the one in the UI has any default changed, the pre-computing of embeddings/marker heatmp
   // will stop working as the ETags will no longer match.
-  const cacheUniquenessKey = null;
-  const extras = undefined;
   const extraDependencies = [];
-  const { workerVersion } = config;
-
-  const ETagBody = {
-    experimentId,
-    body,
-    qcPipelineStartDate: qcPipelineStartDate.toISOString(),
-    extras,
-    cacheUniquenessKey,
-    workerVersion,
-    extraDependencies,
-  };
-
-  const ETag = createObjectHash(ETagBody);
-  const now = new Date();
-  const timeout = 15 * 60 * 1000; // 15min in ms
-  const timeoutDate = new Date(now.getTime() + timeout);
-  const request = {
-    ETag,
-    socketId: 'randomID',
-    experimentId,
-    authJWT,
-    timeout: timeoutDate.toISOString(),
-    body,
-  };
 
-  await validateAndSubmitWork(request);
+  const ETag = await submitWork(experimentId, authJWT, body, extraDependencies);
 
   // explicitly return ETag to make it stand out more in tests and so harder to break
   return ETag;
diff --git a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
index cde74f8d7..44607b5d8 100644
--- a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
+++ b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
@@ -1,15 +1,10 @@
-const createObjectHash = require('../createObjectHash');
-const config = require('../../../../config');
-const validateAndSubmitWork = require('../../../events/validateAndSubmitWork');
-const getExperimentBackendStatus = require('../../backendStatus/getExperimentBackendStatus');
 const getExtraDependencies = require('./getExtraDependencies');
+const submitWork = require('./submitWork');
 
 
 const submitMarkerHeatmapWork = async (message) => {
   const { experimentId, input: { authJWT } } = message;
 
-  const backendStatus = await getExperimentBackendStatus(experimentId);
-  const { pipeline: { startDate: qcPipelineStartDate } } = backendStatus;
   const numGenes = 5;
   const selectedCellSet = 'louvain';
 
@@ -19,39 +14,10 @@ const submitMarkerHeatmapWork = async (message) => {
     cellSetKey: selectedCellSet,
   };
 
-  const cacheUniquenessKey = null;
 
-  const extras = undefined;
   const extraDependencies = await getExtraDependencies(body.name, message);
-  const { workerVersion } = config;
-  const ETagBody = {
-    experimentId,
-    body,
-    qcPipelineStartDate: qcPipelineStartDate.toISOString(),
-    extras,
-    cacheUniquenessKey,
-    workerVersion,
-    extraDependencies,
-  };
-
-
-  const ETag = createObjectHash(ETagBody);
-  const now = new Date();
-  const timeout = 15 * 60 * 1000; // 15min in ms
-  const timeoutDate = new Date(now.getTime() + timeout);
-  const request = {
-    ETag,
-    socketId: 'randomID',
-    experimentId,
-    authJWT,
-    timeout: timeoutDate.toISOString(),
-    body,
-  };
-
-  await validateAndSubmitWork(request);
+  const ETag = await submitWork(experimentId, authJWT, body, extraDependencies);
 
-  console.log('markerHeatmap - body: ', ETagBody);
-  console.log('markerHeatmap - etag: ', ETag);
   // explicitly return ETag to make it stand out more in tests and so harder to break
   return ETag;
 };
diff --git a/src/api.v2/helpers/worker/workSubmit/submitWork.js b/src/api.v2/helpers/worker/workSubmit/submitWork.js
new file mode 100644
index 000000000..3695770d3
--- /dev/null
+++ b/src/api.v2/helpers/worker/workSubmit/submitWork.js
@@ -0,0 +1,49 @@
+const config = require('../../../../config');
+const validateAndSubmitWork = require('../../../events/validateAndSubmitWork');
+const getExperimentBackendStatus = require('../../backendStatus/getExperimentBackendStatus');
+const createObjectHash = require('../createObjectHash');
+
+
+const submitWork = async (experimentId, authJWT, body, extraDependencies) => {
+  const backendStatus = await getExperimentBackendStatus(experimentId);
+  const { pipeline: { startDate: qcPipelineStartDate } } = backendStatus;
+
+
+  // these values need to match explicitly the default ones defined in the UI at
+  // src/utils/work/fetchWork.js when calling the function generateETag if this file
+  // or the one in the UI has any default changed, the pre-computing of embeddings/marker heatmap
+  // will stop working as the ETags will no longer match.
+  const cacheUniquenessKey = null;
+  const extras = undefined;
+  const { workerVersion } = config;
+
+  const ETagBody = {
+    experimentId,
+    body,
+    qcPipelineStartDate: qcPipelineStartDate.toISOString(),
+    extras,
+    cacheUniquenessKey,
+    workerVersion,
+    extraDependencies,
+  };
+
+  const ETag = createObjectHash(ETagBody);
+  const now = new Date();
+  const timeout = 15 * 60 * 1000; // 15min in ms
+  const timeoutDate = new Date(now.getTime() + timeout);
+  const request = {
+    ETag,
+    socketId: 'randomID',
+    experimentId,
+    authJWT,
+    timeout: timeoutDate.toISOString(),
+    body,
+  };
+
+  await validateAndSubmitWork(request);
+
+  // explicitly return ETag to make it stand out more in tests and so harder to break
+  return ETag;
+};
+
+module.exports = submitWork;

From 84617e2ae768dfce60da248e8e18212b860676ac Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Wed, 22 Feb 2023 09:04:39 -0300
Subject: [PATCH 17/65] Rename

---
 .../helpers/pipeline/pipelineConstruct/qcHelpers.js       | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js b/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
index e43e4b37a..58412bf02 100644
--- a/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
+++ b/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
@@ -40,13 +40,13 @@ const getFirstQCStep = async (experimentId, processingConfigUpdates, backendComp
   // Choose the earliestStep by checking:
   // if pendingSteps includes it, then pendingSteps has the earliest step
   // if not, earliestChangedStep is the earliest step
-  const earliestStep = (!earliestChangedStep || pendingSteps.includes(earliestChangedStep))
+  const firstStep = (!earliestChangedStep || pendingSteps.includes(earliestChangedStep))
     ? pendingSteps[0] : earliestChangedStep;
 
   // if the earlist step to run is the first one, just return it without
   // further checks
-  if (earliestStep === qcStepNames[0]) {
-    return earliestStep;
+  if (firstStep === qcStepNames[0]) {
+    return firstStep;
   }
   // if the first step to run is not the first in the pipeline (stepNames[0])
   // then check if the experiment supports starting the pipeline from any step
@@ -55,7 +55,7 @@ const getFirstQCStep = async (experimentId, processingConfigUpdates, backendComp
   // make a more costly call to S3 to check if the file exists
   const hasCellIds = await hasFilteredCellIdsAvailable(experimentId);
   if (hasCellIds) {
-    return earliestStep;
+    return firstStep;
   }
 
 

From 3de322e19752b3acab7e96a7724a0e792f5526ce Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Wed, 22 Feb 2023 09:05:10 -0300
Subject: [PATCH 18/65] Rename to firstChangedStep

---
 .../helpers/pipeline/pipelineConstruct/qcHelpers.js       | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js b/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
index 58412bf02..2dfec2765 100644
--- a/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
+++ b/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
@@ -20,14 +20,14 @@ const hasFilteredCellIdsAvailable = async (experimentId) => (
 // getFirstQCStep returns which is the first step of the QC to be run
 // processingConfigUpdates is not ordered
 const getFirstQCStep = async (experimentId, processingConfigUpdates, backendCompletedSteps) => {
-  let earliestChangedStep;
+  let firstChangedStep;
   let earliestIdx = 9999;
   processingConfigUpdates.forEach(({ name }) => {
     const stepName = filterToStepName[name];
     const idx = qcStepNames.indexOf(stepName);
     if (idx < earliestIdx) {
       earliestIdx = idx;
-      earliestChangedStep = stepName;
+      firstChangedStep = stepName;
     }
   });
 
@@ -40,8 +40,8 @@ const getFirstQCStep = async (experimentId, processingConfigUpdates, backendComp
   // Choose the earliestStep by checking:
   // if pendingSteps includes it, then pendingSteps has the earliest step
   // if not, earliestChangedStep is the earliest step
-  const firstStep = (!earliestChangedStep || pendingSteps.includes(earliestChangedStep))
-    ? pendingSteps[0] : earliestChangedStep;
+  const firstStep = (!firstChangedStep || pendingSteps.includes(firstChangedStep))
+    ? pendingSteps[0] : firstChangedStep;
 
   // if the earlist step to run is the first one, just return it without
   // further checks

From fc1fba7aad30626867f5b28210d419a26cb37193 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Wed, 22 Feb 2023 09:10:58 -0300
Subject: [PATCH 19/65] Update comments

---
 .../pipeline/pipelineConstruct/qcHelpers.js    | 18 +++++++++++-------
 1 file changed, 11 insertions(+), 7 deletions(-)

diff --git a/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js b/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
index 2dfec2765..24ca206d1 100644
--- a/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
+++ b/src/api.v2/helpers/pipeline/pipelineConstruct/qcHelpers.js
@@ -35,15 +35,19 @@ const getFirstQCStep = async (experimentId, processingConfigUpdates, backendComp
     (currentStep) => backendStepNamesToStepName[currentStep],
   );
 
-  const pendingSteps = _.difference(qcStepNames, completedSteps);
+  const notCompletedSteps = _.difference(qcStepNames, completedSteps);
 
-  // Choose the earliestStep by checking:
-  // if pendingSteps includes it, then pendingSteps has the earliest step
-  // if not, earliestChangedStep is the earliest step
-  const firstStep = (!firstChangedStep || pendingSteps.includes(firstChangedStep))
-    ? pendingSteps[0] : firstChangedStep;
+  // notCompletedSteps: the steps that have not been run for the currently persisted qc config
+  // firstChangedStep: the first step that introduces a new change to the persisted qc config
+  // We need to rerun all the changed steps and all the notCompletedSteps,
+  // so start from whichever is earlier: firstChangedStep or first notCompletedStep
+  // We do this by checking notCompletedSteps:
+  // - if it includes firstChangedStep then we can start from notCompletedStep[0]
+  // - if it doesn't, then firstChangedStep is earlier, so start from it
+  const firstStep = (!firstChangedStep || notCompletedSteps.includes(firstChangedStep))
+    ? notCompletedSteps[0] : firstChangedStep;
 
-  // if the earlist step to run is the first one, just return it without
+  // if firstStep is the first out of all of qc, just return it without
   // further checks
   if (firstStep === qcStepNames[0]) {
     return firstStep;

From 4f65ed09e5462fc13c518284b58ee91e7bcd23d1 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Wed, 22 Feb 2023 09:47:08 -0300
Subject: [PATCH 20/65] Add some more comments

---
 .../helpers/pipeline/getPipelineStatus.js     | 34 +++++++++++++++----
 1 file changed, 28 insertions(+), 6 deletions(-)

diff --git a/src/api.v2/helpers/pipeline/getPipelineStatus.js b/src/api.v2/helpers/pipeline/getPipelineStatus.js
index 56b9ae9e2..89e4e2668 100644
--- a/src/api.v2/helpers/pipeline/getPipelineStatus.js
+++ b/src/api.v2/helpers/pipeline/getPipelineStatus.js
@@ -213,7 +213,25 @@ const getStepsFromExecutionHistory = (events) => {
   return shortestCompletedToReport || [];
 };
 
-const getCompletedSteps = async (processName, stateMachineArn, executedSteps, stepFunctions) => {
+/**
+ *
+ * @param {*} processName The name of the pipeline to get the steps for,
+ * currently either qc or gem2s
+ * @param {*} stateMachineArn
+ * @param {*} lastRunExecutedSteps The steps that were executed in the last run
+ * @param {*} stepFunctions stepFunctions client
+ * @returns array of steps that can be considered completed
+ *
+ * If processName = gem2s, it returns executedSteps because we don't support partial reruns
+ *
+ * If processName = qc: it returns lastRunExecutedSteps + stepsCompletedInPreviousRuns
+ * stepsCompletedInPreviousRuns is all the steps that weren't scheduled to run in the last run
+ * The only reason we don't schedule steps is when we consider them completed,
+ * so we can still consider them completed
+ */
+const getCompletedSteps = async (
+  processName, stateMachineArn, lastRunExecutedSteps, stepFunctions,
+) => {
   let completedSteps;
 
   if (processName === 'qc') {
@@ -221,15 +239,19 @@ const getCompletedSteps = async (processName, stateMachineArn, executedSteps, st
       stateMachineArn,
     }).promise();
 
+    // Get all the steps that were scheduled to be run in the last execution
+    const lastScheduledSteps = Object.keys(JSON.parse(stateMachine.definition).States);
 
-    const stepFunctionSteps = Object.keys(JSON.parse(stateMachine.definition).States);
-
-    const qcStepsCompletedPreviousRuns = _.difference(qcStepNames, stepFunctionSteps)
+    // Remove from all qc steps the ones that were scheduled for execution in the last run
+    // We are left with all the qc steps that last run didn't consider necessary to rerun
+    // This means that these steps were considered completed in the last run so
+    // we can still consider them completed
+    const stepsCompletedInPreviousRuns = _.difference(qcStepNames, lastScheduledSteps)
       .map((rawStepName) => stepNameToBackendStepNames[rawStepName]);
 
-    completedSteps = qcStepsCompletedPreviousRuns.concat(executedSteps);
+    completedSteps = stepsCompletedInPreviousRuns.concat(lastRunExecutedSteps);
   } if (processName === 'gem2s') {
-    completedSteps = executedSteps;
+    completedSteps = lastRunExecutedSteps;
   }
 
   return completedSteps;

From 6681d0a2e89456a66c6bc42be6dcd4f164b1f0e8 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Wed, 22 Feb 2023 09:49:53 -0300
Subject: [PATCH 21/65] More comment

---
 src/api.v2/helpers/pipeline/getPipelineStatus.js | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/api.v2/helpers/pipeline/getPipelineStatus.js b/src/api.v2/helpers/pipeline/getPipelineStatus.js
index 89e4e2668..e95234260 100644
--- a/src/api.v2/helpers/pipeline/getPipelineStatus.js
+++ b/src/api.v2/helpers/pipeline/getPipelineStatus.js
@@ -222,7 +222,8 @@ const getStepsFromExecutionHistory = (events) => {
  * @param {*} stepFunctions stepFunctions client
  * @returns array of steps that can be considered completed
  *
- * If processName = gem2s, it returns executedSteps because we don't support partial reruns
+ * If processName = gem2s, it returns executedSteps because we don't support partial reruns so
+ * we can always assume all executedSteps are all completed steps
  *
  * If processName = qc: it returns lastRunExecutedSteps + stepsCompletedInPreviousRuns
  * stepsCompletedInPreviousRuns is all the steps that weren't scheduled to run in the last run

From 0c1103ac8599dbf94cf559bf3d396f058b596e5e Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Wed, 22 Feb 2023 09:50:34 -0300
Subject: [PATCH 22/65] Improve comments

---
 src/api.v2/helpers/pipeline/getPipelineStatus.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/api.v2/helpers/pipeline/getPipelineStatus.js b/src/api.v2/helpers/pipeline/getPipelineStatus.js
index e95234260..2f67b7a00 100644
--- a/src/api.v2/helpers/pipeline/getPipelineStatus.js
+++ b/src/api.v2/helpers/pipeline/getPipelineStatus.js
@@ -228,7 +228,7 @@ const getStepsFromExecutionHistory = (events) => {
  * If processName = qc: it returns lastRunExecutedSteps + stepsCompletedInPreviousRuns
  * stepsCompletedInPreviousRuns is all the steps that weren't scheduled to run in the last run
  * The only reason we don't schedule steps is when we consider them completed,
- * so we can still consider them completed
+ * so we can keep considering them completed for future runs as well
  */
 const getCompletedSteps = async (
   processName, stateMachineArn, lastRunExecutedSteps, stepFunctions,

From d12a3108b125e9c3f502874c6613504fbdbc7b6c Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Thu, 23 Feb 2023 11:38:12 +0000
Subject: [PATCH 23/65] invalidate on cell sets change

---
 src/utils/plotConfigInvalidation/invalidatePlotsForEvent.js | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/utils/plotConfigInvalidation/invalidatePlotsForEvent.js b/src/utils/plotConfigInvalidation/invalidatePlotsForEvent.js
index c8f996ea2..598299000 100644
--- a/src/utils/plotConfigInvalidation/invalidatePlotsForEvent.js
+++ b/src/utils/plotConfigInvalidation/invalidatePlotsForEvent.js
@@ -15,7 +15,7 @@ const plots = {
   },
   TRAJECTORY_ANALYSIS: {
     plotIdMatcher: 'trajectoryAnalysisMain',
-    keys: ['selectedNodes'],
+    keys: ['selectedNodes', 'selectedCellSets'],
   },
   CONTINUOUS_EMBEDDING: {
     plotIdMatcher: 'embeddingContinuousMain',
@@ -62,6 +62,7 @@ const plotsChangedByEvent = {
     plots.DOT_PLOT,
     plots.NORMALIZED_MATRIX,
     plots.VOLCANO_PLOT,
+    plots.TRAJECTORY_ANALYSIS,
   ],
   [events.EMBEDDING_MODIFIED]: [
     plots.TRAJECTORY_ANALYSIS,

From ade7e4c107dc603fc3578d86cbd6ccb42eae82d0 Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Thu, 23 Feb 2023 12:56:40 +0000
Subject: [PATCH 24/65] fix test

---
 .../api.v2/mocks/data/invalidationResults.js  | 33 +++++++++++++++++--
 .../invalidatePlotsForEvent.test.js.snap      |  9 +++++
 2 files changed, 40 insertions(+), 2 deletions(-)

diff --git a/tests/api.v2/mocks/data/invalidationResults.js b/tests/api.v2/mocks/data/invalidationResults.js
index 8877f6c9b..630885a4d 100644
--- a/tests/api.v2/mocks/data/invalidationResults.js
+++ b/tests/api.v2/mocks/data/invalidationResults.js
@@ -202,6 +202,34 @@ const invalidationResults = {
       },
     }],
     'normalized-matrix': [{ id: 'normalized-matrix', config: {} }],
+    trajectoryAnalysisMain: [{
+      id: 'trajectoryAnalysisMain',
+      config: {
+        axes: {
+          offset: 0, gridWidth: 10, xAxisText: 'Umap 1', yAxisText: 'Umap 2', domainWidth: 2, gridOpacity: 0, defaultValues: ['x', 'y'], labelFontSize: 12, titleFontSize: 13, xAxisRotateLabels: false,
+        },
+        spec: '1.0.0',
+        title: {
+          dx: 10, text: '', anchor: 'start', fontSize: 20,
+        },
+        colour: {
+          invert: 'standard', gradient: 'default', masterColour: '#000000', toggleInvert: '#FFFFFF', reverseColourBar: false,
+        },
+        labels: { size: 18, enabled: false },
+        legend: { colour: '#000000', enabled: true, position: 'top' },
+        marker: {
+          size: 20, shape: 'circle', opacity: 5, showOpacity: true,
+        },
+        fontStyle: { font: 'sans-serif', colour: '#000000' },
+        axesRanges: {
+          xMax: 10, xMin: 0, yMax: 10, yMin: 0, xAxisAuto: true, yAxisAuto: true,
+        },
+        dimensions: { width: 700, height: 550 },
+        selectedCellSets: ['louvain'],
+        embeddingCellSet: 'louvain',
+        embeddingSample: 'All',
+      },
+    }],
     heatmapPlotMain: [],
     volcanoPlotMain: [],
   },
@@ -229,8 +257,9 @@ const invalidationResults = {
           xMax: 10, xMin: 0, yMax: 10, yMin: 0, xAxisAuto: true, yAxisAuto: true,
         },
         dimensions: { width: 700, height: 550 },
-        selectedSample: 'All',
-        selectedCellSet: 'louvain',
+        selectedCellSets: ['louvain'],
+        embeddingCellSet: 'louvain',
+        embeddingSample: 'All',
       },
     }],
   },
diff --git a/tests/utils/plotConfigInvalidation/__snapshots__/invalidatePlotsForEvent.test.js.snap b/tests/utils/plotConfigInvalidation/__snapshots__/invalidatePlotsForEvent.test.js.snap
index 8ff9cd38f..158c78808 100644
--- a/tests/utils/plotConfigInvalidation/__snapshots__/invalidatePlotsForEvent.test.js.snap
+++ b/tests/utils/plotConfigInvalidation/__snapshots__/invalidatePlotsForEvent.test.js.snap
@@ -80,6 +80,14 @@ Array [
       "basis",
     ],
   ],
+  Array [
+    "mockExperimentId",
+    "trajectoryAnalysisMain",
+    Array [
+      "selectedNodes",
+      "cellSets",
+    ],
+  ],
 ]
 `;
 
@@ -90,6 +98,7 @@ Array [
     "trajectoryAnalysisMain",
     Array [
       "selectedNodes",
+      "cellSets",
     ],
   ],
 ]

From d4be0e53d7de07e136c650d5da1ee3b616b592d5 Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Thu, 23 Feb 2023 13:13:02 +0000
Subject: [PATCH 25/65] update snapshot

---
 .../__snapshots__/invalidatePlotsForEvent.test.js.snap        | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/utils/plotConfigInvalidation/__snapshots__/invalidatePlotsForEvent.test.js.snap b/tests/utils/plotConfigInvalidation/__snapshots__/invalidatePlotsForEvent.test.js.snap
index 158c78808..638969256 100644
--- a/tests/utils/plotConfigInvalidation/__snapshots__/invalidatePlotsForEvent.test.js.snap
+++ b/tests/utils/plotConfigInvalidation/__snapshots__/invalidatePlotsForEvent.test.js.snap
@@ -85,7 +85,7 @@ Array [
     "trajectoryAnalysisMain",
     Array [
       "selectedNodes",
-      "cellSets",
+      "selectedCellSets",
     ],
   ],
 ]
@@ -98,7 +98,7 @@ Array [
     "trajectoryAnalysisMain",
     Array [
       "selectedNodes",
-      "cellSets",
+      "selectedCellSets",
     ],
   ],
 ]

From 84a8468ac8dc011b4a4fa7f2203709df959316bf Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Thu, 23 Feb 2023 15:45:38 -0300
Subject: [PATCH 26/65] Add migration
 20230223175139_skip_plot_s3_delete_if_path_is_null

---
 ...139_skip_plot_s3_delete_if_path_is_null.js | 92 +++++++++++++++++++
 1 file changed, 92 insertions(+)
 create mode 100644 src/sql/migrations/20230223175139_skip_plot_s3_delete_if_path_is_null.js

diff --git a/src/sql/migrations/20230223175139_skip_plot_s3_delete_if_path_is_null.js b/src/sql/migrations/20230223175139_skip_plot_s3_delete_if_path_is_null.js
new file mode 100644
index 000000000..2da4fc8ef
--- /dev/null
+++ b/src/sql/migrations/20230223175139_skip_plot_s3_delete_if_path_is_null.js
@@ -0,0 +1,92 @@
+const { PLOTS } = require('../../config/bucketNames');
+
+const getTriggerFunction = (dbEnv, key, bucketName) => {
+  let body = '';
+  const triggerLambdaARN = `arn:aws:lambda:${process.env.AWS_REGION}:${process.env.AWS_ACCOUNT_ID}:function:delete-s3-file-lambda-${dbEnv}`;
+
+  // Removing the environment and account id from the bucket name.
+  // When making a migration, the environment would be development,
+  // due to the fact that the migration is run locally,
+  // so we need to add the environment and accountID in the lambda itself
+  const rawBucketName = bucketName.split('-').slice(0, -2).join('-');
+
+  // We skip creation of the triggers and functions in development
+  // because it requires aws_commons and aws_lambda modules which are proprietary.
+  if (['production', 'staging'].includes(dbEnv)) {
+    body = `PERFORM aws_lambda.invoke('${triggerLambdaARN}', json_build_object('key',OLD.${key}, 'bucketName', '${rawBucketName}'), '${process.env.AWS_REGION}', 'Event');`;
+  }
+
+  return body;
+};
+
+const createDeletePlotTriggerNewFunc = (env) => {
+  const body = getTriggerFunction(env, 's3_data_key', PLOTS);
+
+  const template = `
+      CREATE OR REPLACE FUNCTION public.delete_file_from_s3_after_plot_delete()
+        RETURNS trigger
+        LANGUAGE plpgsql
+      AS $function$
+      BEGIN
+        IF OLD.s3_data_key IS NOT NULL THEN 
+          ${body}
+        END IF;
+        return OLD;
+      END;
+      $function$;
+
+      CREATE TRIGGER delete_file_from_s3_after_plot_delete_trigger
+      AFTER DELETE ON plot
+      FOR EACH ROW EXECUTE FUNCTION public.delete_file_from_s3_after_plot_delete();
+    `;
+
+  return template;
+};
+
+const createDeletePlotTriggerOldFunc = (env) => {
+  const body = getTriggerFunction(env, 's3_data_key', PLOTS);
+
+  const template = `
+      CREATE OR REPLACE FUNCTION public.delete_file_from_s3_after_plot_delete()
+        RETURNS trigger
+        LANGUAGE plpgsql
+      AS $function$
+      BEGIN
+        ${body}
+        return OLD;
+      END;
+      $function$;
+
+      CREATE TRIGGER delete_file_from_s3_after_plot_delete_trigger
+      AFTER DELETE ON plot
+      FOR EACH ROW EXECUTE FUNCTION public.delete_file_from_s3_after_plot_delete();
+    `;
+
+  return template;
+};
+
+exports.up = async (knex) => {
+  if (!process.env.AWS_REGION) {
+    throw new Error('Environment variables AWS_REGION and AWS_ACCOUNT_ID are required');
+  }
+
+  if (!process.env.AWS_ACCOUNT_ID) {
+    throw new Error('Environment variables AWS_REGION and AWS_ACCOUNT_ID are required');
+  }
+
+  await knex.raw(`
+    DROP TRIGGER IF EXISTS delete_file_from_s3_after_plot_delete_trigger ON plot;
+    DROP FUNCTION IF EXISTS public.delete_file_from_s3_after_plot_delete;
+  `);
+
+  await knex.raw(createDeletePlotTriggerNewFunc(process.env.NODE_ENV));
+};
+
+exports.down = async (knex) => {
+  await knex.raw(`
+    DROP TRIGGER IF EXISTS delete_file_from_s3_after_plot_delete_trigger ON plot;
+    DROP FUNCTION IF EXISTS public.delete_file_from_s3_after_plot_delete;
+  `);
+
+  await knex.raw(createDeletePlotTriggerOldFunc(process.env.NODE_ENV));
+};

From 836a0020f3846f0bccf9d020b3aed59fa2af846a Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Fri, 24 Feb 2023 12:40:28 +0000
Subject: [PATCH 27/65] allow track name to use space

---
 src/api.v2/controllers/metadataTrackController.js | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/src/api.v2/controllers/metadataTrackController.js b/src/api.v2/controllers/metadataTrackController.js
index 9ec8299b4..329d1c01d 100644
--- a/src/api.v2/controllers/metadataTrackController.js
+++ b/src/api.v2/controllers/metadataTrackController.js
@@ -85,13 +85,16 @@ const parseMetadataFromTSV = (data, sampleNameToId) => {
 
   const result = data.trim().split('\n').map((line, index) => {
     // check that there are 3 elements per line
-    const elements = line.split('\t');
+    const elements = line.trim().split('\t');
     if (elements.length !== 3) {
       invalidLines.push(index + 1);
     }
 
     // check that the sample name exists in the experiment
-    const [sampleName, metadataKey, metadataValue] = elements;
+    const sampleName = elements[0];
+    const metadataKey = elements[1].replace(/\s+/, '_');
+    const metadataValue = elements[2];
+
     if (!(sampleName in sampleNameToId)) {
       invalidSamples.add(sampleName);
     }

From 574f76250186f70981ec00c49c4826c32d5fec57 Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Fri, 24 Feb 2023 13:58:24 +0000
Subject: [PATCH 28/65] add test for parsing tsv

---
 .../controllers/metadataTrackController.js    |   1 +
 .../metadataTrackController.test.js.snap      | 110 ++++++++++++++++++
 .../metadataTrackController.test.js           |  49 +++++++-
 tests/api.v2/mocks/data/metadata.tsv          |   8 ++
 .../mocks/data/metadataInvalidLines.tsv       |   8 ++
 .../mocks/data/metadataInvalidSamples.tsv     |   8 ++
 .../mocks/data/metadataWithLineSpaces.tsv     |   8 ++
 .../mocks/data/metadataWithTrackSpaces.tsv    |   8 ++
 8 files changed, 199 insertions(+), 1 deletion(-)
 create mode 100644 tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
 create mode 100644 tests/api.v2/mocks/data/metadata.tsv
 create mode 100644 tests/api.v2/mocks/data/metadataInvalidLines.tsv
 create mode 100644 tests/api.v2/mocks/data/metadataInvalidSamples.tsv
 create mode 100644 tests/api.v2/mocks/data/metadataWithLineSpaces.tsv
 create mode 100644 tests/api.v2/mocks/data/metadataWithTrackSpaces.tsv

diff --git a/src/api.v2/controllers/metadataTrackController.js b/src/api.v2/controllers/metadataTrackController.js
index 329d1c01d..db9b91fbf 100644
--- a/src/api.v2/controllers/metadataTrackController.js
+++ b/src/api.v2/controllers/metadataTrackController.js
@@ -139,4 +139,5 @@ module.exports = {
   deleteMetadataTrack,
   patchValueForSample,
   createMetadataFromFile,
+  parseMetadataFromTSV,
 };
diff --git a/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
new file mode 100644
index 000000000..1b3d06ac9
--- /dev/null
+++ b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
@@ -0,0 +1,110 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`metadataTrackController parseMetadataFromTSV can parse metadata tracks with spaces 1`] = `
+Array [
+  Object {
+    "metadataKey": "track_1",
+    "metadataValue": "value 1",
+    "sampleId": "mockSample1",
+  },
+  Object {
+    "metadataKey": "track_1",
+    "metadataValue": "value 2",
+    "sampleId": "mockSample2",
+  },
+  Object {
+    "metadataKey": "track_1",
+    "metadataValue": "value 3",
+    "sampleId": "mockSample3",
+  },
+  Object {
+    "metadataKey": "track_1",
+    "metadataValue": "value 1",
+    "sampleId": "mockSample1",
+  },
+  Object {
+    "metadataKey": "track_1",
+    "metadataValue": "value 2",
+    "sampleId": "mockSample2",
+  },
+  Object {
+    "metadataKey": "track_1",
+    "metadataValue": "value 3",
+    "sampleId": "mockSample3",
+  },
+]
+`;
+
+exports[`metadataTrackController parseMetadataFromTSV parses correctly 1`] = `
+Array [
+  Object {
+    "metadataKey": "track_1",
+    "metadataValue": "value 1",
+    "sampleId": "mockSample1",
+  },
+  Object {
+    "metadataKey": "track_1",
+    "metadataValue": "value 2",
+    "sampleId": "mockSample2",
+  },
+  Object {
+    "metadataKey": "track_1",
+    "metadataValue": "value 3",
+    "sampleId": "mockSample3",
+  },
+  Object {
+    "metadataKey": "track_2",
+    "metadataValue": "value 1",
+    "sampleId": "mockSample1",
+  },
+  Object {
+    "metadataKey": "track_2",
+    "metadataValue": "value 2",
+    "sampleId": "mockSample2",
+  },
+  Object {
+    "metadataKey": "track_2",
+    "metadataValue": "value 3",
+    "sampleId": "mockSample3",
+  },
+]
+`;
+
+exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid lines 1`] = `"Invalid lines: 1, 4, 5"`;
+
+exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid samples 1`] = `"Invalid sample names: sample A, sample C"`;
+
+exports[`metadataTrackController parseMetadataFromTSV tolerates spaces after a line 1`] = `
+Array [
+  Object {
+    "metadataKey": "track_1",
+    "metadataValue": "value 1",
+    "sampleId": "mockSample1",
+  },
+  Object {
+    "metadataKey": "track_1",
+    "metadataValue": "value 2",
+    "sampleId": "mockSample2",
+  },
+  Object {
+    "metadataKey": "track_1",
+    "metadataValue": "value 3",
+    "sampleId": "mockSample3",
+  },
+  Object {
+    "metadataKey": "track_2",
+    "metadataValue": "value 1",
+    "sampleId": "mockSample1",
+  },
+  Object {
+    "metadataKey": "track_2",
+    "metadataValue": "value 2",
+    "sampleId": "mockSample2",
+  },
+  Object {
+    "metadataKey": "track_2",
+    "metadataValue": "value 3",
+    "sampleId": "mockSample3",
+  },
+]
+`;
diff --git a/tests/api.v2/controllers/metadataTrackController.test.js b/tests/api.v2/controllers/metadataTrackController.test.js
index 78cea80a6..0c6c7feef 100644
--- a/tests/api.v2/controllers/metadataTrackController.test.js
+++ b/tests/api.v2/controllers/metadataTrackController.test.js
@@ -1,13 +1,20 @@
 // @ts-nocheck
+const fs = require('fs');
+const path = require('path');
 const metadataTrackController = require('../../../src/api.v2/controllers/metadataTrackController');
 const { OK, NotFoundError, BadRequestError } = require('../../../src/utils/responses');
 const MetadataTrack = require('../../../src/api.v2/model/MetadataTrack');
 const Sample = require('../../../src/api.v2/model/Sample');
-const BasicModel = require('../../../src/api.v2/model/BasicModel');
 
 const metadataTrackInstance = new MetadataTrack();
 const sampleInstance = new Sample();
 
+const mockMetadataSampleNameToId = {
+  'sample 1': 'mockSample1',
+  'sample 2': 'mockSample2',
+  'sample 3': 'mockSample3',
+};
+
 jest.mock('../../../src/api.v2/model/MetadataTrack');
 jest.mock('../../../src/api.v2/model/Sample');
 
@@ -214,4 +221,44 @@ describe('metadataTrackController', () => {
       metadataTrackController.createMetadataFromFile(mockReq, mockRes),
     ).rejects.toThrowError(BadRequestError);
   });
+
+  it('parseMetadataFromTSV parses correctly', () => {
+    const mockData = fs.readFileSync(path.join(__dirname, '../mocks/data/metadata.tsv'), { encoding: 'utf-8' });
+    const result = metadataTrackController.parseMetadataFromTSV(
+      mockData, mockMetadataSampleNameToId,
+    );
+    expect(result).toMatchSnapshot();
+  });
+
+  it('parseMetadataFromTSV throws error if there are invalid samples', () => {
+    const mockData = fs.readFileSync(path.join(__dirname, '../mocks/data/metadataInvalidSamples.tsv'), { encoding: 'utf-8' });
+
+    expect(() => {
+      metadataTrackController.parseMetadataFromTSV(mockData, mockMetadataSampleNameToId);
+    }).toThrowErrorMatchingSnapshot();
+  });
+
+  it('parseMetadataFromTSV throws error if there are invalid lines', () => {
+    const mockData = fs.readFileSync(path.join(__dirname, '../mocks/data/metadataInvalidLines.tsv'), { encoding: 'utf-8' });
+
+    expect(() => {
+      metadataTrackController.parseMetadataFromTSV(mockData, mockMetadataSampleNameToId);
+    }).toThrowErrorMatchingSnapshot();
+  });
+
+  it('parseMetadataFromTSV tolerates spaces after a line', () => {
+    const mockData = fs.readFileSync(path.join(__dirname, '../mocks/data/metadataWithTrackSpaces.tsv'), { encoding: 'utf-8' });
+    const result = metadataTrackController.parseMetadataFromTSV(
+      mockData, mockMetadataSampleNameToId,
+    );
+    expect(result).toMatchSnapshot();
+  });
+
+  it('parseMetadataFromTSV can parse metadata tracks with spaces', () => {
+    const mockData = fs.readFileSync(path.join(__dirname, '../mocks/data/metadataWithLineSpaces.tsv'), { encoding: 'utf-8' });
+    const result = metadataTrackController.parseMetadataFromTSV(
+      mockData, mockMetadataSampleNameToId,
+    );
+    expect(result).toMatchSnapshot();
+  });
 });
diff --git a/tests/api.v2/mocks/data/metadata.tsv b/tests/api.v2/mocks/data/metadata.tsv
new file mode 100644
index 000000000..72a4b6a67
--- /dev/null
+++ b/tests/api.v2/mocks/data/metadata.tsv
@@ -0,0 +1,8 @@
+
+
+sample 1	track_1	value 1
+sample 2	track_1	value 2
+sample 3	track_1	value 3
+sample 1	track_2	value 1
+sample 2	track_2	value 2
+sample 3	track_2	value 3
diff --git a/tests/api.v2/mocks/data/metadataInvalidLines.tsv b/tests/api.v2/mocks/data/metadataInvalidLines.tsv
new file mode 100644
index 000000000..eefd94a27
--- /dev/null
+++ b/tests/api.v2/mocks/data/metadataInvalidLines.tsv
@@ -0,0 +1,8 @@
+
+
+sample 1	track_1
+sample 2	track_1	value 2
+sample 3	track_1	value 3
+sample 1	track_2
+sample 2	track_2
+sample 3	track_2	value 3
diff --git a/tests/api.v2/mocks/data/metadataInvalidSamples.tsv b/tests/api.v2/mocks/data/metadataInvalidSamples.tsv
new file mode 100644
index 000000000..a35cdea94
--- /dev/null
+++ b/tests/api.v2/mocks/data/metadataInvalidSamples.tsv
@@ -0,0 +1,8 @@
+
+
+sample A	track_1	value 1
+sample 2	track_1	value 2
+sample C	track_1	value 3
+sample A	track_2	value 1
+sample 2	track_2	value 2
+sample 3	track_2	value 3
diff --git a/tests/api.v2/mocks/data/metadataWithLineSpaces.tsv b/tests/api.v2/mocks/data/metadataWithLineSpaces.tsv
new file mode 100644
index 000000000..3528003e3
--- /dev/null
+++ b/tests/api.v2/mocks/data/metadataWithLineSpaces.tsv
@@ -0,0 +1,8 @@
+
+
+sample 1	track_1	value 1    
+sample 2	track_1	value 2		 
+sample 3	track_1	value 3 	
+	sample 1	track_1	value 1
+   sample 2	track_1	value 2
+ sample 3	track_1	value 3
diff --git a/tests/api.v2/mocks/data/metadataWithTrackSpaces.tsv b/tests/api.v2/mocks/data/metadataWithTrackSpaces.tsv
new file mode 100644
index 000000000..192d8f80f
--- /dev/null
+++ b/tests/api.v2/mocks/data/metadataWithTrackSpaces.tsv
@@ -0,0 +1,8 @@
+
+
+sample 1	track 1	value 1
+sample 2	track 1	value 2
+sample 3	track 1	value 3
+sample 1	track 2	value 1
+sample 2	track 2	value 2
+sample 3	track 2	value 3

From aaf910580593484a7a4d2857ca09b0cd30317257 Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Fri, 24 Feb 2023 16:54:11 +0000
Subject: [PATCH 29/65] add check for duplicated track

---
 src/api.v2/controllers/metadataTrackController.js | 15 ++++++++++++++-
 .../metadataTrackController.test.js.snap          |  8 +++++---
 .../controllers/metadataTrackController.test.js   |  8 ++++++++
 .../mocks/data/metadataInvalidDuplicates.tsv      | 10 ++++++++++
 .../api.v2/mocks/data/metadataWithLineSpaces.tsv  |  6 +++---
 5 files changed, 40 insertions(+), 7 deletions(-)
 create mode 100644 tests/api.v2/mocks/data/metadataInvalidDuplicates.tsv

diff --git a/src/api.v2/controllers/metadataTrackController.js b/src/api.v2/controllers/metadataTrackController.js
index db9b91fbf..21808b9c0 100644
--- a/src/api.v2/controllers/metadataTrackController.js
+++ b/src/api.v2/controllers/metadataTrackController.js
@@ -82,6 +82,9 @@ const patchValueForSample = async (req, res) => {
 const parseMetadataFromTSV = (data, sampleNameToId) => {
   const invalidLines = [];
   const invalidSamples = new Set();
+  const invalidDuplicates = [];
+
+  const sampleMetadataPair = {};
 
   const result = data.trim().split('\n').map((line, index) => {
     // check that there are 3 elements per line
@@ -90,21 +93,31 @@ const parseMetadataFromTSV = (data, sampleNameToId) => {
       invalidLines.push(index + 1);
     }
 
-    // check that the sample name exists in the experiment
     const sampleName = elements[0];
     const metadataKey = elements[1].replace(/\s+/, '_');
     const metadataValue = elements[2];
 
+    // check that the sample name exists in the experiment
     if (!(sampleName in sampleNameToId)) {
       invalidSamples.add(sampleName);
     }
 
+    // Check for duplicates
+    if (sampleMetadataPair[`${sampleName}@${metadataKey}`] === undefined) {
+      sampleMetadataPair[`${sampleName}@${metadataKey}`] = index;
+    } else {
+      // Show metadata track with unreplaced value
+      const duplicateLine = sampleMetadataPair[`${sampleName}@${metadataKey}`];
+      invalidDuplicates.push(`${duplicateLine} & ${index}`);
+    }
+
     return { sampleId: sampleNameToId[sampleName], metadataKey, metadataValue };
   });
 
   const errors = [];
   if (invalidSamples.size > 0) errors.push(`Invalid sample names: ${Array.from(invalidSamples).join(', ')}`);
   if (invalidLines.length > 0) errors.push(`Invalid lines: ${invalidLines.join(', ')}`);
+  if (invalidDuplicates.length > 0) errors.push(`Multiple assignment on lines: ${invalidDuplicates.join(', ')}`);
   if (errors.length > 0) throw new BadRequestError(errors.join('\n'));
 
   return result;
diff --git a/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
index 1b3d06ac9..bb1588050 100644
--- a/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
+++ b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
@@ -18,17 +18,17 @@ Array [
     "sampleId": "mockSample3",
   },
   Object {
-    "metadataKey": "track_1",
+    "metadataKey": "track_2",
     "metadataValue": "value 1",
     "sampleId": "mockSample1",
   },
   Object {
-    "metadataKey": "track_1",
+    "metadataKey": "track_2",
     "metadataValue": "value 2",
     "sampleId": "mockSample2",
   },
   Object {
-    "metadataKey": "track_1",
+    "metadataKey": "track_2",
     "metadataValue": "value 3",
     "sampleId": "mockSample3",
   },
@@ -70,6 +70,8 @@ Array [
 ]
 `;
 
+exports[`metadataTrackController parseMetadataFromTSV throws error if there are duplicated input 1`] = `"Multiple assignment on lines: 1 & 2, 4 & 5"`;
+
 exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid lines 1`] = `"Invalid lines: 1, 4, 5"`;
 
 exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid samples 1`] = `"Invalid sample names: sample A, sample C"`;
diff --git a/tests/api.v2/controllers/metadataTrackController.test.js b/tests/api.v2/controllers/metadataTrackController.test.js
index 0c6c7feef..2a322ae98 100644
--- a/tests/api.v2/controllers/metadataTrackController.test.js
+++ b/tests/api.v2/controllers/metadataTrackController.test.js
@@ -246,6 +246,14 @@ describe('metadataTrackController', () => {
     }).toThrowErrorMatchingSnapshot();
   });
 
+  it('parseMetadataFromTSV throws error if there are duplicated input', () => {
+    const mockData = fs.readFileSync(path.join(__dirname, '../mocks/data/metadataInvalidDuplicates.tsv'), { encoding: 'utf-8' });
+
+    expect(() => {
+      metadataTrackController.parseMetadataFromTSV(mockData, mockMetadataSampleNameToId);
+    }).toThrowErrorMatchingSnapshot();
+  });
+
   it('parseMetadataFromTSV tolerates spaces after a line', () => {
     const mockData = fs.readFileSync(path.join(__dirname, '../mocks/data/metadataWithTrackSpaces.tsv'), { encoding: 'utf-8' });
     const result = metadataTrackController.parseMetadataFromTSV(
diff --git a/tests/api.v2/mocks/data/metadataInvalidDuplicates.tsv b/tests/api.v2/mocks/data/metadataInvalidDuplicates.tsv
new file mode 100644
index 000000000..25b1a0bf5
--- /dev/null
+++ b/tests/api.v2/mocks/data/metadataInvalidDuplicates.tsv
@@ -0,0 +1,10 @@
+
+
+sample 1	track_1	value 1
+sample 2	track_1	value 2
+sample 2	track_1	value 3
+sample 3	track_1	value 3
+sample 1	track_2	value 1
+sample 1	track_2	value 4
+sample 2	track_2	value 2
+sample 3	track_2	value 3
diff --git a/tests/api.v2/mocks/data/metadataWithLineSpaces.tsv b/tests/api.v2/mocks/data/metadataWithLineSpaces.tsv
index 3528003e3..b773283a3 100644
--- a/tests/api.v2/mocks/data/metadataWithLineSpaces.tsv
+++ b/tests/api.v2/mocks/data/metadataWithLineSpaces.tsv
@@ -3,6 +3,6 @@
 sample 1	track_1	value 1    
 sample 2	track_1	value 2		 
 sample 3	track_1	value 3 	
-	sample 1	track_1	value 1
-   sample 2	track_1	value 2
- sample 3	track_1	value 3
+	sample 1	track_2	value 1
+   sample 2	track_2	value 2
+ sample 3	track_2	value 3

From 4cd208a0a3de88487a1f29ff5401b62faf97d27d Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Mon, 27 Feb 2023 09:09:02 +0000
Subject: [PATCH 30/65] better error ext

---
 src/api.v2/controllers/metadataTrackController.js           | 6 +++---
 .../__snapshots__/metadataTrackController.test.js.snap      | 6 +++---
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/api.v2/controllers/metadataTrackController.js b/src/api.v2/controllers/metadataTrackController.js
index 21808b9c0..ca1f8e720 100644
--- a/src/api.v2/controllers/metadataTrackController.js
+++ b/src/api.v2/controllers/metadataTrackController.js
@@ -115,9 +115,9 @@ const parseMetadataFromTSV = (data, sampleNameToId) => {
   });
 
   const errors = [];
-  if (invalidSamples.size > 0) errors.push(`Invalid sample names: ${Array.from(invalidSamples).join(', ')}`);
-  if (invalidLines.length > 0) errors.push(`Invalid lines: ${invalidLines.join(', ')}`);
-  if (invalidDuplicates.length > 0) errors.push(`Multiple assignment on lines: ${invalidDuplicates.join(', ')}`);
+  if (invalidSamples.size > 0) errors.push(`Invalid sample names on line(s): ${Array.from(invalidSamples).join(', ')}`);
+  if (invalidLines.length > 0) errors.push(`Invalid line(s): ${invalidLines.join(', ')}`);
+  if (invalidDuplicates.length > 0) errors.push(`Multiple metadata assignment(s) on line(s): ${invalidDuplicates.join(', ')}`);
   if (errors.length > 0) throw new BadRequestError(errors.join('\n'));
 
   return result;
diff --git a/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
index bb1588050..d34458d23 100644
--- a/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
+++ b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
@@ -70,11 +70,11 @@ Array [
 ]
 `;
 
-exports[`metadataTrackController parseMetadataFromTSV throws error if there are duplicated input 1`] = `"Multiple assignment on lines: 1 & 2, 4 & 5"`;
+exports[`metadataTrackController parseMetadataFromTSV throws error if there are duplicated input 1`] = `"Multiple metadata assignment(s) on line(s): 1 & 2, 4 & 5"`;
 
-exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid lines 1`] = `"Invalid lines: 1, 4, 5"`;
+exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid lines 1`] = `"Invalid line(s): 1, 4, 5"`;
 
-exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid samples 1`] = `"Invalid sample names: sample A, sample C"`;
+exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid samples 1`] = `"Invalid sample names on line(s): sample A, sample C"`;
 
 exports[`metadataTrackController parseMetadataFromTSV tolerates spaces after a line 1`] = `
 Array [

From e1a972e9c45413036f48ba1cf5f16b3c846f0ade Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Mon, 27 Feb 2023 09:37:08 +0000
Subject: [PATCH 31/65] better text

---
 src/api.v2/controllers/metadataTrackController.js               | 2 +-
 .../__snapshots__/metadataTrackController.test.js.snap          | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/api.v2/controllers/metadataTrackController.js b/src/api.v2/controllers/metadataTrackController.js
index ca1f8e720..639a4b922 100644
--- a/src/api.v2/controllers/metadataTrackController.js
+++ b/src/api.v2/controllers/metadataTrackController.js
@@ -117,7 +117,7 @@ const parseMetadataFromTSV = (data, sampleNameToId) => {
   const errors = [];
   if (invalidSamples.size > 0) errors.push(`Invalid sample names on line(s): ${Array.from(invalidSamples).join(', ')}`);
   if (invalidLines.length > 0) errors.push(`Invalid line(s): ${invalidLines.join(', ')}`);
-  if (invalidDuplicates.length > 0) errors.push(`Multiple metadata assignment(s) on line(s): ${invalidDuplicates.join(', ')}`);
+  if (invalidDuplicates.length > 0) errors.push(`Multiple assignment(s) to the same entry on line(s): ${invalidDuplicates.join(', ')}`);
   if (errors.length > 0) throw new BadRequestError(errors.join('\n'));
 
   return result;
diff --git a/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
index d34458d23..53b4e577f 100644
--- a/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
+++ b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
@@ -70,7 +70,7 @@ Array [
 ]
 `;
 
-exports[`metadataTrackController parseMetadataFromTSV throws error if there are duplicated input 1`] = `"Multiple metadata assignment(s) on line(s): 1 & 2, 4 & 5"`;
+exports[`metadataTrackController parseMetadataFromTSV throws error if there are duplicated input 1`] = `"Multiple assignment(s) to the same entry on line(s): 1 & 2, 4 & 5"`;
 
 exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid lines 1`] = `"Invalid line(s): 1, 4, 5"`;
 

From d8e34be8dbd34dce5b785587cc42bd11621d10a2 Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Mon, 27 Feb 2023 10:04:32 +0000
Subject: [PATCH 32/65] better error line

---
 src/api.v2/controllers/metadataTrackController.js          | 7 +++----
 .../__snapshots__/metadataTrackController.test.js.snap     | 2 +-
 2 files changed, 4 insertions(+), 5 deletions(-)

diff --git a/src/api.v2/controllers/metadataTrackController.js b/src/api.v2/controllers/metadataTrackController.js
index 639a4b922..e83dd4e73 100644
--- a/src/api.v2/controllers/metadataTrackController.js
+++ b/src/api.v2/controllers/metadataTrackController.js
@@ -102,13 +102,12 @@ const parseMetadataFromTSV = (data, sampleNameToId) => {
       invalidSamples.add(sampleName);
     }
 
-    // Check for duplicates
+    // Check for multiple metadata assignment to the same sample and track
     if (sampleMetadataPair[`${sampleName}@${metadataKey}`] === undefined) {
-      sampleMetadataPair[`${sampleName}@${metadataKey}`] = index;
+      sampleMetadataPair[`${sampleName}@${metadataKey}`] = index + 1;
     } else {
-      // Show metadata track with unreplaced value
       const duplicateLine = sampleMetadataPair[`${sampleName}@${metadataKey}`];
-      invalidDuplicates.push(`${duplicateLine} & ${index}`);
+      invalidDuplicates.push(`${duplicateLine} & ${index + 1}`);
     }
 
     return { sampleId: sampleNameToId[sampleName], metadataKey, metadataValue };
diff --git a/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
index 53b4e577f..d1cb61af0 100644
--- a/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
+++ b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
@@ -70,7 +70,7 @@ Array [
 ]
 `;
 
-exports[`metadataTrackController parseMetadataFromTSV throws error if there are duplicated input 1`] = `"Multiple assignment(s) to the same entry on line(s): 1 & 2, 4 & 5"`;
+exports[`metadataTrackController parseMetadataFromTSV throws error if there are duplicated input 1`] = `"Multiple assignment(s) to the same entry on line(s): 2 & 3, 5 & 6"`;
 
 exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid lines 1`] = `"Invalid line(s): 1, 4, 5"`;
 

From 44aceb27c8e0939f84066b0dd10b8efea1a6c29a Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Mon, 27 Feb 2023 11:55:19 +0000
Subject: [PATCH 33/65] address comments

---
 src/api.v2/controllers/metadataTrackController.js         | 8 ++++----
 .../__snapshots__/metadataTrackController.test.js.snap    | 2 +-
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/api.v2/controllers/metadataTrackController.js b/src/api.v2/controllers/metadataTrackController.js
index e83dd4e73..44c9f5bb5 100644
--- a/src/api.v2/controllers/metadataTrackController.js
+++ b/src/api.v2/controllers/metadataTrackController.js
@@ -93,9 +93,9 @@ const parseMetadataFromTSV = (data, sampleNameToId) => {
       invalidLines.push(index + 1);
     }
 
-    const sampleName = elements[0];
-    const metadataKey = elements[1].replace(/\s+/, '_');
-    const metadataValue = elements[2];
+    const sampleName = elements[0].trim();
+    const metadataKey = elements[1].trim().replace(/\s+/, '_');
+    const metadataValue = elements[2].trim();
 
     // check that the sample name exists in the experiment
     if (!(sampleName in sampleNameToId)) {
@@ -103,7 +103,7 @@ const parseMetadataFromTSV = (data, sampleNameToId) => {
     }
 
     // Check for multiple metadata assignment to the same sample and track
-    if (sampleMetadataPair[`${sampleName}@${metadataKey}`] === undefined) {
+    if (!Object.prototype.hasOwnProperty.call(sampleMetadataPair, `${sampleName}@${metadataKey}`)) {
       sampleMetadataPair[`${sampleName}@${metadataKey}`] = index + 1;
     } else {
       const duplicateLine = sampleMetadataPair[`${sampleName}@${metadataKey}`];
diff --git a/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
index d1cb61af0..cc4ef705b 100644
--- a/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
+++ b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
@@ -72,7 +72,7 @@ Array [
 
 exports[`metadataTrackController parseMetadataFromTSV throws error if there are duplicated input 1`] = `"Multiple assignment(s) to the same entry on line(s): 2 & 3, 5 & 6"`;
 
-exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid lines 1`] = `"Invalid line(s): 1, 4, 5"`;
+exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid lines 1`] = `"Cannot read property 'trim' of undefined"`;
 
 exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid samples 1`] = `"Invalid sample names on line(s): sample A, sample C"`;
 

From 54f1421eb6dc46d41e027db431cf8fa37fa373ba Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Mon, 27 Feb 2023 12:03:23 +0000
Subject: [PATCH 34/65] rename variable

---
 src/api.v2/controllers/metadataTrackController.js | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/api.v2/controllers/metadataTrackController.js b/src/api.v2/controllers/metadataTrackController.js
index 44c9f5bb5..0f4747a00 100644
--- a/src/api.v2/controllers/metadataTrackController.js
+++ b/src/api.v2/controllers/metadataTrackController.js
@@ -84,7 +84,7 @@ const parseMetadataFromTSV = (data, sampleNameToId) => {
   const invalidSamples = new Set();
   const invalidDuplicates = [];
 
-  const sampleMetadataPair = {};
+  const sampleMetadataPairCounts = {};
 
   const result = data.trim().split('\n').map((line, index) => {
     // check that there are 3 elements per line
@@ -103,10 +103,10 @@ const parseMetadataFromTSV = (data, sampleNameToId) => {
     }
 
     // Check for multiple metadata assignment to the same sample and track
-    if (!Object.prototype.hasOwnProperty.call(sampleMetadataPair, `${sampleName}@${metadataKey}`)) {
-      sampleMetadataPair[`${sampleName}@${metadataKey}`] = index + 1;
+    if (!Object.prototype.hasOwnProperty.call(sampleMetadataPairCounts, `${sampleName}@${metadataKey}`)) {
+      sampleMetadataPairCounts[`${sampleName}@${metadataKey}`] = index + 1;
     } else {
-      const duplicateLine = sampleMetadataPair[`${sampleName}@${metadataKey}`];
+      const duplicateLine = sampleMetadataPairCounts[`${sampleName}@${metadataKey}`];
       invalidDuplicates.push(`${duplicateLine} & ${index + 1}`);
     }
 

From 46b6653d25dc984a01facb95573d27484f1e74a2 Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Mon, 27 Feb 2023 12:22:52 +0000
Subject: [PATCH 35/65] update error message

---
 src/api.v2/controllers/metadataTrackController.js               | 2 +-
 .../__snapshots__/metadataTrackController.test.js.snap          | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/api.v2/controllers/metadataTrackController.js b/src/api.v2/controllers/metadataTrackController.js
index 0f4747a00..cc30af321 100644
--- a/src/api.v2/controllers/metadataTrackController.js
+++ b/src/api.v2/controllers/metadataTrackController.js
@@ -116,7 +116,7 @@ const parseMetadataFromTSV = (data, sampleNameToId) => {
   const errors = [];
   if (invalidSamples.size > 0) errors.push(`Invalid sample names on line(s): ${Array.from(invalidSamples).join(', ')}`);
   if (invalidLines.length > 0) errors.push(`Invalid line(s): ${invalidLines.join(', ')}`);
-  if (invalidDuplicates.length > 0) errors.push(`Multiple assignment(s) to the same entry on line(s): ${invalidDuplicates.join(', ')}`);
+  if (invalidDuplicates.length > 0) errors.push(`Multiple assignments to the same entry on lines: ${invalidDuplicates.join(', ')}`);
   if (errors.length > 0) throw new BadRequestError(errors.join('\n'));
 
   return result;
diff --git a/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
index cc4ef705b..6e995bec6 100644
--- a/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
+++ b/tests/api.v2/controllers/__snapshots__/metadataTrackController.test.js.snap
@@ -70,7 +70,7 @@ Array [
 ]
 `;
 
-exports[`metadataTrackController parseMetadataFromTSV throws error if there are duplicated input 1`] = `"Multiple assignment(s) to the same entry on line(s): 2 & 3, 5 & 6"`;
+exports[`metadataTrackController parseMetadataFromTSV throws error if there are duplicated input 1`] = `"Multiple assignments to the same entry on lines: 2 & 3, 5 & 6"`;
 
 exports[`metadataTrackController parseMetadataFromTSV throws error if there are invalid lines 1`] = `"Cannot read property 'trim' of undefined"`;
 

From 6db5982a740b4d595225fdde68fdda730e406141 Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Mon, 27 Feb 2023 13:44:32 +0000
Subject: [PATCH 36/65] add provider option to image repository

---
 .flux.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.flux.yaml b/.flux.yaml
index a09aa5cc2..8089dc852 100644
--- a/.flux.yaml
+++ b/.flux.yaml
@@ -24,6 +24,7 @@ metadata:
 spec:
   image: FILLED_IN_BY_CI
   interval: 2m0s
+  provider: aws
 ---
 apiVersion: image.toolkit.fluxcd.io/v1beta1
 kind: ImagePolicy

From 917aea4a809575618dfcbe63d2f5a28af408e911 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Mon, 27 Feb 2023 11:36:02 -0300
Subject: [PATCH 37/65] Add poc implementation

---
 .../work-request-bodies/WorkRequestMarkerGenes.v2.yaml       | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml b/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml
index a3c42df45..ef42868e9 100644
--- a/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml
+++ b/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml
@@ -12,7 +12,10 @@ properties:
   cellSetKey:
     type: string
     description: Cell set key to be be used in the heatmap
+  cellIds:
+    type: array
+    description: CellIds
 required:
   - name
   - nGenes
-  - cellSetKey
\ No newline at end of file
+  - cellSetKey

From a5775844b2883612d92db62ca0a55f947979ce64 Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Mon, 27 Feb 2023 16:50:59 +0000
Subject: [PATCH 38/65] upgrade to flux image resources to use v1beta2

---
 .flux.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.flux.yaml b/.flux.yaml
index 8089dc852..17742ff6b 100644
--- a/.flux.yaml
+++ b/.flux.yaml
@@ -16,7 +16,7 @@ spec:
   ref:
     branch: FILLED_IN_BY_CI
 ---
-apiVersion: image.toolkit.fluxcd.io/v1beta1
+apiVersion: image.toolkit.fluxcd.io/v1beta2
 kind: ImageRepository
 metadata:
   name: FILLED_IN_BY_CI
@@ -26,7 +26,7 @@ spec:
   interval: 2m0s
   provider: aws
 ---
-apiVersion: image.toolkit.fluxcd.io/v1beta1
+apiVersion: image.toolkit.fluxcd.io/v1beta2
 kind: ImagePolicy
 metadata:
   name: FILLED_IN_BY_CI

From 901979db30998614b0e84d455a196d7f82efb18e Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Mon, 27 Feb 2023 18:18:33 +0000
Subject: [PATCH 39/65] test changes

---
 src/api.v2/model/MetadataTrack.js | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/api.v2/model/MetadataTrack.js b/src/api.v2/model/MetadataTrack.js
index ed7e0904f..8046ba254 100644
--- a/src/api.v2/model/MetadataTrack.js
+++ b/src/api.v2/model/MetadataTrack.js
@@ -1,3 +1,6 @@
+// REMOVE BEFORE MERGING
+// Test flux propagating changes
+
 // @ts-nocheck
 const BasicModel = require('./BasicModel');
 const sqlClient = require('../../sql/sqlClient');

From a4415662a1b25734aaeb1b8b0c967097cc808bd8 Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Mon, 27 Feb 2023 18:29:20 +0000
Subject: [PATCH 40/65] revert test changes

---
 src/api.v2/model/MetadataTrack.js | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/src/api.v2/model/MetadataTrack.js b/src/api.v2/model/MetadataTrack.js
index 8046ba254..ed7e0904f 100644
--- a/src/api.v2/model/MetadataTrack.js
+++ b/src/api.v2/model/MetadataTrack.js
@@ -1,6 +1,3 @@
-// REMOVE BEFORE MERGING
-// Test flux propagating changes
-
 // @ts-nocheck
 const BasicModel = require('./BasicModel');
 const sqlClient = require('../../sql/sqlClient');

From 597dbf7921c378622df6562f681a76e4669313d8 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Tue, 28 Feb 2023 09:40:56 -0300
Subject: [PATCH 41/65] Remove submitMarkerHeatmapWork, it would require too
 much setup and is going to be run again in the ui anyway due to downsample
 ids

---
 .../helpers/pipeline/handleQCResponse.js      |  3 +-
 .../workSubmit/submitMarkerHeatmapWork.js     | 25 --------
 .../worker/submitMarkerHeatmapWork.test.js    | 64 -------------------
 3 files changed, 1 insertion(+), 91 deletions(-)
 delete mode 100644 src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
 delete mode 100644 tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js

diff --git a/src/api.v2/helpers/pipeline/handleQCResponse.js b/src/api.v2/helpers/pipeline/handleQCResponse.js
index 15958d7ae..eb741d684 100644
--- a/src/api.v2/helpers/pipeline/handleQCResponse.js
+++ b/src/api.v2/helpers/pipeline/handleQCResponse.js
@@ -18,7 +18,6 @@ const getPipelineStatus = require('./getPipelineStatus');
 const Experiment = require('../../model/Experiment');
 const Plot = require('../../model/Plot');
 const submitEmbeddingWork = require('../worker/workSubmit/submitEmbeddingWork');
-const submitMarkerHeatmapWork = require('../worker/workSubmit/submitMarkerHeatmapWork');
 
 const logger = getLogger();
 
@@ -26,7 +25,7 @@ const hookRunner = new HookRunner();
 
 hookRunner.register(constants.ASSIGN_POD_TO_PIPELINE, [assignPodToPipeline]);
 hookRunner.registerAll([sendNotification]);
-hookRunner.register('configureEmbedding', [cleanupPods, updatePipelineVersion, submitEmbeddingWork, submitMarkerHeatmapWork]);
+hookRunner.register('configureEmbedding', [cleanupPods, updatePipelineVersion, submitEmbeddingWork]);
 
 const getOutputFromS3 = async (message) => {
   const { output: { bucket, key } } = message;
diff --git a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
deleted file mode 100644
index 44607b5d8..000000000
--- a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
+++ /dev/null
@@ -1,25 +0,0 @@
-const getExtraDependencies = require('./getExtraDependencies');
-const submitWork = require('./submitWork');
-
-
-const submitMarkerHeatmapWork = async (message) => {
-  const { experimentId, input: { authJWT } } = message;
-
-  const numGenes = 5;
-  const selectedCellSet = 'louvain';
-
-  const body = {
-    name: 'MarkerHeatmap',
-    nGenes: numGenes,
-    cellSetKey: selectedCellSet,
-  };
-
-
-  const extraDependencies = await getExtraDependencies(body.name, message);
-  const ETag = await submitWork(experimentId, authJWT, body, extraDependencies);
-
-  // explicitly return ETag to make it stand out more in tests and so harder to break
-  return ETag;
-};
-
-module.exports = submitMarkerHeatmapWork;
diff --git a/tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js b/tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js
deleted file mode 100644
index 60327796a..000000000
--- a/tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js
+++ /dev/null
@@ -1,64 +0,0 @@
-const createObjectHash = require('../../../../src/api.v2/helpers/worker/createObjectHash');
-const submitMarkerHeatmapWork = require('../../../../src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork');
-const validateAndSubmitWork = require('../../../../src/api.v2/events/validateAndSubmitWork');
-
-jest.mock('../../../../src/api.v2/helpers/worker/createObjectHash');
-jest.mock('../../../../src/api.v2/helpers/pipeline/getPipelineStatus');
-jest.mock('../../../../src/api.v2/helpers/worker/getWorkerStatus');
-jest.mock('../../../../src/api.v2/events/validateAndSubmitWork');
-
-
-const message = {
-  experimentId: '6463cb35-3e08-4e94-a181-6d155a5ca570',
-  taskName: 'configureEmbedding',
-  input: {
-    experimentId: '6463cb35-3e08-4e94-a181-6d155a5ca570',
-    taskName: 'configureEmbedding',
-    processName: 'qc',
-    sampleUuid: '',
-    uploadCountMatrix: false,
-    authJWT: 'Bearer whatever',
-    config: {
-      embeddingSettings: {
-        method: 'umap',
-        methodSettings: {
-          tsne: { perplexity: 30, learningRate: 200 },
-          umap: { distanceMetric: 'cosine', minimumDistance: 0.3 },
-        },
-      },
-      clusteringSettings: {
-        method: 'louvain',
-        methodSettings: { louvain: { resolution: 0.8 } },
-      },
-    },
-
-  },
-  output: {
-    bucket: 'worker-results-development-000000000000',
-    key: '0eabfedf-0efe-4abf-8725-7062c54ed5e1',
-  },
-  response: { error: false },
-  pipelineVersion: 2,
-  apiUrl: null,
-};
-
-describe('submitWorkEmbedding', () => {
-  // If this test fails it means you have changed parameters upon which the feature or precomputing
-  // the embedding / marker heatmp feature depends on. These parameters are duplicated
-  // in the UI / API if you have changed them here, make sure you change them in the
-  // other repository or that feature will stop working.
-  it('submits the work and the ETag / params are correct', async () => {
-    const ETag = await submitMarkerHeatmapWork(message);
-
-
-    // these are the parameters used to created the ETag and
-    // they should match exactly UI snapshot:
-    // loadMarkerGenes.defaultParams.test.js.snap
-    expect(createObjectHash.mock.calls).toMatchSnapshot();
-    // this ETag should match exactly the one in
-    // loadMarkerGenes.defaultParams.test.js
-    expect(ETag).toEqual('9db473fff00ea358446196ee3276f486'); // pragma: allowlist secret
-
-    expect(validateAndSubmitWork).toBeCalledTimes(1);
-  });
-});

From 50822ac803683dddd0366b11f7aa17df05a16636 Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Wed, 1 Mar 2023 09:15:13 +0000
Subject: [PATCH 42/65] pass pipeline ignore ssl certificate to API

---
 .../paramsGetters/getGeneralParams.js         |  1 +
 .../constructors/submitBatchJob.js            |  2 +-
 src/config/default-config.js                  |  2 +-
 src/config/test-config.js                     |  2 +-
 .../pipelineConstruct.test.js.snap            |  9 +-
 .../stateMachineSkeletons.test.js.snap        | 91 ++++++++++++++-----
 .../default-config-production.test.js.snap    |  2 +-
 ...ault-config-staging-sandboxid.test.js.snap |  2 +-
 .../default-config-staging.test.js.snap       |  2 +-
 9 files changed, 85 insertions(+), 28 deletions(-)

diff --git a/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/paramsGetters/getGeneralParams.js b/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/paramsGetters/getGeneralParams.js
index 84da0e041..8bc6f8f2b 100644
--- a/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/paramsGetters/getGeneralParams.js
+++ b/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/paramsGetters/getGeneralParams.js
@@ -16,6 +16,7 @@ const getGeneralParams = (taskName, context) => {
     taskName,
     processName,
     server: remoterServer,
+    ignoreSslCert: config.pipelineIgnoreSSLCertificate,
   };
 
   return params;
diff --git a/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/submitBatchJob.js b/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/submitBatchJob.js
index d0ba0bfdb..fa0cffbe1 100644
--- a/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/submitBatchJob.js
+++ b/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/submitBatchJob.js
@@ -51,7 +51,7 @@ const submitBatchJob = (context, step) => {
           },
           {
             Name: 'IGNORE_SSL_CERTIFICATE',
-            Value: `${config.awsBatchIgnoreSSLCertificate}`,
+            Value: `${config.pipelineIgnoreSSLCertificate}`,
           },
           {
             Name: 'DOMAIN_NAME',
diff --git a/src/config/default-config.js b/src/config/default-config.js
index 39f243bc1..cd41e4afc 100644
--- a/src/config/default-config.js
+++ b/src/config/default-config.js
@@ -70,7 +70,7 @@ const config = {
   emailDomainName: `https://${domainName}`,
   publicApiUrl: `https://api.${domainName}`,
   // Insert an env variable to Batch work to ignore certs for deployments with self-signed certs.
-  awsBatchIgnoreSSLCertificate: Boolean(process.env.NODE_TLS_REJECT_UNAUTHORIZED),
+  pipelineIgnoreSSLCertificate: Boolean(process.env.NODE_TLS_REJECT_UNAUTHORIZED),
   // Used for Batch reporting
   datadogApiKey: process.env.DD_API_KEY || '',
   datadogAppKey: process.env.DD_APP_KEY || '',
diff --git a/src/config/test-config.js b/src/config/test-config.js
index 18510a08a..9093a66e8 100644
--- a/src/config/test-config.js
+++ b/src/config/test-config.js
@@ -21,7 +21,7 @@ module.exports = {
   },
   cachingEnabled: false,
   publicApiUrl: 'test-public-api-url',
-  awsBatchIgnoreSSLCertificate: false,
+  pipelineIgnoreSSLCertificate: false,
   datadogApiKey: 'test-datadog-api-key', // pragma: allowlist secret
   datadogAppKey: 'test-datadog-app-key',
   workerVersion: 3, // needs to match workerVersion in UI
diff --git a/tests/api.v2/helpers/pipeline/__snapshots__/pipelineConstruct.test.js.snap b/tests/api.v2/helpers/pipeline/__snapshots__/pipelineConstruct.test.js.snap
index 662bd6c62..5b204a19e 100644
--- a/tests/api.v2/helpers/pipeline/__snapshots__/pipelineConstruct.test.js.snap
+++ b/tests/api.v2/helpers/pipeline/__snapshots__/pipelineConstruct.test.js.snap
@@ -178,7 +178,7 @@ exports[`test for pipeline services Create Subset pipeline works: createStateMac
 Array [
   Array [
     Object {
-      "definition": "{\\"Comment\\":\\"Subset Pipeline for clusterEnv 'test'\\",\\"StartAt\\":\\"RequestPod\\",\\"States\\":{\\"RequestPod\\":{\\"ResultPath\\":null,\\"Next\\":\\"WaitForPod\\",\\"Comment\\":\\"Send a message through SNS so that the API assigns a pod to the pipeline\\",\\"Type\\":\\"Task\\",\\"Resource\\":\\"arn:aws:states:::sns:publish\\",\\"Parameters\\":{\\"TopicArn\\":\\"arn:aws:sns:eu-west-1:000000000000:work-results-test-default-v2\\",\\"Message\\":\\"{\\\\\\"taskName\\\\\\":\\\\\\"assignPodToPipeline\\\\\\",\\\\\\"experimentId\\\\\\":\\\\\\"toExperimentId\\\\\\",\\\\\\"apiUrl\\\\\\":\\\\\\"test-public-api-url\\\\\\",\\\\\\"input\\\\\\":{\\\\\\"experimentId\\\\\\":\\\\\\"toExperimentId\\\\\\",\\\\\\"sandboxId\\\\\\":\\\\\\"default\\\\\\",\\\\\\"activityId\\\\\\":\\\\\\"pipeline-test-mock-uuid\\\\\\",\\\\\\"processName\\\\\\":\\\\\\"subset\\\\\\"}}\\",\\"MessageAttributes\\":{\\"type\\":{\\"DataType\\":\\"String\\",\\"StringValue\\":\\"PipelineResponse\\"}}}},\\"WaitForPod\\":{\\"ResultPath\\":null,\\"Next\\":\\"SubsetSeurat\\",\\"Type\\":\\"Map\\",\\"ItemsPath\\":\\"$.retries\\",\\"MaxConcurrency\\":1,\\"Retry\\":[{\\"ErrorEquals\\":[\\"NoPodAssigned\\"],\\"IntervalSeconds\\":1,\\"MaxAttempts\\":13,\\"BackoffRate\\":1.5}],\\"Iterator\\":{\\"StartAt\\":\\"GetAssignedPod\\",\\"States\\":{\\"GetAssignedPod\\":{\\"Next\\":\\"IsPodAssigned\\",\\"Type\\":\\"Task\\",\\"Comment\\":\\"Retrieves first unassigned and running pipeline pod.\\",\\"Resource\\":\\"arn:aws:states:::eks:call\\",\\"Parameters\\":{\\"ClusterName\\":\\"biomage-test\\",\\"CertificateAuthority\\":\\"AAAAAAAAAAA\\",\\"Endpoint\\":\\"https://test-endpoint.me/fgh\\",\\"Method\\":\\"GET\\",\\"Path\\":\\"/api/v1/namespaces/pipeline-test-namespace/pods\\",\\"QueryParameters\\":{\\"labelSelector\\":[\\"type=pipeline,activityId=pipeline-test-mock-uuid\\"]}}},\\"IsPodAssigned\\":{\\"Type\\":\\"Choice\\",\\"Comment\\":\\"Redirects to an error state if the pipeline pod is not assigned 
yet.\\",\\"Choices\\":[{\\"Variable\\":\\"$.ResponseBody.items[0]\\",\\"IsPresent\\":false,\\"Next\\":\\"NoPodAssigned\\"}],\\"Default\\":\\"ReadyToRun\\"},\\"NoPodAssigned\\":{\\"Type\\":\\"Fail\\",\\"Cause\\":\\"No available and running pipeline pods.\\",\\"Error\\":\\"NoPodAssigned\\"},\\"ReadyToRun\\":{\\"Type\\":\\"Pass\\",\\"End\\":true}}}},\\"SubsetSeurat\\":{\\"Next\\":\\"PrepareExperiment\\",\\"Type\\":\\"Task\\",\\"Resource\\":\\"arn:aws:states:eu-west-1:000000000000:activity:pipeline-test-mock-uuid\\",\\"ResultPath\\":null,\\"TimeoutSeconds\\":10800,\\"HeartbeatSeconds\\":90,\\"Parameters\\":{\\"experimentId\\":\\"toExperimentId\\",\\"taskName\\":\\"subsetSeurat\\",\\"processName\\":\\"subset\\",\\"server\\":\\"remoter-server-toExperimentId.pipeline-test-namespace.svc.cluster.local\\",\\"parentExperimentId\\":\\"fromExperimentId\\",\\"subsetExperimentId\\":\\"toExperimentId\\",\\"cellSetKeys\\":[\\"louvain-1\\",\\"louvain-2\\"]},\\"Catch\\":[{\\"ErrorEquals\\":[\\"States.ALL\\"],\\"ResultPath\\":\\"$.errorInfo\\",\\"Next\\":\\"HandleError\\"}]},\\"PrepareExperiment\\":{\\"Next\\":\\"UploadToAWS\\",\\"Type\\":\\"Task\\",\\"Resource\\":\\"arn:aws:states:eu-west-1:000000000000:activity:pipeline-test-mock-uuid\\",\\"ResultPath\\":null,\\"TimeoutSeconds\\":10800,\\"HeartbeatSeconds\\":90,\\"Parameters\\":{\\"experimentId\\":\\"toExperimentId\\",\\"taskName\\":\\"prepareExperiment\\",\\"processName\\":\\"subset\\",\\"server\\":\\"remoter-server-toExperimentId.pipeline-test-namespace.svc.cluster.local\\",\\"experimentName\\":\\"toExperimentName\\",\\"authJWT\\":\\"mockAuthJWT\\"},\\"Catch\\":[{\\"ErrorEquals\\":[\\"States.ALL\\"],\\"ResultPath\\":\\"$.errorInfo\\",\\"Next\\":\\"HandleError\\"}]},\\"UploadToAWS\\":{\\"Next\\":\\"EndOfPipeline\\",\\"Type\\":\\"Task\\",\\"Resource\\":\\"arn:aws:states:eu-west-1:000000000000:activity:pipeline-test-mock-uuid\\",\\"ResultPath\\":null,\\"TimeoutSeconds\\":10800,\\"HeartbeatSeconds\\":90,\\"Parameters\\":{\\"experimentI
d\\":\\"toExperimentId\\",\\"taskName\\":\\"uploadToAWS\\",\\"processName\\":\\"subset\\",\\"server\\":\\"remoter-server-toExperimentId.pipeline-test-namespace.svc.cluster.local\\",\\"experimentName\\":\\"toExperimentName\\",\\"authJWT\\":\\"mockAuthJWT\\"},\\"Catch\\":[{\\"ErrorEquals\\":[\\"States.ALL\\"],\\"ResultPath\\":\\"$.errorInfo\\",\\"Next\\":\\"HandleError\\"}]},\\"HandleError\\":{\\"Next\\":\\"MarkAsFailed\\",\\"Type\\":\\"Task\\",\\"Resource\\":\\"arn:aws:states:::sns:publish\\",\\"Parameters\\":{\\"TopicArn\\":\\"arn:aws:sns:eu-west-1:000000000000:work-results-test-default-v2\\",\\"Message.$\\":\\"States.Format('\\\\\\\\{\\\\\\"taskName\\\\\\":\\\\\\"pipelineError\\\\\\",\\\\\\"experimentId\\\\\\":\\\\\\"toExperimentId\\\\\\",\\\\\\"apiUrl\\\\\\":\\\\\\"test-public-api-url\\\\\\",\\\\\\"input\\\\\\":\\\\\\\\{\\\\\\"experimentId\\\\\\":\\\\\\"toExperimentId\\\\\\",\\\\\\"error\\\\\\":\\\\\\"{}\\\\\\",\\\\\\"taskName\\\\\\":\\\\\\"pipelineError\\\\\\",\\\\\\"sandboxId\\\\\\":\\\\\\"default\\\\\\",\\\\\\"activityId\\\\\\":\\\\\\"pipeline-test-mock-uuid\\\\\\",\\\\\\"processName\\\\\\":\\\\\\"subset\\\\\\"\\\\\\\\}\\\\\\\\}', $.errorInfo.Error)\\",\\"MessageAttributes\\":{\\"type\\":{\\"DataType\\":\\"String\\",\\"StringValue\\":\\"PipelineResponse\\"}}}},\\"MarkAsFailed\\":{\\"Type\\":\\"Fail\\"},\\"EndOfPipeline\\":{\\"Type\\":\\"Pass\\",\\"End\\":true}}}",
+      "definition": "{\\"Comment\\":\\"Subset Pipeline for clusterEnv 'test'\\",\\"StartAt\\":\\"RequestPod\\",\\"States\\":{\\"RequestPod\\":{\\"ResultPath\\":null,\\"Next\\":\\"WaitForPod\\",\\"Comment\\":\\"Send a message through SNS so that the API assigns a pod to the pipeline\\",\\"Type\\":\\"Task\\",\\"Resource\\":\\"arn:aws:states:::sns:publish\\",\\"Parameters\\":{\\"TopicArn\\":\\"arn:aws:sns:eu-west-1:000000000000:work-results-test-default-v2\\",\\"Message\\":\\"{\\\\\\"taskName\\\\\\":\\\\\\"assignPodToPipeline\\\\\\",\\\\\\"experimentId\\\\\\":\\\\\\"toExperimentId\\\\\\",\\\\\\"apiUrl\\\\\\":\\\\\\"test-public-api-url\\\\\\",\\\\\\"input\\\\\\":{\\\\\\"experimentId\\\\\\":\\\\\\"toExperimentId\\\\\\",\\\\\\"sandboxId\\\\\\":\\\\\\"default\\\\\\",\\\\\\"activityId\\\\\\":\\\\\\"pipeline-test-mock-uuid\\\\\\",\\\\\\"processName\\\\\\":\\\\\\"subset\\\\\\"}}\\",\\"MessageAttributes\\":{\\"type\\":{\\"DataType\\":\\"String\\",\\"StringValue\\":\\"PipelineResponse\\"}}}},\\"WaitForPod\\":{\\"ResultPath\\":null,\\"Next\\":\\"SubsetSeurat\\",\\"Type\\":\\"Map\\",\\"ItemsPath\\":\\"$.retries\\",\\"MaxConcurrency\\":1,\\"Retry\\":[{\\"ErrorEquals\\":[\\"NoPodAssigned\\"],\\"IntervalSeconds\\":1,\\"MaxAttempts\\":13,\\"BackoffRate\\":1.5}],\\"Iterator\\":{\\"StartAt\\":\\"GetAssignedPod\\",\\"States\\":{\\"GetAssignedPod\\":{\\"Next\\":\\"IsPodAssigned\\",\\"Type\\":\\"Task\\",\\"Comment\\":\\"Retrieves first unassigned and running pipeline pod.\\",\\"Resource\\":\\"arn:aws:states:::eks:call\\",\\"Parameters\\":{\\"ClusterName\\":\\"biomage-test\\",\\"CertificateAuthority\\":\\"AAAAAAAAAAA\\",\\"Endpoint\\":\\"https://test-endpoint.me/fgh\\",\\"Method\\":\\"GET\\",\\"Path\\":\\"/api/v1/namespaces/pipeline-test-namespace/pods\\",\\"QueryParameters\\":{\\"labelSelector\\":[\\"type=pipeline,activityId=pipeline-test-mock-uuid\\"]}}},\\"IsPodAssigned\\":{\\"Type\\":\\"Choice\\",\\"Comment\\":\\"Redirects to an error state if the pipeline pod is not assigned 
yet.\\",\\"Choices\\":[{\\"Variable\\":\\"$.ResponseBody.items[0]\\",\\"IsPresent\\":false,\\"Next\\":\\"NoPodAssigned\\"}],\\"Default\\":\\"ReadyToRun\\"},\\"NoPodAssigned\\":{\\"Type\\":\\"Fail\\",\\"Cause\\":\\"No available and running pipeline pods.\\",\\"Error\\":\\"NoPodAssigned\\"},\\"ReadyToRun\\":{\\"Type\\":\\"Pass\\",\\"End\\":true}}}},\\"SubsetSeurat\\":{\\"Next\\":\\"PrepareExperiment\\",\\"Type\\":\\"Task\\",\\"Resource\\":\\"arn:aws:states:eu-west-1:000000000000:activity:pipeline-test-mock-uuid\\",\\"ResultPath\\":null,\\"TimeoutSeconds\\":10800,\\"HeartbeatSeconds\\":90,\\"Parameters\\":{\\"experimentId\\":\\"toExperimentId\\",\\"taskName\\":\\"subsetSeurat\\",\\"processName\\":\\"subset\\",\\"server\\":\\"remoter-server-toExperimentId.pipeline-test-namespace.svc.cluster.local\\",\\"ignoreSslCert\\":false,\\"parentExperimentId\\":\\"fromExperimentId\\",\\"subsetExperimentId\\":\\"toExperimentId\\",\\"cellSetKeys\\":[\\"louvain-1\\",\\"louvain-2\\"]},\\"Catch\\":[{\\"ErrorEquals\\":[\\"States.ALL\\"],\\"ResultPath\\":\\"$.errorInfo\\",\\"Next\\":\\"HandleError\\"}]},\\"PrepareExperiment\\":{\\"Next\\":\\"UploadToAWS\\",\\"Type\\":\\"Task\\",\\"Resource\\":\\"arn:aws:states:eu-west-1:000000000000:activity:pipeline-test-mock-uuid\\",\\"ResultPath\\":null,\\"TimeoutSeconds\\":10800,\\"HeartbeatSeconds\\":90,\\"Parameters\\":{\\"experimentId\\":\\"toExperimentId\\",\\"taskName\\":\\"prepareExperiment\\",\\"processName\\":\\"subset\\",\\"server\\":\\"remoter-server-toExperimentId.pipeline-test-namespace.svc.cluster.local\\",\\"ignoreSslCert\\":false,\\"experimentName\\":\\"toExperimentName\\",\\"authJWT\\":\\"mockAuthJWT\\"},\\"Catch\\":[{\\"ErrorEquals\\":[\\"States.ALL\\"],\\"ResultPath\\":\\"$.errorInfo\\",\\"Next\\":\\"HandleError\\"}]},\\"UploadToAWS\\":{\\"Next\\":\\"EndOfPipeline\\",\\"Type\\":\\"Task\\",\\"Resource\\":\\"arn:aws:states:eu-west-1:000000000000:activity:pipeline-test-mock-uuid\\",\\"ResultPath\\":null,\\"TimeoutSeconds\\":10800,\\"Hea
rtbeatSeconds\\":90,\\"Parameters\\":{\\"experimentId\\":\\"toExperimentId\\",\\"taskName\\":\\"uploadToAWS\\",\\"processName\\":\\"subset\\",\\"server\\":\\"remoter-server-toExperimentId.pipeline-test-namespace.svc.cluster.local\\",\\"ignoreSslCert\\":false,\\"experimentName\\":\\"toExperimentName\\",\\"authJWT\\":\\"mockAuthJWT\\"},\\"Catch\\":[{\\"ErrorEquals\\":[\\"States.ALL\\"],\\"ResultPath\\":\\"$.errorInfo\\",\\"Next\\":\\"HandleError\\"}]},\\"HandleError\\":{\\"Next\\":\\"MarkAsFailed\\",\\"Type\\":\\"Task\\",\\"Resource\\":\\"arn:aws:states:::sns:publish\\",\\"Parameters\\":{\\"TopicArn\\":\\"arn:aws:sns:eu-west-1:000000000000:work-results-test-default-v2\\",\\"Message.$\\":\\"States.Format('\\\\\\\\{\\\\\\"taskName\\\\\\":\\\\\\"pipelineError\\\\\\",\\\\\\"experimentId\\\\\\":\\\\\\"toExperimentId\\\\\\",\\\\\\"apiUrl\\\\\\":\\\\\\"test-public-api-url\\\\\\",\\\\\\"input\\\\\\":\\\\\\\\{\\\\\\"experimentId\\\\\\":\\\\\\"toExperimentId\\\\\\",\\\\\\"error\\\\\\":\\\\\\"{}\\\\\\",\\\\\\"taskName\\\\\\":\\\\\\"pipelineError\\\\\\",\\\\\\"sandboxId\\\\\\":\\\\\\"default\\\\\\",\\\\\\"activityId\\\\\\":\\\\\\"pipeline-test-mock-uuid\\\\\\",\\\\\\"processName\\\\\\":\\\\\\"subset\\\\\\"\\\\\\\\}\\\\\\\\}', $.errorInfo.Error)\\",\\"MessageAttributes\\":{\\"type\\":{\\"DataType\\":\\"String\\",\\"StringValue\\":\\"PipelineResponse\\"}}}},\\"MarkAsFailed\\":{\\"Type\\":\\"Fail\\"},\\"EndOfPipeline\\":{\\"Type\\":\\"Pass\\",\\"End\\":true}}}",
       "loggingConfiguration": Object {
         "level": "OFF",
       },
@@ -279,6 +279,7 @@ Array [
                     },
                   },
                   "experimentId": "testExperimentId",
+                  "ignoreSslCert": false,
                   "processName": "qc",
                   "sampleUuid.$": "$.sampleUuid",
                   "server": "remoter-server-testExperimentId.pipeline-test-namespace.svc.cluster.local",
@@ -333,6 +334,7 @@ Array [
                     },
                   },
                   "experimentId": "testExperimentId",
+                  "ignoreSslCert": false,
                   "processName": "qc",
                   "sampleUuid.$": "$.sampleUuid",
                   "server": "remoter-server-testExperimentId.pipeline-test-namespace.svc.cluster.local",
@@ -387,6 +389,7 @@ Array [
               },
             },
             "experimentId": "testExperimentId",
+            "ignoreSslCert": false,
             "processName": "qc",
             "sampleUuid": "",
             "server": "remoter-server-testExperimentId.pipeline-test-namespace.svc.cluster.local",
@@ -440,6 +443,7 @@ Array [
               },
             },
             "experimentId": "testExperimentId",
+            "ignoreSslCert": false,
             "processName": "qc",
             "sampleUuid": "",
             "server": "remoter-server-testExperimentId.pipeline-test-namespace.svc.cluster.local",
@@ -482,6 +486,7 @@ Array [
                     },
                   },
                   "experimentId": "testExperimentId",
+                  "ignoreSslCert": false,
                   "processName": "qc",
                   "sampleUuid.$": "$.sampleUuid",
                   "server": "remoter-server-testExperimentId.pipeline-test-namespace.svc.cluster.local",
@@ -568,6 +573,7 @@ Array [
                     },
                   },
                   "experimentId": "testExperimentId",
+                  "ignoreSslCert": false,
                   "processName": "qc",
                   "sampleUuid.$": "$.sampleUuid",
                   "server": "remoter-server-testExperimentId.pipeline-test-namespace.svc.cluster.local",
@@ -636,6 +642,7 @@ Array [
                     },
                   },
                   "experimentId": "testExperimentId",
+                  "ignoreSslCert": false,
                   "processName": "qc",
                   "sampleUuid.$": "$.sampleUuid",
                   "server": "remoter-server-testExperimentId.pipeline-test-namespace.svc.cluster.local",
diff --git a/tests/api.v2/helpers/pipeline/__snapshots__/stateMachineSkeletons.test.js.snap b/tests/api.v2/helpers/pipeline/__snapshots__/stateMachineSkeletons.test.js.snap
index 6cb4c3e2d..73111f329 100644
--- a/tests/api.v2/helpers/pipeline/__snapshots__/stateMachineSkeletons.test.js.snap
+++ b/tests/api.v2/helpers/pipeline/__snapshots__/stateMachineSkeletons.test.js.snap
@@ -51,7 +51,8 @@ exports[`non-tests to document the State Machines - gem2s local development 1`]
         "experimentId": "mock-experiment-id",
         "taskName": "downloadGem",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -74,7 +75,8 @@ exports[`non-tests to document the State Machines - gem2s local development 1`]
         "experimentId": "mock-experiment-id",
         "taskName": "preproc",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -97,7 +99,8 @@ exports[`non-tests to document the State Machines - gem2s local development 1`]
         "experimentId": "mock-experiment-id",
         "taskName": "emptyDrops",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -120,7 +123,8 @@ exports[`non-tests to document the State Machines - gem2s local development 1`]
         "experimentId": "mock-experiment-id",
         "taskName": "doubletScores",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -143,7 +147,8 @@ exports[`non-tests to document the State Machines - gem2s local development 1`]
         "experimentId": "mock-experiment-id",
         "taskName": "createSeurat",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -166,7 +171,8 @@ exports[`non-tests to document the State Machines - gem2s local development 1`]
         "experimentId": "mock-experiment-id",
         "taskName": "prepareExperiment",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -189,7 +195,8 @@ exports[`non-tests to document the State Machines - gem2s local development 1`]
         "experimentId": "mock-experiment-id",
         "taskName": "uploadToAWS",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -322,7 +329,8 @@ exports[`non-tests to document the State Machines - gem2s production 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "downloadGem",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -345,7 +353,8 @@ exports[`non-tests to document the State Machines - gem2s production 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "preproc",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -368,7 +377,8 @@ exports[`non-tests to document the State Machines - gem2s production 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "emptyDrops",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -391,7 +401,8 @@ exports[`non-tests to document the State Machines - gem2s production 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "doubletScores",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -414,7 +425,8 @@ exports[`non-tests to document the State Machines - gem2s production 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "createSeurat",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -437,7 +449,8 @@ exports[`non-tests to document the State Machines - gem2s production 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "prepareExperiment",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -460,7 +473,8 @@ exports[`non-tests to document the State Machines - gem2s production 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "uploadToAWS",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -593,7 +607,8 @@ exports[`non-tests to document the State Machines - gem2s staging 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "downloadGem",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -616,7 +631,8 @@ exports[`non-tests to document the State Machines - gem2s staging 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "preproc",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -639,7 +655,8 @@ exports[`non-tests to document the State Machines - gem2s staging 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "emptyDrops",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -662,7 +679,8 @@ exports[`non-tests to document the State Machines - gem2s staging 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "doubletScores",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -685,7 +703,8 @@ exports[`non-tests to document the State Machines - gem2s staging 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "createSeurat",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -708,7 +727,8 @@ exports[`non-tests to document the State Machines - gem2s staging 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "prepareExperiment",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -731,7 +751,8 @@ exports[`non-tests to document the State Machines - gem2s staging 1`] = `
         "experimentId": "mock-experiment-id",
         "taskName": "uploadToAWS",
         "processName": "gem2s",
-        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local"
+        "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false
       },
       "Catch": [
         {
@@ -829,6 +850,7 @@ exports[`non-tests to document the State Machines - qc local development 1`] = `
               "taskName": "classifier",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -867,6 +889,7 @@ exports[`non-tests to document the State Machines - qc local development 1`] = `
               "taskName": "cellSizeDistribution",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -905,6 +928,7 @@ exports[`non-tests to document the State Machines - qc local development 1`] = `
               "taskName": "mitochondrialContent",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -943,6 +967,7 @@ exports[`non-tests to document the State Machines - qc local development 1`] = `
               "taskName": "numGenesVsNumUmis",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -981,6 +1006,7 @@ exports[`non-tests to document the State Machines - qc local development 1`] = `
               "taskName": "doubletScores",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -1011,6 +1037,7 @@ exports[`non-tests to document the State Machines - qc local development 1`] = `
         "taskName": "dataIntegration",
         "processName": "qc",
         "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false,
         "sampleUuid": "",
         "uploadCountMatrix": true,
         "authJWT": "mockAuthJWT",
@@ -1038,6 +1065,7 @@ exports[`non-tests to document the State Machines - qc local development 1`] = `
         "taskName": "configureEmbedding",
         "processName": "qc",
         "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false,
         "sampleUuid": "",
         "uploadCountMatrix": false,
         "authJWT": "mockAuthJWT",
@@ -1183,6 +1211,7 @@ exports[`non-tests to document the State Machines - qc production 1`] = `
               "taskName": "classifier",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -1221,6 +1250,7 @@ exports[`non-tests to document the State Machines - qc production 1`] = `
               "taskName": "cellSizeDistribution",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -1259,6 +1289,7 @@ exports[`non-tests to document the State Machines - qc production 1`] = `
               "taskName": "mitochondrialContent",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -1297,6 +1328,7 @@ exports[`non-tests to document the State Machines - qc production 1`] = `
               "taskName": "numGenesVsNumUmis",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -1335,6 +1367,7 @@ exports[`non-tests to document the State Machines - qc production 1`] = `
               "taskName": "doubletScores",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -1365,6 +1398,7 @@ exports[`non-tests to document the State Machines - qc production 1`] = `
         "taskName": "dataIntegration",
         "processName": "qc",
         "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false,
         "sampleUuid": "",
         "uploadCountMatrix": true,
         "authJWT": "mockAuthJWT",
@@ -1392,6 +1426,7 @@ exports[`non-tests to document the State Machines - qc production 1`] = `
         "taskName": "configureEmbedding",
         "processName": "qc",
         "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false,
         "sampleUuid": "",
         "uploadCountMatrix": false,
         "authJWT": "mockAuthJWT",
@@ -1537,6 +1572,7 @@ exports[`non-tests to document the State Machines - qc production with specific
               "taskName": "classifier",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -1575,6 +1611,7 @@ exports[`non-tests to document the State Machines - qc production with specific
               "taskName": "cellSizeDistribution",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -1613,6 +1650,7 @@ exports[`non-tests to document the State Machines - qc production with specific
               "taskName": "mitochondrialContent",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -1651,6 +1689,7 @@ exports[`non-tests to document the State Machines - qc production with specific
               "taskName": "numGenesVsNumUmis",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -1689,6 +1728,7 @@ exports[`non-tests to document the State Machines - qc production with specific
               "taskName": "doubletScores",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -1719,6 +1759,7 @@ exports[`non-tests to document the State Machines - qc production with specific
         "taskName": "dataIntegration",
         "processName": "qc",
         "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false,
         "sampleUuid": "",
         "uploadCountMatrix": true,
         "authJWT": "mockAuthJWT",
@@ -1746,6 +1787,7 @@ exports[`non-tests to document the State Machines - qc production with specific
         "taskName": "configureEmbedding",
         "processName": "qc",
         "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false,
         "sampleUuid": "",
         "uploadCountMatrix": false,
         "authJWT": "mockAuthJWT",
@@ -1891,6 +1933,7 @@ exports[`non-tests to document the State Machines - qc staging 1`] = `
               "taskName": "classifier",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -1929,6 +1972,7 @@ exports[`non-tests to document the State Machines - qc staging 1`] = `
               "taskName": "cellSizeDistribution",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -1967,6 +2011,7 @@ exports[`non-tests to document the State Machines - qc staging 1`] = `
               "taskName": "mitochondrialContent",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -2005,6 +2050,7 @@ exports[`non-tests to document the State Machines - qc staging 1`] = `
               "taskName": "numGenesVsNumUmis",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -2043,6 +2089,7 @@ exports[`non-tests to document the State Machines - qc staging 1`] = `
               "taskName": "doubletScores",
               "processName": "qc",
               "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+              "ignoreSslCert": false,
               "sampleUuid.$": "$.sampleUuid",
               "uploadCountMatrix": false,
               "authJWT": "mockAuthJWT",
@@ -2073,6 +2120,7 @@ exports[`non-tests to document the State Machines - qc staging 1`] = `
         "taskName": "dataIntegration",
         "processName": "qc",
         "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false,
         "sampleUuid": "",
         "uploadCountMatrix": true,
         "authJWT": "mockAuthJWT",
@@ -2100,6 +2148,7 @@ exports[`non-tests to document the State Machines - qc staging 1`] = `
         "taskName": "configureEmbedding",
         "processName": "qc",
         "server": "remoter-server-mock-experiment-id.pipeline-test-namespace.svc.cluster.local",
+        "ignoreSslCert": false,
         "sampleUuid": "",
         "uploadCountMatrix": false,
         "authJWT": "mockAuthJWT",
diff --git a/tests/config/__snapshots__/default-config-production.test.js.snap b/tests/config/__snapshots__/default-config-production.test.js.snap
index 5c7344e67..f86feb89d 100644
--- a/tests/config/__snapshots__/default-config-production.test.js.snap
+++ b/tests/config/__snapshots__/default-config-production.test.js.snap
@@ -6,7 +6,6 @@ Object {
     "prefix": "/",
   },
   "awsAccountId": "000000000000",
-  "awsBatchIgnoreSSLCertificate": false,
   "awsRegion": "eu-west-1",
   "cachingEnabled": true,
   "clusterEnv": "production",
@@ -24,6 +23,7 @@ Object {
   "datadogAppKey": "",
   "domainName": "localhost:5000",
   "emailDomainName": "https://localhost:5000",
+  "pipelineIgnoreSSLCertificate": false,
   "pipelineInstanceConfigUrl": "https://raw.githubusercontent.com/biomage-org/releases/master/production/pipeline.yaml",
   "pipelineNamespace": "pipeline-default",
   "podName": "local",
diff --git a/tests/config/__snapshots__/default-config-staging-sandboxid.test.js.snap b/tests/config/__snapshots__/default-config-staging-sandboxid.test.js.snap
index c05e99694..4e6477db8 100644
--- a/tests/config/__snapshots__/default-config-staging-sandboxid.test.js.snap
+++ b/tests/config/__snapshots__/default-config-staging-sandboxid.test.js.snap
@@ -6,7 +6,6 @@ Object {
     "prefix": "/",
   },
   "awsAccountId": "000000000000",
-  "awsBatchIgnoreSSLCertificate": false,
   "awsRegion": "eu-west-1",
   "cachingEnabled": false,
   "clusterEnv": "staging",
@@ -24,6 +23,7 @@ Object {
   "datadogAppKey": "",
   "domainName": "scp-staging.biomage.net",
   "emailDomainName": "https://ui-mockedSandboxId.scp-staging.biomage.net",
+  "pipelineIgnoreSSLCertificate": false,
   "pipelineInstanceConfigUrl": "https://raw.githubusercontent.com/biomage-org/releases/master/staging/mockedSandboxId.yaml",
   "pipelineNamespace": "pipeline-mockedSandboxId",
   "podName": "local",
diff --git a/tests/config/__snapshots__/default-config-staging.test.js.snap b/tests/config/__snapshots__/default-config-staging.test.js.snap
index 9eb6fdac3..5e75e2176 100644
--- a/tests/config/__snapshots__/default-config-staging.test.js.snap
+++ b/tests/config/__snapshots__/default-config-staging.test.js.snap
@@ -6,7 +6,6 @@ Object {
     "prefix": "/",
   },
   "awsAccountId": "000000000000",
-  "awsBatchIgnoreSSLCertificate": false,
   "awsRegion": "eu-west-1",
   "cachingEnabled": false,
   "clusterEnv": "staging",
@@ -24,6 +23,7 @@ Object {
   "datadogAppKey": "",
   "domainName": "scp-staging.biomage.net",
   "emailDomainName": "https://ui-default.scp-staging.biomage.net",
+  "pipelineIgnoreSSLCertificate": false,
   "pipelineInstanceConfigUrl": "https://raw.githubusercontent.com/biomage-org/releases/master/staging/pipeline.yaml",
   "pipelineNamespace": "pipeline-default",
   "podName": "local",

From d678dcead4766e7b118cb51318306cd5962c30a4 Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Wed, 1 Mar 2023 11:20:43 +0000
Subject: [PATCH 43/65] pass ignore pipeline as a parameter to pipeline

---
 .../pipeline/pipelineConstruct/constructors/submitBatchJob.js | 4 ----
 src/config/default-config.js                                  | 2 +-
 .../pipeline/__snapshots__/stateMachineSkeletons.test.js.snap | 4 ----
 3 files changed, 1 insertion(+), 9 deletions(-)

diff --git a/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/submitBatchJob.js b/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/submitBatchJob.js
index fa0cffbe1..a36cd0f65 100644
--- a/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/submitBatchJob.js
+++ b/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/submitBatchJob.js
@@ -49,10 +49,6 @@ const submitBatchJob = (context, step) => {
             Name: 'BATCH',
             Value: 'true',
           },
-          {
-            Name: 'IGNORE_SSL_CERTIFICATE',
-            Value: `${config.pipelineIgnoreSSLCertificate}`,
-          },
           {
             Name: 'DOMAIN_NAME',
             Value: `${config.domainName}`,
diff --git a/src/config/default-config.js b/src/config/default-config.js
index cd41e4afc..c0c056a21 100644
--- a/src/config/default-config.js
+++ b/src/config/default-config.js
@@ -69,7 +69,7 @@ const config = {
   corsOriginUrl: [...externalOrigins, `https://${domainName}`],
   emailDomainName: `https://${domainName}`,
   publicApiUrl: `https://api.${domainName}`,
-  // Insert an env variable to Batch work to ignore certs for deployments with self-signed certs.
+  // Insert an env variable to allow pipeline to work for deployments with self-signed certs.
   pipelineIgnoreSSLCertificate: Boolean(process.env.NODE_TLS_REJECT_UNAUTHORIZED),
   // Used for Batch reporting
   datadogApiKey: process.env.DD_API_KEY || '',
diff --git a/tests/api.v2/helpers/pipeline/__snapshots__/stateMachineSkeletons.test.js.snap b/tests/api.v2/helpers/pipeline/__snapshots__/stateMachineSkeletons.test.js.snap
index 73111f329..a10eb1858 100644
--- a/tests/api.v2/helpers/pipeline/__snapshots__/stateMachineSkeletons.test.js.snap
+++ b/tests/api.v2/helpers/pipeline/__snapshots__/stateMachineSkeletons.test.js.snap
@@ -1513,10 +1513,6 @@ exports[`non-tests to document the State Machines - qc production with specific
               "Name": "BATCH",
               "Value": "true"
             },
-            {
-              "Name": "IGNORE_SSL_CERTIFICATE",
-              "Value": "false"
-            },
             {
               "Name": "DOMAIN_NAME",
               "Value": "scp.biomage.net"

From 1d112932c2cba90ceca8f173c14e798aa9f8c789 Mon Sep 17 00:00:00 2001
From: stefanbabukov <stefanbabukov98@gmail.com>
Date: Thu, 2 Mar 2023 12:18:54 +0000
Subject: [PATCH 44/65] added retry

---
 src/api.v2/helpers/s3/getS3Client.js | 1 +
 src/api.v2/helpers/s3/putObject.js   | 4 ++--
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/api.v2/helpers/s3/getS3Client.js b/src/api.v2/helpers/s3/getS3Client.js
index 6f7f9b540..a390be60d 100644
--- a/src/api.v2/helpers/s3/getS3Client.js
+++ b/src/api.v2/helpers/s3/getS3Client.js
@@ -9,6 +9,7 @@ const getS3Client = (options) => {
     apiVersion: '2006-03-01',
     signatureVersion: 'v4',
     region: config.awsRegion,
+    maxRetries: 3,
     ...options,
   };
 
diff --git a/src/api.v2/helpers/s3/putObject.js b/src/api.v2/helpers/s3/putObject.js
index 29ea2aa36..a87123097 100644
--- a/src/api.v2/helpers/s3/putObject.js
+++ b/src/api.v2/helpers/s3/putObject.js
@@ -1,7 +1,7 @@
 const NotFoundError = require('../../../utils/responses/NotFoundError');
 const getS3Client = require('./getS3Client');
 
-const pubObject = async (params) => {
+const putObject = async (params) => {
   if (!params.Bucket) throw new Error('Bucket is required');
   if (!params.Key) throw new Error('Key is required');
   if (!params.Body) throw new Error('Body is required');
@@ -19,4 +19,4 @@ const pubObject = async (params) => {
   }
 };
 
-module.exports = pubObject;
+module.exports = putObject;

From d0e9e9833967c64953641f8e9093ceab25a43ee9 Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Fri, 3 Mar 2023 14:31:35 +0000
Subject: [PATCH 45/65] remove params_hash

---
 src/api.v2/model/Experiment.js                     |  2 +-
 src/api.v2/model/ExperimentExecution.js            |  2 +-
 tests/api.v2/model/Experiment.test.js              |  2 +-
 .../model/__snapshots__/Experiment.test.js.snap    |  2 +-
 tests/utils/parseSNSMessage.test.js                | 14 +++++++-------
 5 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/src/api.v2/model/Experiment.js b/src/api.v2/model/Experiment.js
index 24908b9e2..1b7106f04 100644
--- a/src/api.v2/model/Experiment.js
+++ b/src/api.v2/model/Experiment.js
@@ -106,7 +106,7 @@ class Experiment extends BasicModel {
     }
 
     const experimentExecutionFields = [
-      'params_hash', 'state_machine_arn', 'execution_arn', 'last_gem2s_params',
+      'state_machine_arn', 'execution_arn', 'last_gem2s_params',
     ];
 
     const pipelineExecutionKeys = experimentExecutionFields.reduce((acum, current) => {
diff --git a/src/api.v2/model/ExperimentExecution.js b/src/api.v2/model/ExperimentExecution.js
index 283fc7ab1..d773fe932 100644
--- a/src/api.v2/model/ExperimentExecution.js
+++ b/src/api.v2/model/ExperimentExecution.js
@@ -5,7 +5,7 @@ const tableNames = require('./tableNames');
 
 const selectableProps = [
   'experiment_id', 'pipeline_type', 'state_machine_arn', 'execution_arn',
-  'last_status_response', 'last_gem2s_params', 'params_hash',
+  'last_status_response', 'last_gem2s_params',
 ];
 
 class ExperimentExecution extends BasicModel {
diff --git a/tests/api.v2/model/Experiment.test.js b/tests/api.v2/model/Experiment.test.js
index 5753afbea..0888370f4 100644
--- a/tests/api.v2/model/Experiment.test.js
+++ b/tests/api.v2/model/Experiment.test.js
@@ -22,7 +22,7 @@ jest.mock('../../../src/sql/helpers', () => ({
   collapseKeysIntoObject: jest.fn(),
   collapseKeyIntoArray: jest.fn(),
   replaceNullsWithObject: () => (`COALESCE(
-      jsonb_object_agg(pipeline_type, jsonb_build_object('params_hash', params_hash, 'state_machine_arn', state_machine_arn, 'execution_arn', execution_arn))
+      jsonb_object_agg(pipeline_type, jsonb_build_object('state_machine_arn', state_machine_arn, 'execution_arn', execution_arn))
       FILTER(
         WHERE pipeline_type IS NOT NULL
       ),
diff --git a/tests/api.v2/model/__snapshots__/Experiment.test.js.snap b/tests/api.v2/model/__snapshots__/Experiment.test.js.snap
index 86434cb1e..9f66c4be0 100644
--- a/tests/api.v2/model/__snapshots__/Experiment.test.js.snap
+++ b/tests/api.v2/model/__snapshots__/Experiment.test.js.snap
@@ -25,7 +25,7 @@ Array [
 exports[`model/Experiment getExperimentData works correctly 1`] = `
 Array [
   "COALESCE(
-      jsonb_object_agg(pipeline_type, jsonb_build_object('params_hash', params_hash, 'state_machine_arn', state_machine_arn, 'execution_arn', execution_arn))
+      jsonb_object_agg(pipeline_type, jsonb_build_object('state_machine_arn', state_machine_arn, 'execution_arn', execution_arn))
       FILTER(
         WHERE pipeline_type IS NOT NULL
       ),
diff --git a/tests/utils/parseSNSMessage.test.js b/tests/utils/parseSNSMessage.test.js
index 7fea9ca8e..c1c9ca779 100644
--- a/tests/utils/parseSNSMessage.test.js
+++ b/tests/utils/parseSNSMessage.test.js
@@ -35,7 +35,7 @@ describe('parseSNSMessage', () => {
     const mockReq = {
       params: { experimentId },
       headers: { authorization: 'mockAuthorization', 'x-amz-sns-topic-arn': receivedTopicArn },
-      body: JSON.stringify({ paramsHash: 'mockParamsHash' }),
+
     };
 
     await expect(parseSNSMessage(mockReq, expectedTopicArn)).rejects.toThrow(new Error('SNS topic doesn\'t match'));
@@ -45,17 +45,17 @@ describe('parseSNSMessage', () => {
     const mockReq = {
       params: { experimentId },
       headers: { authorization: 'mockAuthorization', 'x-amz-sns-topic-arn': expectedTopicArn },
-      body: '{ paramsHash: /invalid/ }',
+      body: '{ invalid: /body/ }',
     };
 
-    await expect(parseSNSMessage(mockReq, expectedTopicArn)).rejects.toThrow(new Error('Unexpected token p in JSON at position 2'));
+    await expect(parseSNSMessage(mockReq, expectedTopicArn)).rejects.toThrow(new Error('Unexpected token i in JSON at position 2'));
   });
 
   it('Fails if request body parsing doesnt work', async () => {
     const mockReq = {
       params: { experimentId },
       headers: { authorization: 'mockAuthorization', 'x-amz-sns-topic-arn': expectedTopicArn },
-      body: JSON.stringify({ paramsHash: 'mockParamsHash' }),
+      body: JSON.stringify({}),
     };
 
     mockValidate.mockImplementation(() => { throw new Error('Validation error'); });
@@ -67,7 +67,7 @@ describe('parseSNSMessage', () => {
     const mockReq = {
       params: { experimentId },
       headers: { authorization: 'mockAuthorization', 'x-amz-sns-topic-arn': expectedTopicArn },
-      body: JSON.stringify({ paramsHash: 'mockParamsHash' }),
+      body: JSON.stringify({}),
     };
 
     const mockMsg = 'mockMsg';
@@ -87,7 +87,7 @@ describe('parseSNSMessage', () => {
     const mockReq = {
       params: { experimentId },
       headers: { authorization: 'mockAuthorization', 'x-amz-sns-topic-arn': expectedTopicArn },
-      body: JSON.stringify({ paramsHash: 'mockParamsHash' }),
+      body: JSON.stringify({}),
     };
 
     const mockMsg = 'mockMsg';
@@ -108,7 +108,7 @@ describe('parseSNSMessage', () => {
     const mockReq = {
       params: { experimentId },
       headers: { authorization: 'mockAuthorization', 'x-amz-sns-topic-arn': expectedTopicArn },
-      body: JSON.stringify({ paramsHash: 'mockParamsHash' }),
+      body: JSON.stringify({}),
       app: { get: jest.fn(() => mockIo) },
     };
 

From 860e36134d60a01d6bfd8adffd5f0174d18a6cec Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Fri, 3 Mar 2023 14:31:54 +0000
Subject: [PATCH 46/65] create params hash migration

---
 ...20230303140501_delete_params_hash_field.js | 19 +++++++++++++++++++
 1 file changed, 19 insertions(+)
 create mode 100644 src/sql/migrations/20230303140501_delete_params_hash_field.js

diff --git a/src/sql/migrations/20230303140501_delete_params_hash_field.js b/src/sql/migrations/20230303140501_delete_params_hash_field.js
new file mode 100644
index 000000000..5db8c9a9e
--- /dev/null
+++ b/src/sql/migrations/20230303140501_delete_params_hash_field.js
@@ -0,0 +1,19 @@
+/**
+ * @param { import("knex").Knex } knex
+ * @returns { Promise<void> }
+ */
+exports.up = async function (knex) {
+  await knex.schema.alterTable('experiment_execution', (table) => {
+    table.dropColumn('params_hash');
+  });
+};
+
+/**
+ * @param { import("knex").Knex } knex
+ * @returns { Promise<void> }
+ */
+exports.down = async function (knex) {
+  await knex.schema.alterTable('experiment_execution', (table) => {
+    table.string('params_hash').nullable();
+  });
+};

From f040419a239ae728cc76102b277b52ac7bf67a4f Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Fri, 3 Mar 2023 12:08:42 -0300
Subject: [PATCH 47/65] TMP disable unit tests to be able to test in staging

---
 .github/workflows/ci.yaml | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index b60770f3a..13b4806a4 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -78,18 +78,18 @@ jobs:
         git config --global url."https://".insteadOf ssh://
         npm ci
 
-    - id: test-codecov
-      name: Run unit tests with coverage
-      uses: mattallty/jest-github-action@v1
-      env:
-        AWS_DEFAULT_REGION: 'eu-west-1'
-        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      with:
-        test-command: "npm run coverage"
-        coverage-comment: false
-
-    - name: Upload coverage to Codecov
-      uses: codecov/codecov-action@v1
+    # - id: test-codecov
+    #   name: Run unit tests with coverage
+    #   uses: mattallty/jest-github-action@v1
+    #   env:
+    #     AWS_DEFAULT_REGION: 'eu-west-1'
+    #     GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    #   with:
+    #     test-command: "npm run coverage"
+    #     coverage-comment: false
+
+    # - name: Upload coverage to Codecov
+    #   uses: codecov/codecov-action@v1
 
   build-docker:
     name: Build Docker container

From 1b5c7e154e5a2edf5274d5fd177041c630cffc5f Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Fri, 3 Mar 2023 13:25:36 -0300
Subject: [PATCH 48/65] Add the submitMarkerHeatmap from the API but disabled
 and with an explanation

---
 .../helpers/pipeline/handleQCResponse.js      |  6 +++++
 .../workSubmit/submitMarkerHeatmapWork.js     | 24 +++++++++++++++++++
 2 files changed, 30 insertions(+)
 create mode 100644 src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js

diff --git a/src/api.v2/helpers/pipeline/handleQCResponse.js b/src/api.v2/helpers/pipeline/handleQCResponse.js
index eb741d684..e7db1bc4a 100644
--- a/src/api.v2/helpers/pipeline/handleQCResponse.js
+++ b/src/api.v2/helpers/pipeline/handleQCResponse.js
@@ -18,6 +18,7 @@ const getPipelineStatus = require('./getPipelineStatus');
 const Experiment = require('../../model/Experiment');
 const Plot = require('../../model/Plot');
 const submitEmbeddingWork = require('../worker/workSubmit/submitEmbeddingWork');
+// const submitMarkerHeatmapWork = require('../worker/workSubmit/submitMarkerHeatmapWork');
 
 const logger = getLogger();
 
@@ -25,6 +26,11 @@ const hookRunner = new HookRunner();
 
 hookRunner.register(constants.ASSIGN_POD_TO_PIPELINE, [assignPodToPipeline]);
 hookRunner.registerAll([sendNotification]);
+// Temporarily disable api submitMarkerHeatmapWork
+// Running it for bigger experiments has the potential to make the api crash
+// or get blocked for a pretty long time
+// hookRunner.register('configureEmbedding', [cleanupPods, updatePipelineVersion,
+//  submitEmbeddingWork, submitMarkerHeatmapWork]);
 hookRunner.register('configureEmbedding', [cleanupPods, updatePipelineVersion, submitEmbeddingWork]);
 
 const getOutputFromS3 = async (message) => {
diff --git a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
new file mode 100644
index 000000000..aa4a402d2
--- /dev/null
+++ b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
@@ -0,0 +1,24 @@
+const getExtraDependencies = require('./getExtraDependencies');
+const submitWork = require('./submitWork');
+
+
+const submitMarkerHeatmapWork = async (message) => {
+  const { experimentId, input: { authJWT } } = message;
+
+  const numGenes = 5;
+  const selectedCellSet = 'louvain';
+
+  const body = {
+    name: 'MarkerHeatmap',
+    nGenes: numGenes,
+    cellSetKey: selectedCellSet,
+  };
+
+  const extraDependencies = await getExtraDependencies(body.name, message);
+  const ETag = await submitWork(experimentId, authJWT, body, extraDependencies);
+
+  // explicitly return ETag to make it stand out more in tests and so harder to break
+  return ETag;
+};
+
+module.exports = submitMarkerHeatmapWork;

From 5e62318b560523ae7d379842947424fad0b087d9 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Fri, 3 Mar 2023 16:15:50 -0300
Subject: [PATCH 49/65] Add one comment

---
 src/api.v2/helpers/pipeline/handleQCResponse.js | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/api.v2/helpers/pipeline/handleQCResponse.js b/src/api.v2/helpers/pipeline/handleQCResponse.js
index e7db1bc4a..c94e1b973 100644
--- a/src/api.v2/helpers/pipeline/handleQCResponse.js
+++ b/src/api.v2/helpers/pipeline/handleQCResponse.js
@@ -29,6 +29,7 @@ hookRunner.registerAll([sendNotification]);
 // Temporarily disable api submitMarkerHeatmapWork
 // Running it for bigger experiments has the potential to make the api crash
 // or get blocked for a pretty long time
+// Enable when the downsampling is moved to the worker
 // hookRunner.register('configureEmbedding', [cleanupPods, updatePipelineVersion,
 //  submitEmbeddingWork, submitMarkerHeatmapWork]);
 hookRunner.register('configureEmbedding', [cleanupPods, updatePipelineVersion, submitEmbeddingWork]);

From 659bc538723320a7469ee95133225d39a6901822 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Fri, 3 Mar 2023 16:33:38 -0300
Subject: [PATCH 50/65] Update WorkRequestMarkerGenes

---
 .../WorkRequestMarkerGenes.v2.yaml                   | 12 +++++++++---
 1 file changed, 9 insertions(+), 3 deletions(-)

diff --git a/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml b/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml
index ef42868e9..221538da1 100644
--- a/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml
+++ b/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml
@@ -11,10 +11,16 @@ properties:
     description: Amount of marker genes to get
   cellSetKey:
     type: string
-    description: Cell set key to be be used in the heatmap
-  cellIds:
+    description: the cellClass key to be used in the marker genes
+  groupByClasses:
     type: array
-    description: CellIds
+    description: The keys of the cellSet classes that the cells are grouped by
+  selectedPoints:
+    type: string
+    description: Can be either 'All' or the key of a cellClass
+  hiddenCellSetKeys: 
+    type: array
+    description: The keys of the cellSets that are hidden
 required:
   - name
   - nGenes

From a96e0832e259a0231620856f735a766bb1ef9c7d Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Fri, 3 Mar 2023 19:21:12 -0300
Subject: [PATCH 51/65] WIP - Comment out the validation so the work request
 goes through

---
 .../WorkRequestMarkerGenes.v2.yaml            | 22 +++++++++++--------
 1 file changed, 13 insertions(+), 9 deletions(-)

diff --git a/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml b/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml
index 221538da1..5a99e092b 100644
--- a/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml
+++ b/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml
@@ -12,15 +12,19 @@ properties:
   cellSetKey:
     type: string
     description: the cellClass key to be used in the marker genes
-  groupByClasses:
-    type: array
-    description: The keys of the cellSet classes that the cells are grouped by
-  selectedPoints:
-    type: string
-    description: Can be either 'All' or the key of a cellClass
-  hiddenCellSetKeys: 
-    type: array
-    description: The keys of the cellSets that are hidden
+  # groupByClasses:
+  #   description: The keys of the cellSet classes that the cells are grouped by
+  #   type: array
+  #   items:
+  #     type: string
+  # selectedPoints:
+  #   type: string
+  #   description: Can be either `All` or the key of a cellClass
+  # hiddenCellSetKeys:
+  #   description: The keys of the cellSets that are hidden
+  #   type: array
+  #   items:
+  #     type: string
 required:
   - name
   - nGenes

From 35b1ed5944e0d6943c5636da07d24545eed6e99f Mon Sep 17 00:00:00 2001
From: Anugerah Erlaut <aerlaut@live.com>
Date: Mon, 6 Mar 2023 11:43:53 +0000
Subject: [PATCH 52/65] add sandbox id to batch jobdef

---
 .../pipeline/pipelineConstruct/constructors/submitBatchJob.js   | 2 +-
 .../pipeline/__snapshots__/stateMachineSkeletons.test.js.snap   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/submitBatchJob.js b/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/submitBatchJob.js
index a36cd0f65..f037f518d 100644
--- a/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/submitBatchJob.js
+++ b/src/api.v2/helpers/pipeline/pipelineConstruct/constructors/submitBatchJob.js
@@ -16,7 +16,7 @@ const submitBatchJob = (context, step) => {
     Type: 'Task',
     Resource: 'arn:aws:states:::batch:submitJob',
     Parameters: {
-      JobDefinition: `job-pipeline-${environment}`,
+      JobDefinition: `job-pipeline-${environment}-${config.sandboxId}`,
       JobName: `${environment}-${experimentId}-${processName}`, // changing the name will break job termination when a new one is submitted
       JobQueue: `queue-pipeline-${environment}`,
       ContainerOverrides: {
diff --git a/tests/api.v2/helpers/pipeline/__snapshots__/stateMachineSkeletons.test.js.snap b/tests/api.v2/helpers/pipeline/__snapshots__/stateMachineSkeletons.test.js.snap
index a10eb1858..681e91a95 100644
--- a/tests/api.v2/helpers/pipeline/__snapshots__/stateMachineSkeletons.test.js.snap
+++ b/tests/api.v2/helpers/pipeline/__snapshots__/stateMachineSkeletons.test.js.snap
@@ -1480,7 +1480,7 @@ exports[`non-tests to document the State Machines - qc production with specific
       "Type": "Task",
       "Resource": "arn:aws:states:::batch:submitJob",
       "Parameters": {
-        "JobDefinition": "job-pipeline-production",
+        "JobDefinition": "job-pipeline-production-default",
         "JobName": "production-mock-experiment-id-qc",
         "JobQueue": "queue-pipeline-production",
         "ContainerOverrides": {

From 3936f7bfbf464c37f909c94eec76793aaf56027f Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Mon, 6 Mar 2023 14:18:25 -0300
Subject: [PATCH 53/65] Reenable submitMarkerHeatmapWork and add new params

---
 src/api.v2/helpers/pipeline/handleQCResponse.js        | 10 ++--------
 .../worker/workSubmit/submitMarkerHeatmapWork.js       |  3 +++
 2 files changed, 5 insertions(+), 8 deletions(-)

diff --git a/src/api.v2/helpers/pipeline/handleQCResponse.js b/src/api.v2/helpers/pipeline/handleQCResponse.js
index c94e1b973..15958d7ae 100644
--- a/src/api.v2/helpers/pipeline/handleQCResponse.js
+++ b/src/api.v2/helpers/pipeline/handleQCResponse.js
@@ -18,7 +18,7 @@ const getPipelineStatus = require('./getPipelineStatus');
 const Experiment = require('../../model/Experiment');
 const Plot = require('../../model/Plot');
 const submitEmbeddingWork = require('../worker/workSubmit/submitEmbeddingWork');
-// const submitMarkerHeatmapWork = require('../worker/workSubmit/submitMarkerHeatmapWork');
+const submitMarkerHeatmapWork = require('../worker/workSubmit/submitMarkerHeatmapWork');
 
 const logger = getLogger();
 
@@ -26,13 +26,7 @@ const hookRunner = new HookRunner();
 
 hookRunner.register(constants.ASSIGN_POD_TO_PIPELINE, [assignPodToPipeline]);
 hookRunner.registerAll([sendNotification]);
-// Temporarily disable api submitMarkerHeatmapWork
-// Running it for bigger experiments has the potential to make the api crash
-// or get blocked for a pretty long time
-// Enable when the downsampling is moved to the worker
-// hookRunner.register('configureEmbedding', [cleanupPods, updatePipelineVersion,
-//  submitEmbeddingWork, submitMarkerHeatmapWork]);
-hookRunner.register('configureEmbedding', [cleanupPods, updatePipelineVersion, submitEmbeddingWork]);
+hookRunner.register('configureEmbedding', [cleanupPods, updatePipelineVersion, submitEmbeddingWork, submitMarkerHeatmapWork]);
 
 const getOutputFromS3 = async (message) => {
   const { output: { bucket, key } } = message;
diff --git a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
index aa4a402d2..74b2101f0 100644
--- a/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
+++ b/src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork.js
@@ -12,6 +12,9 @@ const submitMarkerHeatmapWork = async (message) => {
     name: 'MarkerHeatmap',
     nGenes: numGenes,
     cellSetKey: selectedCellSet,
+    groupByClasses: ['louvain'],
+    selectedPoints: 'All',
+    hiddenCellSetKeys: [],
   };
 
   const extraDependencies = await getExtraDependencies(body.name, message);

From cb7d691bf928f06bcfb779e5d9a5ec4a76627c26 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Mon, 6 Mar 2023 14:18:47 -0300
Subject: [PATCH 54/65] Add tests again

---
 .../worker/submitMarkerHeatmapWork.test.js    | 64 +++++++++++++++++++
 1 file changed, 64 insertions(+)
 create mode 100644 tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js

diff --git a/tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js b/tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js
new file mode 100644
index 000000000..60327796a
--- /dev/null
+++ b/tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js
@@ -0,0 +1,64 @@
+const createObjectHash = require('../../../../src/api.v2/helpers/worker/createObjectHash');
+const submitMarkerHeatmapWork = require('../../../../src/api.v2/helpers/worker/workSubmit/submitMarkerHeatmapWork');
+const validateAndSubmitWork = require('../../../../src/api.v2/events/validateAndSubmitWork');
+
+jest.mock('../../../../src/api.v2/helpers/worker/createObjectHash');
+jest.mock('../../../../src/api.v2/helpers/pipeline/getPipelineStatus');
+jest.mock('../../../../src/api.v2/helpers/worker/getWorkerStatus');
+jest.mock('../../../../src/api.v2/events/validateAndSubmitWork');
+
+
+const message = {
+  experimentId: '6463cb35-3e08-4e94-a181-6d155a5ca570',
+  taskName: 'configureEmbedding',
+  input: {
+    experimentId: '6463cb35-3e08-4e94-a181-6d155a5ca570',
+    taskName: 'configureEmbedding',
+    processName: 'qc',
+    sampleUuid: '',
+    uploadCountMatrix: false,
+    authJWT: 'Bearer whatever',
+    config: {
+      embeddingSettings: {
+        method: 'umap',
+        methodSettings: {
+          tsne: { perplexity: 30, learningRate: 200 },
+          umap: { distanceMetric: 'cosine', minimumDistance: 0.3 },
+        },
+      },
+      clusteringSettings: {
+        method: 'louvain',
+        methodSettings: { louvain: { resolution: 0.8 } },
+      },
+    },
+
+  },
+  output: {
+    bucket: 'worker-results-development-000000000000',
+    key: '0eabfedf-0efe-4abf-8725-7062c54ed5e1',
+  },
+  response: { error: false },
+  pipelineVersion: 2,
+  apiUrl: null,
+};
+
+describe('submitWorkEmbedding', () => {
+  // If this test fails it means you have changed parameters upon which the feature or precomputing
+  // the embedding / marker heatmap feature depends on. These parameters are duplicated
+  // in the UI / API if you have changed them here, make sure you change them in the
+  // other repository or that feature will stop working.
+  it('submits the work and the ETag / params are correct', async () => {
+    const ETag = await submitMarkerHeatmapWork(message);
+
+
+    // these are the parameters used to created the ETag and
+    // they should match exactly UI snapshot:
+    // loadMarkerGenes.defaultParams.test.js.snap
+    expect(createObjectHash.mock.calls).toMatchSnapshot();
+    // this ETag should match exactly the one in
+    // loadMarkerGenes.defaultParams.test.js
+    expect(ETag).toEqual('9db473fff00ea358446196ee3276f486'); // pragma: allowlist secret
+
+    expect(validateAndSubmitWork).toBeCalledTimes(1);
+  });
+});

From 6ae58735c57abaaa02be8628cce521fcce68c468 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Mon, 6 Mar 2023 15:50:54 -0300
Subject: [PATCH 55/65] Update tests

---
 .../__snapshots__/submitMarkerHeatmapWork.test.js.snap       | 5 +++++
 tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js  | 2 +-
 2 files changed, 6 insertions(+), 1 deletion(-)

diff --git a/tests/api.v2/helpers/worker/__snapshots__/submitMarkerHeatmapWork.test.js.snap b/tests/api.v2/helpers/worker/__snapshots__/submitMarkerHeatmapWork.test.js.snap
index 9501bcd70..f5c7bd6e9 100644
--- a/tests/api.v2/helpers/worker/__snapshots__/submitMarkerHeatmapWork.test.js.snap
+++ b/tests/api.v2/helpers/worker/__snapshots__/submitMarkerHeatmapWork.test.js.snap
@@ -6,8 +6,13 @@ Array [
     Object {
       "body": Object {
         "cellSetKey": "louvain",
+        "groupByClasses": Array [
+          "louvain",
+        ],
+        "hiddenCellSetKeys": Array [],
         "nGenes": 5,
         "name": "MarkerHeatmap",
+        "selectedPoints": "All",
       },
       "cacheUniquenessKey": null,
       "experimentId": "6463cb35-3e08-4e94-a181-6d155a5ca570",
diff --git a/tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js b/tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js
index 60327796a..43926a4db 100644
--- a/tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js
+++ b/tests/api.v2/helpers/worker/submitMarkerHeatmapWork.test.js
@@ -57,7 +57,7 @@ describe('submitWorkEmbedding', () => {
     expect(createObjectHash.mock.calls).toMatchSnapshot();
     // this ETag should match exactly the one in
     // loadMarkerGenes.defaultParams.test.js
-    expect(ETag).toEqual('9db473fff00ea358446196ee3276f486'); // pragma: allowlist secret
+    expect(ETag).toEqual('a973357e8c6273f0f154a179b12c6d25'); // pragma: allowlist secret
 
     expect(validateAndSubmitWork).toBeCalledTimes(1);
   });

From c81f444d4563b9adfcf0c074e4bf1d1a45930903 Mon Sep 17 00:00:00 2001
From: stefanbabukov <stefanbabukov98@gmail.com>
Date: Wed, 8 Mar 2023 12:34:20 +0000
Subject: [PATCH 56/65] update snapshot

---
 tests/api.v2/helpers/s3/__snapshots__/getS3Client.test.js.snap | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/tests/api.v2/helpers/s3/__snapshots__/getS3Client.test.js.snap b/tests/api.v2/helpers/s3/__snapshots__/getS3Client.test.js.snap
index e84829656..864751150 100644
--- a/tests/api.v2/helpers/s3/__snapshots__/getS3Client.test.js.snap
+++ b/tests/api.v2/helpers/s3/__snapshots__/getS3Client.test.js.snap
@@ -3,6 +3,7 @@
 exports[`getS3Client Returns an S3 client with defau lt config values if not given any params 1`] = `
 Object {
   "apiVersion": "2006-03-01",
+  "maxRetries": 3,
   "region": "eu-west-1",
   "signatureVersion": "v4",
 }
@@ -12,6 +13,7 @@ exports[`getS3Client Takes in params and return S3 client with those params 1`]
 Object {
   "apiVersion": "2006-03-01",
   "endpointUrl": "https://s3.biomage-cloud.com",
+  "maxRetries": 3,
   "region": "us-east-1",
   "signatureVersion": "v4",
 }

From 2524bebc98681e7c7009b2ad891612d8fbd56217 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Wed, 8 Mar 2023 09:44:02 -0300
Subject: [PATCH 57/65] Update WorkRequestMarkerGenes.v2.yaml

---
 .../WorkRequestMarkerGenes.v2.yaml            | 29 ++++++++++---------
 1 file changed, 16 insertions(+), 13 deletions(-)

diff --git a/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml b/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml
index 5a99e092b..1e659c967 100644
--- a/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml
+++ b/src/specs/models/work-request-bodies/WorkRequestMarkerGenes.v2.yaml
@@ -12,20 +12,23 @@ properties:
   cellSetKey:
     type: string
     description: the cellClass key to be used in the marker genes
-  # groupByClasses:
-  #   description: The keys of the cellSet classes that the cells are grouped by
-  #   type: array
-  #   items:
-  #     type: string
-  # selectedPoints:
-  #   type: string
-  #   description: Can be either `All` or the key of a cellClass
-  # hiddenCellSetKeys:
-  #   description: The keys of the cellSets that are hidden
-  #   type: array
-  #   items:
-  #     type: string
+  groupByClasses:
+    description: The keys of the cellSet classes that the cells are grouped by
+    type: array
+    items:
+      type: string
+  selectedPoints:
+    type: string
+    description: Can be either `All` or the key of a cellClass
+  hiddenCellSetKeys:
+    description: The keys of the cellSets that are hidden
+    type: array
+    items:
+      type: string
 required:
   - name
   - nGenes
   - cellSetKey
+  - groupByClasses
+  - selectedPoints
+  - hiddenCellSetKeys
\ No newline at end of file

From 217d95a36742b5dc04793f2f078396ebca3f25f0 Mon Sep 17 00:00:00 2001
From: Pol Alvarez <pol.avms@gmail.com>
Date: Wed, 8 Mar 2023 17:54:24 +0100
Subject: [PATCH 58/65] using batch by default

---
 src/api.v2/controllers/experimentController.js    | 14 +++++++++++++-
 .../experimentController.test.js.snap             | 15 +++++++++++++++
 .../controllers/experimentController.test.js      |  6 +-----
 3 files changed, 29 insertions(+), 6 deletions(-)
 create mode 100644 tests/api.v2/controllers/__snapshots__/experimentController.test.js.snap

diff --git a/src/api.v2/controllers/experimentController.js b/src/api.v2/controllers/experimentController.js
index 9304e5e1d..b103582b0 100644
--- a/src/api.v2/controllers/experimentController.js
+++ b/src/api.v2/controllers/experimentController.js
@@ -12,9 +12,17 @@ const Sample = require('../model/Sample');
 const invalidatePlotsForEvent = require('../../utils/plotConfigInvalidation/invalidatePlotsForEvent');
 const events = require('../../utils/plotConfigInvalidation/events');
 const getAdminSub = require('../../utils/getAdminSub');
+const config = require('../../config');
 
 const logger = getLogger('[ExperimentController] - ');
 
+const getDefaultCPUMem = (env) => {
+  if (env === 'development') {
+    return { podCPUs: null, podMemory: null };
+  }
+  return { podCPUs: 2, podMemory: 28000 };
+};
+
 const getAllExperiments = async (req, res) => {
   const { user: { sub: userId } } = req;
   logger.log(`Getting all experiments for user: ${userId}`);
@@ -49,8 +57,12 @@ const createExperiment = async (req, res) => {
   const { name, description } = body;
   logger.log('Creating experiment');
 
+  const { podCPUs, podMemory } = getDefaultCPUMem(config.clusterEnv);
+
   await sqlClient.get().transaction(async (trx) => {
-    await new Experiment(trx).create({ id: experimentId, name, description });
+    await new Experiment(trx).create({
+      id: experimentId, name, description, pod_cpus: podCPUs, pod_memory: podMemory,
+    });
     await new UserAccess(trx).createNewExperimentPermissions(user.sub, experimentId);
   });
 
diff --git a/tests/api.v2/controllers/__snapshots__/experimentController.test.js.snap b/tests/api.v2/controllers/__snapshots__/experimentController.test.js.snap
new file mode 100644
index 000000000..df0a76b8e
--- /dev/null
+++ b/tests/api.v2/controllers/__snapshots__/experimentController.test.js.snap
@@ -0,0 +1,15 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`experimentController createExperiment works correctly 1`] = `
+Array [
+  Array [
+    Object {
+      "description": "mockDescription",
+      "id": "mockExperimentId",
+      "name": "mockName",
+      "pod_cpus": 2,
+      "pod_memory": 28000,
+    },
+  ],
+]
+`;
diff --git a/tests/api.v2/controllers/experimentController.test.js b/tests/api.v2/controllers/experimentController.test.js
index 94d6f43da..6dd21decd 100644
--- a/tests/api.v2/controllers/experimentController.test.js
+++ b/tests/api.v2/controllers/experimentController.test.js
@@ -124,11 +124,7 @@ describe('experimentController', () => {
     expect(Experiment).not.toHaveBeenCalledWith(mockSqlClient);
     expect(UserAccess).not.toHaveBeenCalledWith(mockSqlClient);
 
-    expect(experimentInstance.create).toHaveBeenCalledWith({
-      id: mockExperiment.id,
-      name: 'mockName',
-      description: 'mockDescription',
-    });
+    expect(experimentInstance.create.mock.calls).toMatchSnapshot();
 
     expect(userAccessInstance.createNewExperimentPermissions).toHaveBeenCalledWith('mockSub', mockExperiment.id);
 

From 8f2ef893e793376afea695d2e7c52760f1fd7854 Mon Sep 17 00:00:00 2001
From: Pol Alvarez <pol.avms@gmail.com>
Date: Thu, 9 Mar 2023 10:39:38 +0100
Subject: [PATCH 59/65] changed default memory values for staging

---
 src/api.v2/controllers/experimentController.js | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/src/api.v2/controllers/experimentController.js b/src/api.v2/controllers/experimentController.js
index b103582b0..30cbaeaca 100644
--- a/src/api.v2/controllers/experimentController.js
+++ b/src/api.v2/controllers/experimentController.js
@@ -17,10 +17,14 @@ const config = require('../../config');
 const logger = getLogger('[ExperimentController] - ');
 
 const getDefaultCPUMem = (env) => {
-  if (env === 'development') {
-    return { podCPUs: null, podMemory: null };
+  switch (env) {
+    case 'development':
+      return { podCPUs: null, podMemory: null };
+    case 'staging':
+      return { podCPUs: 1, podMemory: 14000 };
+    default:
+      return { podCPUs: 2, podMemory: 28000 };
   }
-  return { podCPUs: 2, podMemory: 28000 };
 };
 
 const getAllExperiments = async (req, res) => {

From 0a67599fef1969c93837125d08900af4328ffc6c Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Thu, 9 Mar 2023 14:44:29 -0300
Subject: [PATCH 60/65] Reenable tests

---
 .github/workflows/ci.yaml | 24 ++++++++++++------------
 1 file changed, 12 insertions(+), 12 deletions(-)

diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 13b4806a4..b60770f3a 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -78,18 +78,18 @@ jobs:
         git config --global url."https://".insteadOf ssh://
         npm ci
 
-    # - id: test-codecov
-    #   name: Run unit tests with coverage
-    #   uses: mattallty/jest-github-action@v1
-    #   env:
-    #     AWS_DEFAULT_REGION: 'eu-west-1'
-    #     GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-    #   with:
-    #     test-command: "npm run coverage"
-    #     coverage-comment: false
-
-    # - name: Upload coverage to Codecov
-    #   uses: codecov/codecov-action@v1
+    - id: test-codecov
+      name: Run unit tests with coverage
+      uses: mattallty/jest-github-action@v1
+      env:
+        AWS_DEFAULT_REGION: 'eu-west-1'
+        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      with:
+        test-command: "npm run coverage"
+        coverage-comment: false
+
+    - name: Upload coverage to Codecov
+      uses: codecov/codecov-action@v1
 
   build-docker:
     name: Build Docker container

From 852545e3086cd2a84dd7e3d48aa47fe6e82ab4a2 Mon Sep 17 00:00:00 2001
From: cosa65 <martin@biomage.net>
Date: Thu, 9 Mar 2023 14:44:58 -0300
Subject: [PATCH 61/65] Make linter happy

---
 src/sql/migrations/20230303140501_delete_params_hash_field.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/sql/migrations/20230303140501_delete_params_hash_field.js b/src/sql/migrations/20230303140501_delete_params_hash_field.js
index 5db8c9a9e..96d684e79 100644
--- a/src/sql/migrations/20230303140501_delete_params_hash_field.js
+++ b/src/sql/migrations/20230303140501_delete_params_hash_field.js
@@ -2,7 +2,7 @@
  * @param { import("knex").Knex } knex
  * @returns { Promise<void> }
  */
-exports.up = async function (knex) {
+exports.up = async (knex) => {
   await knex.schema.alterTable('experiment_execution', (table) => {
     table.dropColumn('params_hash');
   });
@@ -12,7 +12,7 @@ exports.up = async function (knex) {
  * @param { import("knex").Knex } knex
  * @returns { Promise<void> }
  */
-exports.down = async function (knex) {
+exports.down = async (knex) => {
   await knex.schema.alterTable('experiment_execution', (table) => {
     table.string('params_hash').nullable();
   });

From 8841fee433e61d65f74e5343cec616dcceda0403 Mon Sep 17 00:00:00 2001
From: Alex Pickering <alexvpickering@gmail.com>
Date: Tue, 14 Mar 2023 12:58:13 -0700
Subject: [PATCH 62/65] update snaps

Signed-off-by: Alex Pickering <alexvpickering@gmail.com>
---
 .../default-config-production.test.js.snap    | 41 ++++++++++++++++++-
 ...ault-config-staging-sandboxid.test.js.snap |  1 +
 .../default-config-staging.test.js.snap       |  1 +
 3 files changed, 41 insertions(+), 2 deletions(-)

diff --git a/tests/config/__snapshots__/default-config-production.test.js.snap b/tests/config/__snapshots__/default-config-production.test.js.snap
index 20d0fcb23..a3a0efc60 100644
--- a/tests/config/__snapshots__/default-config-production.test.js.snap
+++ b/tests/config/__snapshots__/default-config-production.test.js.snap
@@ -1,11 +1,12 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP
 
-exports[`default-config Returns correct values for production 1`] = `
+exports[`default-config Returns correct values for BIOMAGE production 1`] = `
 Object {
+  "adminSub": "032abd44-0cd3-4d58-af21-850ca0b95ac7",
   "api": Object {
     "prefix": "/",
   },
-  "awsAccountId": "000000000000",
+  "awsAccountId": "242905224710",
   "awsRegion": "eu-west-1",
   "cachingEnabled": true,
   "clusterEnv": "production",
@@ -35,3 +36,39 @@ Object {
   "workerVersion": 3,
 }
 `;
+
+exports[`default-config Returns correct values for HMS production 1`] = `
+Object {
+  "adminSub": "a01e8bcc-c9a2-4c56-bd66-39de93764be8",
+  "api": Object {
+    "prefix": "/",
+  },
+  "awsAccountId": "160782110667",
+  "awsRegion": "eu-west-1",
+  "cachingEnabled": true,
+  "clusterEnv": "production",
+  "cognitoISP": mockConstructor {
+    "CALL_EVENTS_BUBBLE": [MockFunction],
+    "MONITOR_EVENTS_BUBBLE": [MockFunction],
+  },
+  "corsOriginUrl": Array [
+    "https://sandbox.elabjournal.com",
+    "https://elabjournal.com",
+    "https://localhost:5000",
+  ],
+  "datadogApiKey": "",
+  "datadogAppKey": "",
+  "domainName": "localhost:5000",
+  "emailDomainName": "https://localhost:5000",
+  "pipelineIgnoreSSLCertificate": false,
+  "pipelineInstanceConfigUrl": "https://raw.githubusercontent.com/hms-dbmi-cellenics/releases/master/production/pipeline.yaml",
+  "pipelineNamespace": "pipeline-default",
+  "podName": "local",
+  "port": 3000,
+  "publicApiUrl": "https://api.localhost:5000",
+  "rdsSandboxId": "default",
+  "sandboxId": "default",
+  "workerNamespace": "worker-default",
+  "workerVersion": 3,
+}
+`;
diff --git a/tests/config/__snapshots__/default-config-staging-sandboxid.test.js.snap b/tests/config/__snapshots__/default-config-staging-sandboxid.test.js.snap
index 5f177824a..beab39490 100644
--- a/tests/config/__snapshots__/default-config-staging-sandboxid.test.js.snap
+++ b/tests/config/__snapshots__/default-config-staging-sandboxid.test.js.snap
@@ -2,6 +2,7 @@
 
 exports[`default-config Returns correct values for staging sandboxid 1`] = `
 Object {
+  "adminSub": "0b17683f-363b-4466-b2e2-5bf11c38a76e",
   "api": Object {
     "prefix": "/",
   },
diff --git a/tests/config/__snapshots__/default-config-staging.test.js.snap b/tests/config/__snapshots__/default-config-staging.test.js.snap
index 9f3dae9be..51101ce4a 100644
--- a/tests/config/__snapshots__/default-config-staging.test.js.snap
+++ b/tests/config/__snapshots__/default-config-staging.test.js.snap
@@ -2,6 +2,7 @@
 
 exports[`default-config Returns correct values for staging default 1`] = `
 Object {
+  "adminSub": "0b17683f-363b-4466-b2e2-5bf11c38a76e",
   "api": Object {
     "prefix": "/",
   },

From 2d5e6f2eda200d4fbd0832b1b79bab1642367d67 Mon Sep 17 00:00:00 2001
From: Alex Pickering <alexvpickering@gmail.com>
Date: Mon, 27 Mar 2023 09:30:38 -0700
Subject: [PATCH 63/65] Rename last_gem2s_params column to last_pipeline_params

Signed-off-by: Alex Pickering <alexvpickering@gmail.com>
---
 .../20230117130009_add_last_gem2s_params.js           |  4 ++--
 .../20230117130009_add_last_pipeline_params.js        | 11 +++++++++++
 2 files changed, 13 insertions(+), 2 deletions(-)
 create mode 100644 src/sql/migrations/20230117130009_add_last_pipeline_params.js

diff --git a/src/sql/migrations/20230117130009_add_last_gem2s_params.js b/src/sql/migrations/20230117130009_add_last_gem2s_params.js
index 3d01a8340..3ef563a4f 100644
--- a/src/sql/migrations/20230117130009_add_last_gem2s_params.js
+++ b/src/sql/migrations/20230117130009_add_last_gem2s_params.js
@@ -1,11 +1,11 @@
 exports.up = async (knex) => {
   await knex.schema.alterTable('experiment_execution', (table) => {
-    table.jsonb('last_pipeline_params').nullable().defaultTo(null);
+    table.jsonb('last_gem2s_params').nullable().defaultTo(null);
   });
 };
 
 exports.down = async (knex) => {
   await knex.schema.alterTable('experiment_execution', (table) => {
-    table.dropColumn('last_pipeline_params');
+    table.dropColumn('last_gem2s_params');
   });
 };
diff --git a/src/sql/migrations/20230117130009_add_last_pipeline_params.js b/src/sql/migrations/20230117130009_add_last_pipeline_params.js
new file mode 100644
index 000000000..bc3767b80
--- /dev/null
+++ b/src/sql/migrations/20230117130009_add_last_pipeline_params.js
@@ -0,0 +1,11 @@
+exports.up = async (knex) => {
+  await knex.schema.renameColumn('experiment_execution', (table) => {
+    table.renameColumn('last_gem2s_params', 'last_pipeline_params');
+  });
+};
+
+exports.down = async (knex) => {
+  await knex.schema.renameColumn('experiment_execution', (table) => {
+    table.renameColumn('last_pipeline_params', 'last_gem2s_params');
+  });
+};

From dbc8dc9af7c64336fba391e36cd4623d8ed337e4 Mon Sep 17 00:00:00 2001
From: Alex Pickering <alexvpickering@gmail.com>
Date: Mon, 27 Mar 2023 09:32:38 -0700
Subject: [PATCH 64/65] fix migration

Signed-off-by: Alex Pickering <alexvpickering@gmail.com>
---
 src/sql/migrations/20230117130009_add_last_pipeline_params.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/sql/migrations/20230117130009_add_last_pipeline_params.js b/src/sql/migrations/20230117130009_add_last_pipeline_params.js
index bc3767b80..f5fb7538d 100644
--- a/src/sql/migrations/20230117130009_add_last_pipeline_params.js
+++ b/src/sql/migrations/20230117130009_add_last_pipeline_params.js
@@ -1,11 +1,11 @@
 exports.up = async (knex) => {
-  await knex.schema.renameColumn('experiment_execution', (table) => {
+  await knex.schema.alterTable('experiment_execution', (table) => {
     table.renameColumn('last_gem2s_params', 'last_pipeline_params');
   });
 };
 
 exports.down = async (knex) => {
-  await knex.schema.renameColumn('experiment_execution', (table) => {
+  await knex.schema.alterTable('experiment_execution', (table) => {
     table.renameColumn('last_pipeline_params', 'last_gem2s_params');
   });
 };

From b3c8edd77af442f564eee9aa9eb19a8cd58a0210 Mon Sep 17 00:00:00 2001
From: Alex Pickering <alexvpickering@gmail.com>
Date: Tue, 28 Mar 2023 09:10:32 -0700
Subject: [PATCH 65/65] fix refs

Signed-off-by: Alex Pickering <alexvpickering@gmail.com>
---
 .github/pull_request_template.md | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 5aeab71ee..eba0d222b 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -13,7 +13,7 @@
 #### Link to staging deployment URL (or set N/A)
 <!---
   Delete this comment and include the URL of the staging environment for this pull request.
-  Refer to https://github.com/hms-dbmi-cellenics/biomage-utils#stage on how to stage a staging environment.
+  Refer to https://github.com/hms-dbmi-cellenics/cellenics-utils#stage on how to stage a staging environment.
   If a staging environment for testing is not necessary for this PR, replace this comment with N/A 
   and explain why a staging environment is not required for this PR.
 
@@ -54,9 +54,9 @@ Have best practices and ongoing refactors being observed in this PR
 - [ ] Unit tests written **or** no unit tests required for change, e.g. documentation update.
 
 <!---
-  Download the latest production data using `biomage experiment pull`.
+  Download the latest production data using `cellenics experiment pull`.
   To set up easy local testing with inframock, follow the instructions here: https://github.com/hms-dbmi-cellenics/inframock
-  To deploy to the staging environment, follow the instructions here: https://github.com/hms-dbmi-cellenics/biomage-utils
+  To deploy to the staging environment, follow the instructions here: https://github.com/hms-dbmi-cellenics/cellenics-utils
 -->
 
 ### Integration testing