Merge pull request #3855 from bjester/mo-patches
Hotfixes to add new probers, address Sentry reporting issues, and other issues
bjester authored Dec 2, 2022
2 parents 9d84374 + 8bee2fd commit 706d7d6
Showing 18 changed files with 370 additions and 172 deletions.
@@ -705,10 +705,11 @@
       audioVideoFiles = this.nodeFiles.filter(file => this.allowedFileType(file));
       // return the last item in the array
       const file = audioVideoFiles[audioVideoFiles.length - 1];
-      return file.duration;
-    } else {
-      return null;
+      if (file) {
+        return file.duration;
+      }
     }
+    return null;
   },
   videoSelected() {
     return this.oneSelected && this.firstNode.kind === ContentKindsNames.VIDEO;
@@ -204,6 +204,8 @@
       data-test="deploy-dialog"
       :title="$tr('deployChannel')"
       :submitText="$tr('confirmDeployBtn')"
+      :submitDisabled="submitDisabled"
+      :cancelDisabled="submitDisabled"
       :cancelText="$tr('cancelDeployBtn')"
       @submit="onDeployChannelClick"
       @cancel="displayDeployDialog = false"
@@ -293,6 +295,7 @@
       displayDeployDialog: false,
       drawer: false,
       elevated: false,
+      submitDisabled: false,
     };
   },
   computed: {
@@ -504,7 +507,13 @@
       this.elevated = e.target.scrollTop > 0;
     },
     async onDeployChannelClick() {
-      await this.deployCurrentChannel();
+      this.submitDisabled = true;
+      try {
+        await this.deployCurrentChannel();
+      } catch (e) {
+        this.submitDisabled = false;
+        throw e;
+      }
       await this.loadChannel(this.currentChannel.id);
       this.$router.push(this.rootTreeRoute);
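The change above is a standard guard for a one-shot async action: both dialog buttons are bound to the same submitDisabled flag, the flag is set before the deploy call starts, and it is cleared again only if the call throws, so a slow deploy cannot be re-submitted. A minimal, framework-free sketch of the same pattern (guardedSubmit, deploy, and afterSuccess are illustrative names, not identifiers from this commit):

// Disable the action while it runs; re-enable only on failure so the user can retry.
async function guardedSubmit(state, deploy, afterSuccess) {
  state.submitDisabled = true;
  try {
    await deploy();
  } catch (e) {
    state.submitDisabled = false;
    throw e; // surface the failure to the caller
  }
  // On success the view navigates away, so the flag is deliberately left set.
  await afterSuccess();
}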
@@ -51,7 +51,12 @@ export function deployCurrentChannel(context) {
   let payload = {
     channel_id: context.state.currentChannelId,
   };
-  return client.post(window.Urls.activate_channel(), payload);
+  return client.post(window.Urls.activate_channel(), payload).catch(e => {
+    // If response is 'Bad request', channel must already be activated
+    if (e.response && e.response.status === 400) {
+      return Promise.resolve();
+    }
+  });
 }
 
 export function publishChannel(context, version_notes) {
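Because onDeployChannelClick awaits this action, converting a 400 response into a resolved promise keeps the deploy flow from failing when the channel was already activated by an earlier request. An illustrative caller, assuming nothing beyond what the diff shows:

// `context` is the Vuex action context these channel actions already receive.
deployCurrentChannel(context).then(() => {
  // Reached on a normal success and on a 400 'already activated' response alike.
});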
20 changes: 17 additions & 3 deletions contentcuration/contentcuration/frontend/shared/client.js
@@ -1,3 +1,4 @@
+import omit from 'lodash/omit';
 import axios from 'axios';
 import qs from 'qs';
 import * as Sentry from '@sentry/vue';
@@ -26,7 +27,15 @@ export function paramsSerializer(params) {
 const client = axios.create({
   xsrfCookieName: 'csrftoken',
   xsrfHeaderName: 'X-CSRFToken',
-  paramsSerializer,
+  paramsSerializer: {
+    serializer: paramsSerializer,
+  },
 });
 
+// Track when the browser was last offline for error reporting purposes
+let lastOffline = null;
+window.addEventListener('offline', () => {
+  lastOffline = Date.now();
+});
+
 client.interceptors.response.use(
@@ -55,11 +64,12 @@
       // In dev build log warnings to console for developer use
       console.warn('AJAX Request Error: ' + message); // eslint-disable-line no-console
       console.warn('Error data: ', error); // eslint-disable-line no-console
-    } else {
+    } else if (error.code !== 'ECONNABORTED') {
       Sentry.withScope(function(scope) {
         scope.addAttachment({
           filename: 'error.json',
-          data: JSON.stringify(error),
+          // strip csrf token from headers
+          data: JSON.stringify(omit(error, ['config.headers.X-CSRFToken'])),
           contentType: 'application/json',
         });
         Sentry.captureException(new Error(message), {
@@ -69,6 +79,10 @@
             method: error.config.method,
             url,
           },
+          Network: {
+            lastOffline: lastOffline ? `${Date.now() - lastOffline}ms ago` : 'never',
+            online: navigator.onLine,
+          },
         },
       });
     });
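Two details of the Sentry change are easy to miss: requests that axios aborted client-side (error.code === 'ECONNABORTED') are no longer reported to Sentry at all, and the error attachment relies on lodash's omit accepting a deep property path, so only the CSRF header is stripped while the rest of the request config is preserved. A small standalone illustration of that deep-path omit (all values here are made up):

import omit from 'lodash/omit';

const error = {
  message: 'Request failed with status code 500',
  config: {
    url: '/api/example/',
    headers: { 'X-CSRFToken': 'secret-token', Accept: 'application/json' },
  },
};

// Deep paths are removed in the returned copy; the original object is not mutated.
const scrubbed = omit(error, ['config.headers.X-CSRFToken']);
// scrubbed.config.headers => { Accept: 'application/json' }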
125 changes: 71 additions & 54 deletions contentcuration/contentcuration/frontend/shared/data/resources.js
@@ -264,65 +264,82 @@ class IndexedDBResource {
       CHANGES_TABLE,
       () => {
         // Get any relevant changes that would be overwritten by this bulkPut
-        return db[CHANGES_TABLE].where('[table+key]')
+        const changesPromise = db[CHANGES_TABLE].where('[table+key]')
           .anyOf(itemData.map(datum => [this.tableName, this.getIdValue(datum)]))
-          .sortBy('rev', changes => {
-            changes = mergeAllChanges(changes, true);
-            const collectedChanges = collectChanges(changes)[this.tableName] || {};
-            for (let changeType of Object.keys(collectedChanges)) {
-              const map = {};
-              for (let change of collectedChanges[changeType]) {
-                map[change.key] = change;
-              }
-              collectedChanges[changeType] = map;
-            }
-            const data = itemData
-              .map(datum => {
-                datum[LAST_FETCHED] = now;
-                const id = this.getIdValue(datum);
-                // If we have an updated change, apply the modifications here
-                if (
-                  collectedChanges[CHANGE_TYPES.UPDATED] &&
-                  collectedChanges[CHANGE_TYPES.UPDATED][id]
-                ) {
-                  applyMods(datum, collectedChanges[CHANGE_TYPES.UPDATED][id].mods);
-                }
-                return datum;
-                // If we have a deleted change, just filter out this object so we don't reput it
-              })
-              .filter(
-                datum =>
-                  !collectedChanges[CHANGE_TYPES.DELETED] ||
-                  !collectedChanges[CHANGE_TYPES.DELETED][this.getIdValue(datum)]
-              );
-            return this.table.bulkPut(data).then(() => {
-              // Move changes need to be reapplied on top of fetched data in case anything
-              // has happened on the backend.
-              return applyChanges(Object.values(collectedChanges[CHANGE_TYPES.MOVED] || {})).then(
-                results => {
-                  if (!results || !results.length) {
-                    return data;
-                  }
-                  const resultsMap = {};
-                  for (let result of results) {
-                    const id = this.getIdValue(result);
-                    resultsMap[id] = result;
-                  }
-                  return data
-                    .map(datum => {
-                      const id = this.getIdValue(datum);
-                      if (resultsMap[id]) {
-                        applyMods(datum, resultsMap[id]);
-                      }
-                      return datum;
-                      // Concatenate any unsynced created objects onto
-                      // the end of the returned objects
-                    })
-                    .concat(Object.values(collectedChanges[CHANGE_TYPES.CREATED]).map(c => c.obj));
+          .sortBy('rev');
+        const currentPromise = this.table
+          .where(this.idField)
+          .anyOf(itemData.map(datum => this.getIdValue(datum)))
+          .toArray();
+
+        return Promise.all([changesPromise, currentPromise]).then(([changes, currents]) => {
+          changes = mergeAllChanges(changes, true);
+          const collectedChanges = collectChanges(changes)[this.tableName] || {};
+          for (let changeType of Object.keys(collectedChanges)) {
+            const map = {};
+            for (let change of collectedChanges[changeType]) {
+              map[change.key] = change;
+            }
+            collectedChanges[changeType] = map;
+          }
+          const currentMap = {};
+          for (let currentObj of currents) {
+            currentMap[this.getIdValue(currentObj)] = currentObj;
+          }
+          const data = itemData
+            .map(datum => {
+              const id = this.getIdValue(datum);
+              datum[LAST_FETCHED] = now;
+              // Persist TASK_ID and COPYING_FLAG attributes when directly fetching from the server
+              if (currentMap[id] && currentMap[id][TASK_ID]) {
+                datum[TASK_ID] = currentMap[id][TASK_ID];
+              }
+              if (currentMap[id] && currentMap[id][COPYING_FLAG]) {
+                datum[COPYING_FLAG] = currentMap[id][COPYING_FLAG];
+              }
+              // If we have an updated change, apply the modifications here
+              if (
+                collectedChanges[CHANGE_TYPES.UPDATED] &&
+                collectedChanges[CHANGE_TYPES.UPDATED][id]
+              ) {
+                applyMods(datum, collectedChanges[CHANGE_TYPES.UPDATED][id].mods);
+              }
+              return datum;
+              // If we have a deleted change, just filter out this object so we don't reput it
+            })
+            .filter(
+              datum =>
+                !collectedChanges[CHANGE_TYPES.DELETED] ||
+                !collectedChanges[CHANGE_TYPES.DELETED][this.getIdValue(datum)]
+            );
+          return this.table.bulkPut(data).then(() => {
+            // Move changes need to be reapplied on top of fetched data in case anything
+            // has happened on the backend.
+            return applyChanges(Object.values(collectedChanges[CHANGE_TYPES.MOVED] || {})).then(
+              results => {
+                if (!results || !results.length) {
+                  return data;
+                }
+                const resultsMap = {};
+                for (let result of results) {
+                  const id = this.getIdValue(result);
+                  resultsMap[id] = result;
+                }
+                return data
+                  .map(datum => {
+                    const id = this.getIdValue(datum);
+                    if (resultsMap[id]) {
+                      applyMods(datum, resultsMap[id]);
+                    }
+                    return datum;
+                    // Concatenate any unsynced created objects onto
+                    // the end of the returned objects
+                  })
+                  .concat(Object.values(collectedChanges[CHANGE_TYPES.CREATED]).map(c => c.obj));
                 }
               );
             });
           });
       }
     );
   }
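Functionally, the rewrite above splits the single Dexie sortBy callback into two parallel reads (the pending change log plus the rows currently cached for this table) and, before bulkPut overwrites those rows with server data, copies purely local markers across so an in-progress task or copy is not visually lost. A distilled, framework-free sketch of just that merge step (TASK_ID and COPYING_FLAG stand for the same constants resources.js imports; everything else is illustrative):

// Placeholders for the shared constants used in resources.js.
const TASK_ID = '__TASK_ID__';
const COPYING_FLAG = '__COPYING_FLAG__';

// current: rows already cached locally; fresh: rows just fetched from the server.
function mergeLocalMarkers(current, fresh, getId) {
  const currentMap = {};
  for (const row of current) {
    currentMap[getId(row)] = row;
  }
  return fresh.map(row => {
    const existing = currentMap[getId(row)];
    if (existing && existing[TASK_ID]) {
      row[TASK_ID] = existing[TASK_ID];
    }
    if (existing && existing[COPYING_FLAG]) {
      row[COPYING_FLAG] = existing[COPYING_FLAG];
    }
    return row;
  });
}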
@@ -63,7 +63,7 @@
 import FileDropzone from './FileDropzone';
 import FileStorage from './FileStorage';
-import { fileErrors, MAX_FILE_SIZE } from 'shared/constants';
+import { MAX_FILE_SIZE } from 'shared/constants';
 import { fileSizeMixin } from 'shared/mixins';
 import Alert from 'shared/views/Alert';
 import { FormatPresetsList } from 'shared/leUtils/FormatPresets';
@@ -193,16 +193,17 @@
       } else if (this.tooLargeFiles.length) {
         this.showTooLargeFilesAlert = true;
       }
-      return this.handleUploads(files).then(fileObjects => {
-        const objects = fileObjects.map(f => f.fileObject);
-        if (fileObjects.length) {
-          for (let ret of fileObjects) {
-            const fileObject = ret.fileObject;
-            ret.promise.then(err => {
-              if (err !== fileErrors.UPLOAD_FAILED && isFunction(this.uploadCompleteHandler)) {
-                this.uploadCompleteHandler(this.getFileUpload(fileObject.id));
-              }
-            });
+      return this.handleUploads(files).then(fileUploads => {
+        const objects = fileUploads.map(f => f.fileObject).filter(f => !f.error);
+        if (fileUploads.length) {
+          for (let fileUpload of fileUploads) {
+            fileUpload.uploadPromise
+              .then(fileObject => {
+                if (isFunction(this.uploadCompleteHandler)) {
+                  this.uploadCompleteHandler(this.getFileUpload(fileObject.id));
+                }
+              })
+              .catch(() => {});
           }
           if (isFunction(this.uploadingHandler)) {
             this.uploadingHandler(this.allowMultiple ? objects : objects[0]);
@@ -220,9 +221,9 @@
         // need to distinguish between presets with same extension
         // (e.g. high res vs. low res videos)
         [...files].map(file => this.uploadFile({ file, preset: this.presetID }).catch(() => null))
-      ).then(fileObjects => {
+      ).then(fileUploads => {
         // Filter out any null values here
-        return fileObjects.filter(Boolean);
+        return fileUploads.filter(Boolean);
       });
     },
   },
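In the updated component each entry returned by handleUploads pairs the created fileObject with an uploadPromise that resolves with the file object on success and rejects on failure, replacing the old convention of resolving with an error code. A hedged sketch of consuming that shape (markComplete and markFailed are placeholder handlers, not part of the commit):

// fileUploads: array of { fileObject, uploadPromise } as produced above.
fileUploads.forEach(({ fileObject, uploadPromise }) => {
  uploadPromise
    .then(uploaded => markComplete(uploaded.id)) // resolves only when the upload succeeded
    .catch(() => markFailed(fileObject.id)); // rejection replaces the old UPLOAD_FAILED check
});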