use deleteObject to expire objects in S3C
Zenko supports the "deleteObjectFromExpiration" API, which
sets the proper originOp in the metadata to trigger a notification
when an object gets expired. S3C doesn't support that API, so we
switch to the standard deleteObject API in this case.

Issue: BB-556
Kerkesni committed Oct 18, 2024
1 parent 31d9ca6 commit 6f323b8
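
In outline, the change is a feature-detection fallback: if the Backbeat client exposes deleteObjectFromExpiration (Zenko), use it so the expiration notification gets triggered; otherwise (S3C) fall back to the plain S3 deleteObject call. A minimal sketch of the idea, with the helper name pickDeleteCall chosen purely for illustration and client construction and error handling omitted:

    // Simplified sketch of the fallback this commit introduces; the real logic
    // lives in _getS3Action in LifecycleDeleteObjectTask.js (see diff below).
    function pickDeleteCall(backbeatClient, s3Client) {
        // Zenko's client sets the proper originOp in the metadata so that a
        // notification is emitted when the object expires.
        if (backbeatClient && typeof backbeatClient.deleteObjectFromExpiration === 'function') {
            return backbeatClient.deleteObjectFromExpiration.bind(backbeatClient);
        }
        // S3C does not support that API: use the standard deleteObject instead.
        return s3Client.deleteObject.bind(s3Client);
    }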
Showing 3 changed files with 123 additions and 26 deletions.
61 changes: 36 additions & 25 deletions extensions/lifecycle/tasks/LifecycleDeleteObjectTask.js
@@ -98,6 +98,27 @@ class LifecycleDeleteObjectTask extends BackbeatTask {
return done();
}

_getS3Action(actionType, accountId) {
let reqMethod;
if (actionType === 'deleteObject') {
const backbeatClient = this.getBackbeatClient(accountId);
if (!backbeatClient) {
return null;
}
// Zenko supports the "deleteObjectFromExpiration" API, which
// sets the proper originOp in the metadata to trigger a
// notification when an object gets expired.
if (typeof backbeatClient.deleteObjectFromExpiration === 'function') {
return backbeatClient.deleteObjectFromExpiration.bind(backbeatClient);
}
reqMethod = 'deleteObject';
} else {
reqMethod = 'abortMultipartUpload';
}
const client = this.getS3Client(accountId);
return client[reqMethod].bind(client);
}

_executeDelete(entry, startTime, log, done) {
const { accountId } = entry.getAttribute('target');

@@ -115,34 +136,24 @@ class LifecycleDeleteObjectTask extends BackbeatTask {
const transitionTime = entry.getAttribute('transitionTime');
const location = this.objectMD?.dataStoreName || entry.getAttribute('details.dataStoreName');
let req = null;
if (actionType === 'deleteObject') {
reqMethod = 'deleteObject';
const backbeatClient = this.getBackbeatClient(accountId);
if (!backbeatClient) {
log.error('failed to get backbeat client', { accountId });
return done(errors.InternalError
.customizeDescription('Unable to obtain client'));
}

LifecycleMetrics.onLifecycleStarted(log, 'expiration',
location, startTime - transitionTime);

req = backbeatClient.deleteObjectFromExpiration(reqParams);
} else if (actionType === 'deleteMPU') {
const s3Action = this._getS3Action(actionType, accountId);
if (!s3Action) {
log.error('failed to get s3 action', {
accountId,
actionType,
method: 'LifecycleDeleteObjectTask._executeDelete',
});
return done(errors.InternalError
.customizeDescription('Unable to obtain s3 action'));
}
LifecycleMetrics.onLifecycleStarted(log,
actionType === 'deleteMPU' ? 'expiration:mpu' : 'expiration',
location, startTime - transitionTime);
if (actionType === 'deleteMPU') {
reqParams.UploadId = entry.getAttribute('details.UploadId');
reqMethod = 'abortMultipartUpload';
const s3Client = this.getS3Client(accountId);
if (!s3Client) {
log.error('failed to get S3 client', { accountId });
return done(errors.InternalError
.customizeDescription('Unable to obtain client'));
}

LifecycleMetrics.onLifecycleStarted(log, 'expiration:mpu',
location, startTime - transitionTime);

req = s3Client[reqMethod](reqParams);
}
req = s3Action(reqParams);
attachReqUids(req, log);
return req.send(err => {
LifecycleMetrics.onS3Request(log, reqMethod, 'expiration', err);
76 changes: 76 additions & 0 deletions tests/unit/lifecycle/LifecycleDeleteObjectTask.spec.js
@@ -1,4 +1,5 @@
const assert = require('assert');
const sinon = require('sinon');
const { errors } = require('arsenal');
const werelogs = require('werelogs');
const { ObjectMD } = require('arsenal').models;
@@ -226,4 +227,79 @@ describe('LifecycleDeleteObjectTask', () => {
done();
});
});

it('should expire object using the deleteObjectFromExpiration method if supported', done => {
const entry = ActionQueueEntry.create('deleteObject')
.setAttribute('target.owner', 'testowner')
.setAttribute('target.bucket', 'testbucket')
.setAttribute('target.accountId', 'testid')
.setAttribute('target.key', 'testkey')
.setAttribute('target.version', 'testversion')
.setAttribute('details.lastModified', '2022-05-13T17:51:31.261Z');
s3Client.setResponse(null, {});
backbeatClient.setResponse(null, {});
task.processActionEntry(entry, err => {
assert.ifError(err);
assert.strictEqual(backbeatClient.times.deleteObjectFromExpiration, 1);
assert.strictEqual(s3Client.calls.deleteObject, 0);
done();
});
});

it('should expire object using the deleteObject method when not in Zenko', done => {
backbeatClient = new BackbeatClientMock({ isS3c: true });
sinon.stub(task, 'getBackbeatClient').returns(backbeatClient);
const entry = ActionQueueEntry.create('deleteObject')
.setAttribute('target.owner', 'testowner')
.setAttribute('target.bucket', 'testbucket')
.setAttribute('target.accountId', 'testid')
.setAttribute('target.key', 'testkey')
.setAttribute('target.version', 'testversion')
.setAttribute('details.lastModified', '2022-05-13T17:51:31.261Z');
s3Client.setResponse(null, {});
backbeatClient.setResponse(null, {});
task.processActionEntry(entry, err => {
assert.ifError(err);
assert.strictEqual(s3Client.calls.deleteObject, 1);
assert.strictEqual(backbeatClient.times.deleteObjectFromExpiration, 0);
done();
});
});

it('should abort an MPU using the abortMultipartUpload method', done => {
const entry = ActionQueueEntry.create('deleteMPU')
.setAttribute('target.owner', 'testowner')
.setAttribute('target.bucket', 'testbucket')
.setAttribute('target.accountId', 'testid')
.setAttribute('target.key', 'testkey')
.setAttribute('target.version', 'testversion')
.setAttribute('details.UploadId', 'someUploadId')
.setAttribute('details.lastModified', '2022-05-13T17:51:31.261Z');
s3Client.setResponse(null, {});
backbeatClient.setResponse(null, {});
task.processActionEntry(entry, err => {
assert.ifError(err);
assert.strictEqual(s3Client.calls.abortMultipartUpload, 1);
assert.strictEqual(s3Client.calls.deleteObject, 0);
assert.strictEqual(backbeatClient.times.deleteObjectFromExpiration, 0);
done();
});
});

it('should return an error when it can\'t get the BackbeatClient', done => {
sinon.stub(task, 'getBackbeatClient').returns(null);
const entry = ActionQueueEntry.create('deleteObject')
.setAttribute('target.owner', 'testowner')
.setAttribute('target.bucket', 'testbucket')
.setAttribute('target.accountId', 'testid')
.setAttribute('target.key', 'testkey')
.setAttribute('target.version', 'testversion')
.setAttribute('details.lastModified', '2022-05-13T17:51:31.261Z');
s3Client.setResponse(null, {});
backbeatClient.setResponse(null, {});
task.processActionEntry(entry, err => {
assert(err);
done();
});
});
});
12 changes: 11 additions & 1 deletion tests/unit/mocks.js
@@ -61,13 +61,16 @@ class MockRequestAPI extends EventEmitter {
}

class BackbeatClientMock {
constructor() {
constructor(params) {
this.response = null;
this.batchDeleteResponse = {};
this.times = {
batchDeleteResponse: 0,
deleteObjectFromExpiration: 0,
};
if (params?.isS3c) {
this.deleteObjectFromExpiration = undefined;
}
}

deleteObjectFromExpiration() {
@@ -186,6 +189,7 @@ class S3ClientMock {
headObject: 0,
deleteObject: 0,
deleteMultipartObject: 0,
abortMultipartUpload: 0,
};
}

@@ -218,6 +222,12 @@ class S3ClientMock {
this.assertRespIsSet();
return new S3RequestMock(this.response.error, this.response.data);
}

abortMultipartUpload() {
this.calls.abortMultipartUpload += 1;
this.assertRespIsSet();
return new S3RequestMock(this.response.error, this.response.data);
}
}

module.exports = {