diff --git a/src/endpoint/s3/ops/s3_put_bucket_lifecycle.js b/src/endpoint/s3/ops/s3_put_bucket_lifecycle.js
index df3a8ee0bc..1488644946 100644
--- a/src/endpoint/s3/ops/s3_put_bucket_lifecycle.js
+++ b/src/endpoint/s3/ops/s3_put_bucket_lifecycle.js
@@ -98,7 +98,7 @@ async function put_bucket_lifecycle(req) {
         // Check for duplicate ID in the rules
         if (id_set.has(current_rule.id)) {
             dbg.error('Rule ID must be unique. Found same ID for more than one rule: ', current_rule.id);
-            throw new S3Error(S3Error.InvalidArgument);
+            throw new S3Error({ ...S3Error.InvalidArgument, message: 'Rule ID must be unique. Found same ID for more than one rule' });
         }
         id_set.add(current_rule.id);
diff --git a/src/test/lifecycle/common.js b/src/test/lifecycle/common.js
index b2addbea39..ea4c9d49df 100644
--- a/src/test/lifecycle/common.js
+++ b/src/test/lifecycle/common.js
@@ -548,8 +548,11 @@ exports.test_rule_duplicate_id = async function(Bucket, Key, s3) {
 
     try {
         await s3.putBucketLifecycleConfiguration(putLifecycleParams);
+        // if no error occurs, explicitly fail the test
+        assert.fail('Expected error for duplicate rule ID, but request was successful');
     } catch (error) {
+        // rethrow the assert.fail above so it is not misreported as a wrong error code
+        if (error instanceof assert.AssertionError) throw error;
         assert(error.code === 'InvalidArgument', 'Expected InvalidArgument: duplicate ID found in the rules');
-        console.log('Expected error received: each rule must have a unique ID, duplicate ID found');
     }
 };