
fix tests
Siddharth Manoj committed Nov 30, 2023
1 parent 658c1fb · commit f627233
Showing 1 changed file with 17 additions and 9 deletions.
tests/unit/sidecars/live_data_watcher_tests.py (17 additions & 9 deletions)
@@ -19,25 +19,25 @@
 class NodeWatcherTests(unittest.TestCase):
     mock_s3 = mock_s3()
     bucket_name = "test_bucket"
-    s3_client = boto3.client(
-        "s3",
-        region_name="us-east-1",
-    )
     default_file_key = "test_file_key"
 
     def setUp(self):
         self.mock_s3.start()
+        s3_client = boto3.client(
+            "s3",
+            region_name="us-east-1",
+        )
         s3_data = {"foo_encrypted": "bar_encrypted"}
 
-        self.s3_client.create_bucket(Bucket=self.bucket_name)
+        s3_client.create_bucket(Bucket=self.bucket_name)
         for file_shard_num in range(NUM_FILE_SHARDS):
             if file_shard_num == 0:
                 # The first copy should just be the original file.
                 sharded_file_key = self.default_file_key
             else:
                 # All other copies should include the sharded prefix.
                 sharded_file_key = str(file_shard_num) + "/" + self.default_file_key
-            self.s3_client.put_object(
+            s3_client.put_object(
                 Bucket=self.bucket_name,
                 Key=sharded_file_key,
                 Body=json.dumps(s3_data).encode(),
@@ -65,7 +65,11 @@ def test_s3_load_type_on_change(self):
         self.assertEqual(dest.group(), grp.getgrgid(os.getgid()).gr_name)
 
         # For additional good measure, let's also fetch the file from S3 and validate the contents.
-        obj = self.s3_client.get_object(Bucket=self.bucket_name, Key=self.default_file_key)
+        s3_client = boto3.client(
+            "s3",
+            region_name="us-east-1",
+        )
+        obj = s3_client.get_object(Bucket=self.bucket_name, Key=self.default_file_key)
         actual_data = obj["Body"].read().decode("utf-8")
         assert actual_data == json.loads(expected_content)
 
@@ -86,15 +86,19 @@ def test_s3_load_type_sharded_on_change(self):
         self.assertEqual(dest.group(), grp.getgrgid(os.getgid()).gr_name)
 
         # For additional good measure, let's also fetch the files from S3 and validate the contents.
-        obj = self.s3_client.get_object(Bucket=self.bucket_name, Key=self.default_file_key)
+        s3_client = boto3.client(
+            "s3",
+            region_name="us-east-1",
+        )
+        obj = s3_client.get_object(Bucket=self.bucket_name, Key=self.default_file_key)
         actual_data = obj["Body"].read().decode("utf-8")
         assert actual_data == json.loads(expected_content)
 
         # Assert that all copies of the file are fetchable and contain the same
         # data as the original.
         for i in range(1, NUM_FILE_SHARDS):
             file_key = str(i) + "/" + self.default_file_key
-            obj = self.s3_client.get_object(Bucket=self.bucket_name, Key=file_key)
+            obj = s3_client.get_object(Bucket=self.bucket_name, Key=file_key)
             actual_data = obj["Body"].read().decode("utf-8")
             assert actual_data == json.loads(expected_content)
 
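Why moving the client construction likely matters: moto's documented guidance is to create boto3 clients only after the relevant mock has started. The old class-level s3_client was built at class-definition time, before self.mock_s3.start() runs in setUp, so it could capture real (or missing) credentials and endpoint configuration and fail or send requests outside the mock. Below is a minimal, self-contained sketch of the pattern, assuming moto 4.x (where the per-service mock_s3 still exists); the class, bucket, and key names are illustrative, not taken from the change above.

import unittest

import boto3
from moto import mock_s3


class ClientAfterMockStart(unittest.TestCase):
    mock_s3 = mock_s3()

    def setUp(self):
        self.mock_s3.start()
        # Create the client only after the mock is active, so boto3 wires it
        # to moto's in-memory S3 (with moto's fake credentials) instead of a
        # real AWS endpoint.
        self.s3_client = boto3.client("s3", region_name="us-east-1")
        self.s3_client.create_bucket(Bucket="example-bucket")

    def tearDown(self):
        self.mock_s3.stop()

    def test_round_trip(self):
        self.s3_client.put_object(Bucket="example-bucket", Key="key", Body=b"value")
        obj = self.s3_client.get_object(Bucket="example-bucket", Key="key")
        self.assertEqual(obj["Body"].read(), b"value")

Storing the client on self in setUp, as sketched here, is an equivalent alternative to the commit's local variables when several test methods need the same client.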
