diff --git a/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/README.md b/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/README.md new file mode 100644 index 0000000..6d6b97a --- /dev/null +++ b/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/README.md @@ -0,0 +1,68 @@ +# Restrict unsecured HTTP requests for S3 Bucket + +This job configures the S3 bucket policy to deny unsecured HTTP traffic. + +### Applicable Rule + +##### Rule ID: +688d093c-3b8d-11eb-adc1-0242ac120002 + +##### Rule Name: +S3 bucket should allow only HTTPS requests + +## Getting Started + +### Prerequisites + +The provided AWS credential must have access to `s3:GetBucketPolicy` and `s3:PutBucketPolicy`. + +You may find the latest example policy file [here](minimum_policy.json). + +### Running the script + +You may run this script using the following commands: +```shell script + pip install -r ../../requirements.txt + python3 aws_s3_bucket_policy_allow_https.py +``` + +## Running the tests +You may run the tests using the following command from the vss-remediation-worker-job-code-python directory: +```shell script + python3 -m pytest test +``` + +## Deployment +1. Provision a Virtual Machine +Create an EC2 instance to use for the worker. The minimum required specifications are 128 MB memory and 1/2 Core CPU. +2. Set up Docker +Install Docker on the newly provisioned EC2 instance. You can refer to the [docs here](https://docs.aws.amazon.com/AmazonECS/latest/developerguide/docker-basics.html) for more information. +3. Deploy the worker image +SSH into the EC2 instance and run the command below to deploy the worker image: +```shell script + docker run --rm -it --name worker \ + -e VSS_CLIENT_ID={ENTER CLIENT ID} \ + -e VSS_CLIENT_SECRET={ENTER CLIENT SECRET} \ + vmware/vss-remediation-worker:latest-python +``` + + +## Contributing +The Secure State team welcomes contributions from the community. If you wish to contribute code and you have not signed our contributor license agreement (CLA), our bot will update the issue when you open a Pull Request. For any questions about the CLA process, please refer to our [FAQ](https://cla.vmware.com/faq). +All contributions to this repository must be signed as described on that page. Your signature certifies that you wrote the patch or have the right to pass it on as an open-source patch. + +For more detailed information, refer to [CONTRIBUTING.md](../../../CONTRIBUTING.md). + +## Versioning + +We use [SemVer](http://semver.org/) for versioning. For the versions available, see the [tags on this repository](https://github.com/vmware-samples/secure-state-remediation-jobs/tags). + +## Authors + +* **VMware Secure State** - *Initial work* + +See also the list of [contributors](https://github.com/vmware-samples/secure-state-remediation-jobs/contributors) who participated in this project.
+ +## License + +This project is licensed under the Apache License - see the [LICENSE](https://github.com/vmware-samples/secure-state-remediation-jobs/blob/master/LICENSE.txt) file for details diff --git a/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/__init__.py b/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/aws_s3_bucket_policy_allow_https.py b/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/aws_s3_bucket_policy_allow_https.py new file mode 100644 index 0000000..e76ffb2 --- /dev/null +++ b/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/aws_s3_bucket_policy_allow_https.py @@ -0,0 +1,117 @@ +# Copyright (c) 2020 VMware Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +import json +import logging +import sys + +import boto3 + +logging.basicConfig(level=logging.INFO) + + +class S3AllowOnlyHttpsRequest: + def parse(self, payload): + """Parse payload received from Remediation Service. + + :param payload: JSON string containing parameters sent to the remediation job. + :type payload: str. + :returns: Dictionary of parsed parameters + :rtype: dict + :raises: Exception, JSONDecodeError + """ + remediation_entry = json.loads(payload) + notification_info = remediation_entry.get("notificationInfo", None) + finding_info = notification_info.get("FindingInfo", None) + bucket_name = finding_info.get("ObjectId", None) + object_chain = remediation_entry["notificationInfo"]["FindingInfo"][ + "ObjectChain" + ] + object_chain_dict = json.loads(object_chain) + cloud_account_id = object_chain_dict["cloudAccountId"] + region = finding_info.get("Region") + + if bucket_name is None: + logging.error("Missing parameters for 'BUCKET_NAME'.") + raise Exception("Missing parameters for 'BUCKET_NAME'.") + + logging.info("parsed params") + logging.info(f" bucket_name: {bucket_name}") + logging.info(f"cloud_account_id: {cloud_account_id}") + + return { + "cloud_account_id": cloud_account_id, + "bucket_name": bucket_name, + } + + def remediate(self, client, cloud_account_id, bucket_name): + """Configuring S3 bucket policy to deny unsecured HTTP traffic. + + :param client: Instance of the AWS boto3 client. + :param cloud_account_id: AWS Account Id. + :param bucket_name: Name of the bucket. + :type cloud_account_id: str. + :type bucket_name: str. + :returns: Integer signaling success or failure. 
+ :rtype: int + :raises: botocore.exceptions.ClientError + """ + + logging.info("making api call to client.get_bucket_policy") + logging.info(f"Bucket_name: {bucket_name}") + bucket_policy = client.get_bucket_policy( + Bucket=bucket_name, ExpectedBucketOwner=cloud_account_id, + ) + policy = json.loads(bucket_policy["Policy"]) + statements = policy["Statement"] + # Policy Statement to restrict http requests + restrict_http = { + "Sid": "Restrict Non-https Requests", + "Effect": "Deny", + "Principal": "*", + "Action": "s3:GetObject", + "Resource": f"arn:aws:s3:::{bucket_name}/*", + "Condition": {"Bool": {"aws:SecureTransport": "false"}}, + } + statements.append(restrict_http) + logging.info("making api call to client.put_bucket_policy") + logging.info(f"Bucket_name: {bucket_name}") + client.put_bucket_policy( + Bucket=bucket_name, + Policy=json.dumps(policy), + ExpectedBucketOwner=cloud_account_id, + ) + logging.info(f"successfully executed remediation for bucket: {bucket_name}") + return 0 + + def run(self, args): + """Run the remediation job. + + :param args: List of arguments provided to the job. + :type args: list. + :returns: int + """ + params = self.parse(args[1]) + client = boto3.client("s3") + logging.info("acquired s3 client and parsed params - starting remediation") + rc = self.remediate(client=client, **params) + return rc + + +if __name__ == "__main__": + logging.info("aws_s3_bucket_policy_allow_https.py called - running now") + obj = S3AllowOnlyHttpsRequest() + obj.run(sys.argv) diff --git a/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/constraints.txt b/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/constraints.txt new file mode 100644 index 0000000..68a9723 --- /dev/null +++ b/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/constraints.txt @@ -0,0 +1,43 @@ +attrs==20.1.0 \ + --hash=sha256:0ef97238856430dcf9228e07f316aefc17e8939fc8507e18c6501b761ef1a42a \ + --hash=sha256:2867b7b9f8326499ab5b0e2d12801fa5c98842d2cbd22b35112ae04bf85b4dff +docutils==0.15.2 \ + --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \ + --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \ + --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99 +iniconfig==1.1.1 \ + --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ + --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 +jmespath==0.10.0 \ + --hash=sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9 \ + --hash=sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f +more-itertools==8.4.0 \ + --hash=sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5 \ + --hash=sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2 +packaging==20.4 \ + --hash=sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8 \ + --hash=sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181 +pyparsing==2.4.7 \ + --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ + --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b +python-dateutil==2.8.1 \ + --hash=sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c \ + --hash=sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a +py==1.9.0 \ + --hash=sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2 \ + 
--hash=sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342 +pluggy==0.13.1 \ + --hash=sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0 \ + --hash=sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d +s3transfer==0.3.4 \ + --hash=sha256:1e28620e5b444652ed752cf87c7e0cb15b0e578972568c6609f0f18212f259ed \ + --hash=sha256:7fdddb4f22275cf1d32129e21f056337fd2a80b6ccef1664528145b72c49e6d2 +six==1.15.0 \ + --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \ + --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced +toml==0.10.1 \ + --hash=sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f \ + --hash=sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88 +urllib3==1.26.3 \ + --hash=sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80 \ + --hash=sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73 diff --git a/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/minimum_policy.json b/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/minimum_policy.json new file mode 100644 index 0000000..f383140 --- /dev/null +++ b/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/minimum_policy.json @@ -0,0 +1,14 @@ +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "S3RestrictHttpAccess", + "Effect": "Allow", + "Action": [ + "s3:GetBucketPolicy", + "s3:PutBucketPolicy" + ], + "Resource": "*" + } + ] +} diff --git a/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/requirements-dev.txt b/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/requirements-dev.txt new file mode 100644 index 0000000..cf03a93 --- /dev/null +++ b/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/requirements-dev.txt @@ -0,0 +1,9 @@ +-r requirements.txt +-c constraints.txt + +mock==4.0.2 \ + --hash=sha256:3f9b2c0196c60d21838f307f5825a7b86b678cedc58ab9e50a8988187b4d81e0 \ + --hash=sha256:dd33eb70232b6118298d516bbcecd26704689c386594f0f3c4f13867b2c56f72 +pytest==6.0.1 \ + --hash=sha256:85228d75db9f45e06e57ef9bf4429267f81ac7c0d742cc9ed63d09886a9fe6f4 \ + --hash=sha256:8b6007800c53fdacd5a5c192203f4e531eb2a1540ad9c752e052ec0f7143dbad diff --git a/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/requirements.txt b/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/requirements.txt new file mode 100644 index 0000000..ae343fd --- /dev/null +++ b/remediation_worker/jobs/aws_s3_bucket_policy_allow_https/requirements.txt @@ -0,0 +1,6 @@ +boto3==1.16.60 \ + --hash=sha256:10e8d9b18a8ae15677e850c7240140b9539635a03098f01dfdd75b2042d15862 \ + --hash=sha256:aee742f2a2315244fb31a507f65d8809fcd0029508c0b12be8611ddd2075b666 +botocore==1.19.60 \ + --hash=sha256:423a1a9502bd7bc5db8c6e64f9374f64d8ac18e6b870278a9ff65f59d268cd58 \ + --hash=sha256:80dd615a34c7e2c73606070a9358f7b5c1cb0c9989348306c1c9ddff45bb6ebe diff --git a/test/unit/test_aws_s3_bucket_policy_allow_https.py b/test/unit/test_aws_s3_bucket_policy_allow_https.py new file mode 100644 index 0000000..b1604b9 --- /dev/null +++ b/test/unit/test_aws_s3_bucket_policy_allow_https.py @@ -0,0 +1,121 @@ +# Copyright (c) 2020 VMware Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import pytest +from mock import Mock + +from remediation_worker.jobs.aws_s3_bucket_policy_allow_https.aws_s3_bucket_policy_allow_https import ( + S3AllowOnlyHttpsRequest, +) + + +@pytest.fixture +def valid_payload(): + return """ +{ + "notificationInfo": { + "RuleId": "5c6cc5e103dcc90f363146cd", + "Service": "S3", + "FindingInfo": { + "FindingId": "d0431afd-b82e-4021-8aa6-ba3cf5c60ef7", + "ObjectId": "bucket_name", + "ObjectChain": "{\\"cloudAccountId\\":\\"cloud_account_id\\",\\"entityId\\":\\"AWS.S3.159636093902.us-west-2.Bucket.test-remediation\\",\\"entityName\\":\\"remediation\\",\\"entityType\\":\\"AWS.S3.Bucket\\",\\"lastUpdateTime\\":\\"2020-09-09T00:36:35.000Z\\",\\"partitionKey\\":\\"153894897389\\",\\"provider\\":\\"AWS\\",\\"region\\":\\"us-west-2\\",\\"service\\":\\"CloudTrail\\", \\"properties\\":[{\\"name\\":\\"S3BucketName\\",\\"stringV\\":\\"remediation-cloudtrail\\",\\"type\\":\\"string\\"}]}", + "Region": "region" + } + } +} +""" + + +class TestCloudtrailS3PublicAccess(object): + def test_parse_payload(self, valid_payload): + params = S3AllowOnlyHttpsRequest().parse(valid_payload) + assert params["bucket_name"] == "bucket_name" + assert params["cloud_account_id"] == "cloud_account_id" + + def test_remediate_success(self): + client = Mock() + action = S3AllowOnlyHttpsRequest() + action.create_key = Mock() + client.get_bucket_policy.return_value = { + "ResponseMetadata": { + "RequestId": "Z4MVPBGNWPZVDEM9", + "HostId": "kbdViazCnratDD68N8hqAJWktBu+gTI9WKnO2eQ6CIdKAUmUyBq7A23b/T61/3mOkfY6NXk2ens=", + "HTTPStatusCode": 200, + "HTTPHeaders": { + "x-amz-id-2": "kbdViazCnratDD68N8hqAJWktBu+gTI9WKnO2eQ6CIdKAUmUyBq7A23b/T61/3mOkfY6NXk2ens=", + "x-amz-request-id": "Z4MVPBGNWPZVDEM9", + "date": "Thu, 15 Apr 2021 13:41:24 GMT", + "content-type": "application/json", + "content-length": "662", + "server": "AmazonS3", + }, + "RetryAttempts": 1, + }, + "Policy": '{"Version":"2012-10-17","Statement":[{"Sid":"AWSCloudTrailAclCheck20150319","Effect":"Allow","Principal":{"Service":"cloudtrail.amazonaws.com"},"Action":"s3:GetBucketAcl","Resource":"arn:aws:s3:::mitrashtest"},{"Sid":"AWSCloudTrailWrite20150319","Effect":"Allow","Principal":{"Service":"cloudtrail.amazonaws.com"},"Action":"s3:PutObject","Resource":"arn:aws:s3:::mitrashtest/AWSLogs/159636093902/*","Condition":{"StringEquals":{"s3:x-amz-acl":"bucket-owner-full-control"}}},{"Sid":"Restrict Non-https Requests","Effect":"Deny","Principal":"*","Action":"s3:GetObject","Resource":"arn:aws:s3:::mitrashtest/*"}]}', + } + + Policy = { + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "AWSCloudTrailAclCheck20150319", + "Effect": "Allow", + "Principal": {"Service": "cloudtrail.amazonaws.com"}, + "Action": "s3:GetBucketAcl", + "Resource": "arn:aws:s3:::mitrashtest", + }, + { + "Sid": "AWSCloudTrailWrite20150319", + "Effect": "Allow", + "Principal": {"Service": "cloudtrail.amazonaws.com"}, + "Action": "s3:PutObject", + "Resource": "arn:aws:s3:::mitrashtest/AWSLogs/159636093902/*", + "Condition": { + "StringEquals": {"s3:x-amz-acl": "bucket-owner-full-control"} + }, + }, + { + "Sid": "Restrict Non-https Requests", 
+ "Effect": "Deny", + "Principal": "*", + "Action": "s3:GetObject", + "Resource": "arn:aws:s3:::mitrashtest/*", + }, + { + "Sid": "Restrict Non-https Requests", + "Effect": "Deny", + "Principal": "*", + "Action": "s3:GetObject", + "Resource": "arn:aws:s3:::bucket_name/*", + "Condition": {"Bool": {"aws:SecureTransport": "false"}}, + }, + ], + } + + assert action.remediate(client, "cloud_account_id", "bucket_name") == 0 + assert client.get_bucket_policy.call_count == 1 + assert client.put_bucket_policy.call_count == 1 + call_args = client.put_bucket_policy.call_args + updated_policy = call_args[1]["Policy"] + assert updated_policy == json.dumps(Policy) + + def test_remediate_with_exception(self): + client = Mock() + action = S3AllowOnlyHttpsRequest() + with pytest.raises(Exception): + assert action.remediate( + client, "sg_name", "cloud_account_id", "bucket_name" + )