diff --git a/.gitignore b/.gitignore index d48b58d..af57b8c 100644 --- a/.gitignore +++ b/.gitignore @@ -132,5 +132,5 @@ dmypy.json #Pycharm .idea -tests/robot/new_tests.robot +tests/robot/local_tests.robot log/ diff --git a/CHANGELOG b/CHANGELOG deleted file mode 100644 index 60f59b3..0000000 --- a/CHANGELOG +++ /dev/null @@ -1,4 +0,0 @@ -* THUR Dec 12 Dillan Teagle -- added localstack for local aws services for developing the library -- added instructions to CONTRIBUTING.md -- organize dependencies with pip-tools \ No newline at end of file diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f3bc758..1c2f50d 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,5 +1,9 @@ # ***** UNDER CONSTRUCTION ***** +We are working to create an environment with tests in localstack https://github.com/localstack/localstack if you have +experience with this tool and time, any help will be appreciated. + + ## Contributing to RobotFramework-AWS Thank you for considering contributing to a library for interacting with AWS Services in RobotFramework for Test Automation. diff --git a/docs/AWSLibrary.html b/docs/AWSLibrary.html index 62b2802..25ac0ba 100644 --- a/docs/AWSLibrary.html +++ b/docs/AWSLibrary.html @@ -1191,7 +1191,7 @@ jQuery.extend({highlight:function(e,t,n,r){if(e.nodeType===3){var i=e.data.match(t);if(i){var s=document.createElement(n||"span");s.className=r||"highlight";var o=e.splitText(i.index);o.splitText(i[0].length);var u=o.cloneNode(true);s.appendChild(u);o.parentNode.replaceChild(s,o);return 1}}else if(e.nodeType===1&&e.childNodes&&!/(script|style)/i.test(e.tagName)&&!(e.tagName===n.toUpperCase()&&e.className===r)){for(var a=0;a diff --git a/docs/AWSLibrary.xml b/docs/AWSLibrary.xml index ec15644..68da458 100644 --- a/docs/AWSLibrary.xml +++ b/docs/AWSLibrary.xml @@ -1,6 +1,6 @@ - -1.0.0 + +0.2.0 AWSLibrary is a testing library for Robot Framework that gives you the ability to use some of the AWS services in your tests. 
This robot library is made from Boto3 SDK @@ -29,7 +29,129 @@ services in your tests. This robot library is made from Boto3 SDK - + + + +log_group + + +query + + +start_time +60 + + +Executes a query on CloudWatch Insights and return the found results in a list. + +| =Arguments= | =Description= | +| ``log_group`` | <str> Log group name. | +| ``query`` | <str> Aws query log format. | +| ``start_time`` | <str> The beginning of the time range to query from now to ago in minutes. | + +--- +Use the same aws console ``query`` format in the argument, like this examples: + +- Filter only by a part of the message, return the timestamp and the message: +| ``fields @timestamp, @message | filter @message like 'some string inside message to search' | sort @timestamp desc | limit 5`` +- Filter by json path and part of the message, return only the message: +| ``fields @message | filter API.httpMethod = 'GET' and @message like 'Zp8beEeByQ0EDvg' | sort @timestamp desc | limit 20`` +- Find the 10 most expensive requests: +| ``filter @type = "REPORT" | fields @requestId, @billedDuration | sort by @billedDuration desc | limit 10`` + +For more information, see CloudWatch Logs Insights Query Syntax. +https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/CWL_QuerySyntax.html +--- + +*Examples:* +| ${logs} | CloudWatch Logs Insights | /aws/group-name | query | +| ${logs} | CloudWatch Logs Insights | /aws/group-name | query | start_time=120 | +Executes a query on CloudWatch Insights and return the found results in a list. + + + + +url + + +The complete URL to use for the constructed CloudWatch client. Normally, botocore will automatically construct the +appropriate URL to use when communicating with a service. You can specify a complete URL +(including the “http/https” scheme) to override this behavior. + +| =Arguments= | =Description= | +| ``url`` | <str> The complete endpoint URL. 
| + +*Examples:* +| CloudWatch Set Endpoint Url | http://localhost:4566/ | +The complete URL to use for the constructed CloudWatch client. Normally, botocore will automatically construct the appropriate URL to use when communicating with a service. You can specify a complete URL (including the “http/https” scheme) to override this behavior. + + + + +log_group + + +filter_pattern + + +regex_pattern + + +seconds_behind +60 + + +timeout +30 + + +not_found_fail +False + + +Wait until find the wanted log in cloudwatch. + +This keyword is used to wait in real time if the desired log appears inside the informed log group. +It works in a similar way to the existing CloudWatch filter in "Live Tail". + +Return all the logs that match the informed regex in a list. + +| =Arguments= | =Description= | +| ``log_group`` | <str> Log group name. | +| ``filter_pattern`` | <str> Filter for CloudWatch. | +| ``regex_pattern`` | <str> Regex pattern to search in filter results. | +| ``seconds_behind`` | <str> How many seconds from now to ago, used to searching the logs. | +| ``timeout`` | <str> Timeout in seconds to end the search. | +| ``not_found_fail`` | <bool> If set as True, the keyword will fail if not find any log | + +--- +For ``filter_pattern`` use the same as aws console filter patterns in Live tail. +https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html + +- Filter for json path in log: +| {$.foo.bar = some_string_value} +- Filter for json path with null value in log: +| {$.foo.bar IS NULL} +- Filter for INFO logs: +| INFO +- Filter for DEBUG logs: +| DEBUG +- Filter for anything in logs: +| " " + +For ``regex_pattern`` use the same regular expressions that robot framework uses in BuildIn Library. +--- + +Note: as boto3 takes some time to get the logs and apply the regex query to each one of them, depending on the +amount of log found, the keyword execution time may be slightly longer than the timeout. 
+ +*Examples:* +| ${logs} | CloudWatch Wait For Logs | /aws/group_name | {$.foo.bar = id_value} | 2024.*filename | +| ${logs} | CloudWatch Wait For Logs | /aws/group_name | INFO | \\d+.*id_code | timeout=60 | +| ${logs} | CloudWatch Wait For Logs | /aws/group_name | " " | \\w+.*some_code | not_found_fail=${True} | +Wait until find the wanted log in cloudwatch. + + bucket @@ -93,6 +215,21 @@ Creates S3 Bucket with the given bucket name | Create Session With Profile | us-west-1 | profile_name | Create an AWS session in region with your profile stored at ~/.aws/config. + + + +region + + +Create an AWS session in region using current role context. + +| =Arguments= | =Description= | +| ``region`` | <str> The AWS region name. | + +*Examples:* +| Create Session With Role | eu-west-1 | +Create an AWS session in region using current role context. + @@ -120,7 +257,7 @@ Creates S3 Bucket with the given bucket name | Create Session With Token | eu-west-1 | access_key | secret_key | token | Create an AWS session in region using access key, secret key and token. - + Delete all current sessions. @@ -129,7 +266,7 @@ Creates S3 Bucket with the given bucket name | Delete All Sessions | Delete all current sessions. - + bucket @@ -155,7 +292,7 @@ Deletes the file from a bucket | Delete File | bucket_name | folder/file.txt | *DEPRECATED - this keyword will be removed in version 2.0.0* use `S3 Delete File` instead - + region @@ -170,7 +307,7 @@ Deletes the file from a bucket | Delete Session | us-west-1 | Delete session by entering the region. 
- + bucket @@ -200,7 +337,7 @@ Download file from a bucket | Download File | bucket_name | folder/s3_file.txt | ${OUTPUTDIR}/file.txt | *DEPRECATED - this keyword will be removed in version 2.0.0* use `S3 Download File` instead - + table_name @@ -234,7 +371,7 @@ Download file from a bucket | Dynamo Delete Item | library-books | book_id | 123 | book_code | abc001 | Deletes a single item in a table by partition key (primary key) and sort key if provided. - + table_name @@ -255,7 +392,7 @@ existing item. | Update Item | library-books | {"key": "value"} | Creates a new item, or replaces an old item with a new item. If an item that has the same partition key (primary key) as the new item already exists in the specified table, the new item completely replaces the existing item. - + table_name @@ -299,7 +436,7 @@ https://boto3.amazonaws.com/v1/documentation/api/latest/_modules/boto3/dynamodb/ | Dynamo Query Table | library-books | book_id | 123 | projection=value | Queries a DynamoDB table based on the partition_key and his value. returns all the information found in a list of dictionaries. - + table_name @@ -329,14 +466,85 @@ https://boto3.amazonaws.com/v1/documentation/api/latest/_modules/boto3/dynamodb/ | ``partition_key`` | <str> The key to search. | | ``partition_value`` | <str> Value of the partition key. | | ``attribute_name`` | <str> Key to remove, for nested keys use . to compose the path. | +| ``sort_key`` | <str> (optional) The sort key to search. | +| ``sort_value`` | <str> (optional) Value of the sort key. | *Examples:* | Dynamo Remove Key | library-books | book_id | 123 | quantity | | Dynamo Remove Key | library-books | book_id | 123 | book.value | -| Dynamo Remove Key | library-books | book_id | 123 | book | sort_key=book_code | sort_value=abc001 | +| Dynamo Remove Key | library-books | book_id | 123 | quantity | sort_key=book_code | sort_value=abc001 | Removes a specific key in a DynamoDB item based on partition_key and sort key, if provided. 
- + + + +url + + +The complete URL to use for the constructed Dynamo client. Normally, botocore will automatically construct +the appropriate URL to use when communicating with a service. You can specify a complete URL +(including the “http/https” scheme) to override this behavior. + +| =Arguments= | =Description= | +| ``url`` | <str> The complete endpoint URL. | + +*Examples:* +| Dynamo Set Endpoint Url | http://localhost:4566/ | +The complete URL to use for the constructed Dynamo client. Normally, botocore will automatically construct the appropriate URL to use when communicating with a service. You can specify a complete URL (including the “http/https” scheme) to override this behavior. + + + + +table_name + + +partition_key + + +partition_value + + +attribute_name + + +attribute_value + + +sort_key +None + + +sort_value +None + + +Update a specific key in a DynamoDB item based on partition_key and sort key, if provided. + +Arguments: +- ``table_name``: name of the DynamoDB table. +- ``partition_key``: the partition key to search. +- ``value``: the value of partition key. +- ``attribute_name``: the key to update. For nested keys, use . to compose the path +- ``new_value``: the new value of the attribute_name. +- ``sort_key``: the sort key to search. Default as None +- ``sort_value``: the value of sort key. Default as None + +| =Arguments= | =Description= | +| ``table_name`` | <str> Name of the DynamoDB table. | +| ``partition_key`` | <str> The key to search. | +| ``partition_value`` | <str> Value of the partition key. | +| ``attribute_name`` | <str> Key to update. For nested keys, use . to compose the path. | +| ``attribute_value`` | <str> The new value of the attribute_name. | +| ``sort_key`` | <str> (optional) The sort key to search. | +| ``sort_value`` | <str> (optional) Value of the sort key. 
| + +*Examples:* +| Dynamo Update Key | library-books | book_id | 123 | quantity | 100 | +| Dynamo Update Key | library-books | book_id | 123 | book.value | 15 | +| Dynamo Update Key | library-books | book_id | 123 | quantity | 100 | sort_key=book_code | sort_value=abc001 | +Update a specific key in a DynamoDB item based on partition_key and sort key, if provided. + + bucket @@ -355,14 +563,14 @@ Check if the s3 object exist inside the bucket | =Arguments= | =Description= | | ``bucket`` | <str> The bucket name. | -| ``key`` | <str> complete s3 filepath. | +| ``key`` | <str> Complete s3 filepath. | *Examples:* | Key Should Exist | bucket_name | s3_file.txt | | Key Should Exist | bucket_name | folder/s3_file.txt | *DEPRECATED - this keyword will be removed in version 2.0.0* use `S3 Key Should Exist` instead - + bucket @@ -381,14 +589,14 @@ Check if the s3 object not exist inside the bucket | =Arguments= | =Description= | | ``bucket`` | <str> The bucket name. | -| ``key`` | <str> complete s3 filepath. | +| ``key`` | <str> Complete s3 filepath. | *Examples:* | Key Should Not Exist | bucket_name | s3_file.txt | | Key Should Not Exist | bucket_name | folder/s3_file.txt | *DEPRECATED - this keyword will be removed in version 2.0.0* use `S3 Key Should Not Exist` instead - + bucket @@ -443,7 +651,35 @@ Requires: @param: ```path``` which is the bucket location/path name. | Local File Should Not Exist | bucket | path | *DEPRECATED - this keyword will be removed in version 2.0.0* - + + + +source_bucket + + +source_key + + +destination_bucket + + +destination_key + + +Copy a file from a S3 bucket to another bucket. + +| =Arguments= | =Description= | +| ``source_bucket`` | <str> Source bucket name. | +| ``source_key`` | <str> Complete source s3 filepath. | +| ``destination_bucket`` | <str> Destination bucket name. | +| ``destination_key`` | <str> complete destination s3 filepath. 
| + +*Examples:* +| S3 Copy Between Buckets | source-bucket-name | file.json | destination-bucket-name | bkp_file.json | +| S3 Copy Between Buckets | source-bucket-name | folder/file.json | destination-bucket-name | backup_folder/bkp_file.json | +Copy a file from a S3 bucket to another bucket. + + bucket @@ -458,7 +694,7 @@ Requires: @param: ```path``` which is the bucket location/path name. | S3 Create bucket | bucket_name | Creates S3 Bucket with the given bucket name - + bucket @@ -471,14 +707,14 @@ Requires: @param: ```path``` which is the bucket location/path name. | =Arguments= | =Description= | | ``bucket`` | <str> The bucket name. | -| ``key`` | <str> complete s3 filepath. | +| ``key`` | <str> Complete s3 filepath. | *Examples:* | S3 Delete File | bucket_name | file.txt | | S3 Delete File | bucket_name | folder/file.txt | Deletes the file from a bucket - + bucket @@ -494,7 +730,7 @@ Requires: @param: ```path``` which is the bucket location/path name. | =Arguments= | =Description= | | ``bucket`` | <str> The bucket name. | -| ``key`` | <str> complete s3 filepath. | +| ``key`` | <str> Complete s3 filepath. | | ``local_path`` | <str> Complete local filepath. | *Examples:* @@ -502,7 +738,47 @@ Requires: @param: ```path``` which is the bucket location/path name. | S3 Download File | bucket_name | folder/s3_file.txt | ${OUTPUTDIR}/file.txt | Download file from a bucket - + + + +bucket + + +key + + +Get the file content in S3 bucket. + +| =Arguments= | =Description= | +| ``bucket`` | <str> The bucket name. | +| ``key`` | <str> Complete s3 filepath. | + +*Examples:* +| Get S3 File Content | bucket_name | s3_file.json | +| Get S3 File Content | bucket_name | folder_name/s3_file.txt | +Get the file content in S3 bucket. + + + + +bucket + + +key + + +Get the file metadata in S3 bucket. + +| =Arguments= | =Description= | +| ``bucket`` | <str> The bucket name. | +| ``key`` | <str> Complete s3 filepath. 
| + +*Examples:* +| Get S3 File Metadata | bucket_name | s3_file.json | +| Get S3 File Metadata | bucket_name | folder_name/s3_file.txt | +Get the file metadata in S3 bucket. + + bucket @@ -515,14 +791,14 @@ Requires: @param: ```path``` which is the bucket location/path name. | =Arguments= | =Description= | | ``bucket`` | <str> The bucket name. | -| ``key`` | <str> complete s3 filepath. | +| ``key`` | <str> Complete s3 filepath. | *Examples:* | S3 Key Should Exist | bucket_name | s3_file.txt | | S3 Key Should Exist | bucket_name | folder/s3_file.txt | Check if the s3 object exist inside the bucket - + bucket @@ -535,14 +811,14 @@ Requires: @param: ```path``` which is the bucket location/path name. | =Arguments= | =Description= | | ``bucket`` | <str> The bucket name. | -| ``key`` | <str> complete s3 filepath. | +| ``key`` | <str> Complete s3 filepath. | *Examples:* | S3 Key Should Not Exist | bucket_name | s3_file.txt | | S3 Key Should Not Exist | bucket_name | folder/s3_file.txt | Check if the s3 object not exist inside the bucket - + bucket @@ -567,7 +843,24 @@ could be used to list only the files inside a folder for example. | S3 List Objects | bucket_name | folder_name/start_of_the_filename | List objects in a bucket. (up to 1,000) each request - + + + +url + + +The complete URL to use for the constructed S3 client. Normally, botocore will automatically construct the +appropriate URL to use when communicating with a service. You can specify a complete URL +(including the “http/https” scheme) to override this behavior. + +| =Arguments= | =Description= | +| ``url`` | <str> The complete endpoint URL. | + +*Examples:* +| S3 Set Endpoint Url | http://localhost:4566/ | +The complete URL to use for the constructed S3 client. Normally, botocore will automatically construct the appropriate URL to use when communicating with a service. You can specify a complete URL (including the “http/https” scheme) to override this behavior. 
+ + bucket @@ -583,7 +876,7 @@ could be used to list only the files inside a folder for example. | =Arguments= | =Description= | | ``bucket`` | <str> The bucket name. | -| ``key`` | <str> complete s3 filepath. | +| ``key`` | <str> Complete s3 filepath. | | ``local_path`` | <str> Complete local filepath. | *Examples:* @@ -591,7 +884,7 @@ could be used to list only the files inside a folder for example. | S3 Upload File | bucket_name | folder/s3_file.txt | ${CURDIR}/file.txt | Upload a file to the bucket - + bucket @@ -613,7 +906,7 @@ Upload a file to the bucket | =Arguments= | =Description= | | ``bucket`` | <str> The bucket name. | -| ``key`` | <str> complete s3 filepath. | +| ``key`` | <str> Complete s3 filepath. | | ``path`` | <str> Complete local filepath. | *Examples:* diff --git a/setup.py b/setup.py index c5a9a80..8fb8214 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ def readme(): setup( name='robotframework-aws', - version='0.1.0', + version='0.2.0', author="Dillan Teagle", author_email="softwaredeveloper@dillanteagle.me", description="A python package to test AWS services in Robot Framework", diff --git a/src/AWSLibrary/__init__.py b/src/AWSLibrary/__init__.py index 0fc9663..dab53a9 100644 --- a/src/AWSLibrary/__init__.py +++ b/src/AWSLibrary/__init__.py @@ -4,7 +4,8 @@ SessionKeywords, S3Keywords, ResourceKeywords, - DynamoKeywords + DynamoKeywords, + CloudWatchKeywords ) from AWSLibrary.version import get_version __version__ = get_version() @@ -45,5 +46,6 @@ def __init__(self): S3Keywords(self), ResourceKeywords(self), DynamoKeywords(self), + CloudWatchKeywords(self), ] DynamicCore.__init__(self, libraries) diff --git a/src/AWSLibrary/keywords/__init__.py b/src/AWSLibrary/keywords/__init__.py index 46def29..9b6babf 100644 --- a/src/AWSLibrary/keywords/__init__.py +++ b/src/AWSLibrary/keywords/__init__.py @@ -2,11 +2,13 @@ from AWSLibrary.keywords.s3 import S3Keywords from AWSLibrary.keywords.resource import ResourceKeywords from 
AWSLibrary.keywords.dynamo import DynamoKeywords +from AWSLibrary.keywords.cloudWatch import CloudWatchKeywords __all__ = [ SessionKeywords, S3Keywords, ResourceKeywords, - DynamoKeywords + DynamoKeywords, + CloudWatchKeywords ] diff --git a/src/AWSLibrary/keywords/cloudWatch.py b/src/AWSLibrary/keywords/cloudWatch.py new file mode 100644 index 0000000..1f4a162 --- /dev/null +++ b/src/AWSLibrary/keywords/cloudWatch.py @@ -0,0 +1,146 @@ +from AWSLibrary.librarycomponent import LibraryComponent +from robot.api.deco import keyword +from robot.api import logger +from datetime import datetime, timedelta +import time +import re + + +class CloudWatchKeywords(LibraryComponent): + + def __init__(self, library): + LibraryComponent.__init__(self, library) + self.endpoint_url = None + + @keyword('CloudWatch Set Endpoint Url') + def cloudwatch_set_endpoint(self, url): + """ The complete URL to use for the constructed CloudWatch client. Normally, botocore will automatically construct the + appropriate URL to use when communicating with a service. You can specify a complete URL + (including the “http/https” scheme) to override this behavior. + + | =Arguments= | =Description= | + | ``url`` | The complete endpoint URL. | + + *Examples:* + | CloudWatch Set Endpoint Url | http://localhost:4566/ | + """ + self.endpoint_url = url + + @keyword('CloudWatch Logs Insights') + def insights_query(self, log_group, query, start_time=60): + """Executes a query on CloudWatch Insights and return the found results in a list. + + | =Arguments= | =Description= | + | ``log_group`` | Log group name. | + | ``query`` | Aws query log format. | + | ``start_time`` | The beginning of the time range to query from now to ago in minutes. 
| + + --- + Use the same aws console ``query`` format in the argument, like this examples: + + - Filter only by a part of the message, return the timestamp and the message: + | ``fields @timestamp, @message | filter @message like 'some string inside message to search' | sort @timestamp desc | limit 5`` + - Filter by json path and part of the message, return only the message: + | ``fields @message | filter API.httpMethod = 'GET' and @message like 'Zp8beEeByQ0EDvg' | sort @timestamp desc | limit 20`` + - Find the 10 most expensive requests: + | ``filter @type = "REPORT" | fields @requestId, @billedDuration | sort by @billedDuration desc | limit 10`` + + For more information, see CloudWatch Logs Insights Query Syntax. + https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/CWL_QuerySyntax.html + --- + + *Examples:* + | ${logs} | CloudWatch Logs Insights | /aws/group-name | query | + | ${logs} | CloudWatch Logs Insights | /aws/group-name | query | start_time=120 | + """ + client = self.library.session.client('logs', endpoint_url=self.endpoint_url) + time_behind = (datetime.now() - timedelta(minutes=start_time)).timestamp() + query = client.start_query(logGroupName=log_group, + startTime=int(time_behind), + endTime=int(datetime.now().timestamp()), + queryString=query) + query_id = query['queryId'] + response = client.get_query_results(queryId=query_id) + while response['status'] == 'Running': + logger.debug("waiting for Logs Insights") + time.sleep(0.5) + response = client.get_query_results(queryId=query_id) + return response['results'] + + @keyword('CloudWatch Wait For Logs') + def wait_for_logs(self, log_group, filter_pattern, regex_pattern, seconds_behind=60, timeout=30, + not_found_fail=False): + """Wait until find the wanted log in cloudwatch. + + This keyword is used to wait in real time if the desired log appears inside the informed log group. + It works in a similar way to the existing CloudWatch filter in "Live Tail". 
+ + Return all the logs that match the informed regex in a list. + + | =Arguments= | =Description= | + | ``log_group`` | Log group name. | + | ``filter_pattern`` | Filter for CloudWatch. | + | ``regex_pattern`` | Regex pattern to search in filter results. | + | ``seconds_behind`` | How many seconds from now to ago, used to searching the logs. | + | ``timeout`` | Timeout in seconds to end the search. | + | ``not_found_fail`` | If set as True, the keyword will fail if not find any log | + + --- + For ``filter_pattern`` use the same as aws console filter patterns in Live tail. + https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/FilterAndPatternSyntax.html + + - Filter for json path in log: + | {$.foo.bar = some_string_value} + - Filter for json path with null value in log: + | {$.foo.bar IS NULL} + - Filter for INFO logs: + | INFO + - Filter for DEBUG logs: + | DEBUG + - Filter for anything in logs: + | " " + + For ``regex_pattern`` use the same regular expressions that robot framework uses in BuildIn Library. + --- + + Note: as boto3 takes some time to get the logs and apply the regex query to each one of them, depending on the + amount of log found, the keyword execution time may be slightly longer than the timeout. 
+ + *Examples:* + | ${logs} | CloudWatch Wait For Logs | /aws/group_name | {$.foo.bar = id_value} | 2024.*filename | + | ${logs} | CloudWatch Wait For Logs | /aws/group_name | INFO | \\\d+.*id_code | timeout=60 | + | ${logs} | CloudWatch Wait For Logs | /aws/group_name | " " | \\\w+.*some_code | not_found_fail=${True} | + """ + client = self.library.session.client('logs', endpoint_url=self.endpoint_url) + stream_response = client.describe_log_streams(logGroupName=log_group, + orderBy='LastEventTime', + descending=True, + limit=1) + latest_log_stream_name = stream_response["logStreams"][0]["logStreamName"] + logger.info("The latest stream is: %s" % latest_log_stream_name) + stream_response = client.describe_log_streams(logGroupName=log_group, + logStreamNamePrefix=latest_log_stream_name) + logger.debug(stream_response) + last_event = stream_response['logStreams'][0]['lastIngestionTime'] + logger.info("Last event: %s" % datetime.fromtimestamp(int(last_event) / 1000).strftime('%d-%m-%Y %H:%M:%S')) + last_event_delay = last_event - seconds_behind * 1000 + logger.info("Starting the log search from: %s" % datetime.fromtimestamp(int(last_event_delay) / 1000) + .strftime('%d-%m-%Y %H:%M:%S')) + events_match = [] + for i in range(int(timeout)): + response = client.filter_log_events(logGroupName=log_group, + startTime=last_event_delay, + filterPattern=filter_pattern) + logger.info("%s Total records found" % len(response["events"])) + logger.debug(response["events"]) + for event in response["events"]: + match_event = re.search(regex_pattern, event['message']) + if match_event: + events_match.append(event['message']) + if len(events_match) > 0: + break + else: + time.sleep(1) + if not_found_fail and len(events_match) == 0: + raise Exception(f"Log not found in CloudWatch inside {log_group} for {filter_pattern} and {regex_pattern}") + return events_match diff --git a/src/AWSLibrary/keywords/dynamo.py b/src/AWSLibrary/keywords/dynamo.py index 77e89b8..1a8e29f 100644 --- 
a/src/AWSLibrary/keywords/dynamo.py +++ b/src/AWSLibrary/keywords/dynamo.py @@ -8,9 +8,25 @@ class DynamoKeywords(LibraryComponent): def __init__(self, library): LibraryComponent.__init__(self, library) + self.endpoint_url = None + + @keyword('Dynamo Set Endpoint Url') + def dynamo_set_endpoint(self, url): + """ The complete URL to use for the constructed Dynamo client. Normally, botocore will automatically construct + the appropriate URL to use when communicating with a service. You can specify a complete URL + (including the “http/https” scheme) to override this behavior. + + | =Arguments= | =Description= | + | ``url`` | The complete endpoint URL. | + + *Examples:* + | Dynamo Set Endpoint Url | http://localhost:4566/ | + """ + self.endpoint_url = url @keyword('Dynamo Query Table') - def dynamo_query_table(self, table_name, partition_key, partition_value, sort_key=None, sort_value=None, projection=None): + def dynamo_query_table(self, table_name, partition_key, partition_value, sort_key=None, sort_value=None, + projection=None): """Queries a DynamoDB table based on the partition_key and his value. returns all the information found in a list of dictionaries. 
@@ -30,7 +46,7 @@ def dynamo_query_table(self, table_name, partition_key, partition_value, sort_ke | Dynamo Query Table | library-books | book_id | 123 | sort_key=book_code | sort_value=abc001 | | Dynamo Query Table | library-books | book_id | 123 | projection=value | """ - client = self.library.session.client('dynamodb') + client = self.library.session.client('dynamodb', endpoint_url=self.endpoint_url) if sort_key is None: expression = {':value': {'S': partition_value}} condition = f'{partition_key} = :value' @@ -69,7 +85,7 @@ def dynamo_update_item(self, table_name, json_dict): *Examples:* | Update Item | library-books | {"key": "value"} | """ - resource = self.library.session.resource('dynamodb') + resource = self.library.session.resource('dynamodb', endpoint_url=self.endpoint_url) response = resource.Table(table_name).put_item(Item=json_dict) logger.info(response) @@ -88,13 +104,14 @@ def dynamo_delete_item(self, table_name, partition_key, partition_value, sort_ke | Dynamo Delete Item | library-books | book_id | 123 | | Dynamo Delete Item | library-books | book_id | 123 | book_code | abc001 | """ - resource = self.library.session.resource('dynamodb') + resource = self.library.session.resource('dynamodb', endpoint_url=self.endpoint_url) key = {partition_key: partition_value, sort_key: sort_value} if sort_key else {partition_key: partition_value} response = resource.Table(table_name).delete_item(Key=key) logger.info(response) @keyword('Dynamo Remove Key') - def dynamo_remove_key(self, table_name, partition_key, partition_value, attribute_name, sort_key=None, sort_value=None): + def dynamo_remove_key(self, table_name, partition_key, partition_value, attribute_name, + sort_key=None, sort_value=None): """Removes a specific key in a DynamoDB item based on partition_key and sort key, if provided. 
| =Arguments= | =Description= | @@ -102,13 +119,15 @@ def dynamo_remove_key(self, table_name, partition_key, partition_value, attribut | ``partition_key`` | The key to search. | | ``partition_value`` | Value of the partition key. | | ``attribute_name`` | Key to remove, for nested keys use . to compose the path. | + | ``sort_key`` | (optional) The sort key to search. | + | ``sort_value`` | (optional) Value of the sort key. | *Examples:* | Dynamo Remove Key | library-books | book_id | 123 | quantity | | Dynamo Remove Key | library-books | book_id | 123 | book.value | - | Dynamo Remove Key | library-books | book_id | 123 | book | sort_key=book_code | sort_value=abc001 | + | Dynamo Remove Key | library-books | book_id | 123 | quantity | sort_key=book_code | sort_value=abc001 | """ - resource = self.library.session.resource('dynamodb') + resource = self.library.session.resource('dynamodb', endpoint_url=self.endpoint_url) expression, names = self._compose_expression(attribute_name, remove=True) logger.debug(f"UpdateExpression: {expression}") logger.debug(f"ExpressionAttributeNames: {names}") @@ -120,6 +139,47 @@ def dynamo_remove_key(self, table_name, partition_key, partition_value, attribut ) logger.info(response) + @keyword('Dynamo Update Key') + def dynamo_update_key(self, table_name, partition_key, partition_value, attribute_name, attribute_value, + sort_key=None, sort_value=None): + """Update a specific key in a DynamoDB item based on partition_key and sort key, if provided. + + Arguments: + - ``table_name``: name of the DynamoDB table. + - ``partition_key``: the partition key to search. + - ``partition_value``: the value of partition key. + - ``attribute_name``: the key to update. For nested keys, use . to compose the path + - ``attribute_value``: the new value of the attribute_name. + - ``sort_key``: the sort key to search. Default as None + - ``sort_value``: the value of sort key. 
Default as None + + | =Arguments= | =Description= | + | ``table_name`` | Name of the DynamoDB table. | + | ``partition_key`` | The key to search. | + | ``partition_value`` | Value of the partition key. | + | ``attribute_name`` | Key to update. For nested keys, use . to compose the path. | + | ``attribute_value`` | The new value of the attribute_name. | + | ``sort_key`` | (optional) The sort key to search. | + | ``sort_value`` | (optional) Value of the sort key. | + + *Examples:* + | Dynamo Update Key | library-books | book_id | 123 | quantity | 100 | + | Dynamo Update Key | library-books | book_id | 123 | book.value | 15 | + | Dynamo Update Key | library-books | book_id | 123 | quantity | 100 | sort_key=book_code | sort_value=abc001 | + """ + resource = self.library.session.resource('dynamodb', endpoint_url=self.endpoint_url) + expression, names = self._compose_expression(attribute_name) + logger.debug(f"UpdateExpression: {expression}") + logger.debug(f"ExpressionAttributeNames: {names}") + key = {partition_key: partition_value, sort_key: sort_value} if sort_key else {partition_key: partition_value} + result = resource.Table(table_name).update_item( + Key=key, + UpdateExpression=expression, + ExpressionAttributeNames=names, + ExpressionAttributeValues={':new_value': attribute_value} + ) + return result + @staticmethod def _compose_expression(attribute, remove=False): if "." not in attribute: diff --git a/src/AWSLibrary/keywords/s3.py b/src/AWSLibrary/keywords/s3.py index e4543bb..b45bf6c 100644 --- a/src/AWSLibrary/keywords/s3.py +++ b/src/AWSLibrary/keywords/s3.py @@ -6,6 +6,10 @@ class S3Keywords(LibraryComponent): + def __init__(self, library): + LibraryComponent.__init__(self, library) + self.endpoint_url = None + # begin of deprecated keywords @keyword('Create Bucket') @@ -128,7 +132,7 @@ def upload_file(self, bucket, key, path, endpoint_url=None): | =Arguments= | =Description= | | ``bucket`` | The bucket name. | - | ``key`` | complete s3 filepath. 
| + | ``key`` | Complete s3 filepath. | | ``path`` | Complete local filepath. | *Examples:* @@ -156,7 +160,7 @@ def key_should_exist(self, bucket, key, endpoint_url=None): | =Arguments= | =Description= | | ``bucket`` | The bucket name. | - | ``key`` | complete s3 filepath. | + | ``key`` | Complete s3 filepath. | *Examples:* | Key Should Exist | bucket_name | s3_file.txt | @@ -179,7 +183,7 @@ def key_should_not_exist(self, bucket, key, endpoint_url=None): | =Arguments= | =Description= | | ``bucket`` | The bucket name. | - | ``key`` | complete s3 filepath. | + | ``key`` | Complete s3 filepath. | *Examples:* | Key Should Not Exist | bucket_name | s3_file.txt | @@ -196,6 +200,19 @@ def key_should_not_exist(self, bucket, key, endpoint_url=None): raise Exception(e) # end of deprecated keywords + @keyword('S3 Set Endpoint Url') + def s3_set_endpoint(self, url): + """ The complete URL to use for the constructed S3 client. Normally, botocore will automatically construct the + appropriate URL to use when communicating with a service. You can specify a complete URL + (including the “http/https” scheme) to override this behavior. + + | =Arguments= | =Description= | + | ``url`` | The complete endpoint URL. | + + *Examples:* + | S3 Set Endpoint Url | http://localhost:4566/ | + """ + self.endpoint_url = url @keyword('S3 Create Bucket') def s3_create_bucket(self, bucket): @@ -238,7 +255,7 @@ def s3_list_objects(self, bucket, prefix=""): | S3 List Objects | bucket_name | folder_name | | S3 List Objects | bucket_name | folder_name/start_of_the_filename | """ - client = self.library.session.client('s3') + client = self.library.session.client('s3', endpoint_url=self.endpoint_url) try: response = client.list_objects_v2( Bucket=bucket, @@ -257,13 +274,13 @@ def s3_delete_file(self, bucket, key): | =Arguments= | =Description= | | ``bucket`` | The bucket name. | - | ``key`` | complete s3 filepath. | + | ``key`` | Complete s3 filepath. 
| *Examples:* | S3 Delete File | bucket_name | file.txt | | S3 Delete File | bucket_name | folder/file.txt | """ - client = self.library.session.client('s3') + client = self.library.session.client('s3', endpoint_url=self.endpoint_url) try: response = client.delete_object( Bucket=bucket, @@ -279,14 +296,14 @@ def s3_download_file_from_s3(self, bucket, key, local_filepath): | =Arguments= | =Description= | | ``bucket`` | The bucket name. | - | ``key`` | complete s3 filepath. | + | ``key`` | Complete s3 filepath. | | ``local_path`` | Complete local filepath. | *Examples:* | S3 Download File | bucket_name | s3_file.txt | ${OUTPUTDIR}/file.txt | | S3 Download File | bucket_name | folder/s3_file.txt | ${OUTPUTDIR}/file.txt | """ - client = self.library.session.client('s3') + client = self.library.session.client('s3', endpoint_url=self.endpoint_url) try: client.download_file(bucket, key, local_filepath) except botocore.exceptions.ClientError as e: @@ -298,14 +315,14 @@ def s3_upload_file(self, bucket, key, local_path): | =Arguments= | =Description= | | ``bucket`` | The bucket name. | - | ``key`` | complete s3 filepath. | + | ``key`` | Complete s3 filepath. | | ``local_path`` | Complete local filepath. | *Examples:* | S3 Upload File | bucket_name | s3_file.txt | ${CURDIR}/file.txt | | S3 Upload File | bucket_name | folder/s3_file.txt | ${CURDIR}/file.txt | """ - client = self.library.session.client('s3') + client = self.library.session.client('s3', endpoint_url=self.endpoint_url) try: client.upload_file(local_path, bucket, key) response = client.head_object( @@ -322,16 +339,17 @@ def s3_key_should_exist(self, bucket, key): | =Arguments= | =Description= | | ``bucket`` | The bucket name. | - | ``key`` | complete s3 filepath. | + | ``key`` | Complete s3 filepath. 
| *Examples:* | S3 Key Should Exist | bucket_name | s3_file.txt | | S3 Key Should Exist | bucket_name | folder/s3_file.txt | """ - client = self.library.session.client("s3") + client = self.library.session.client("s3", endpoint_url=self.endpoint_url) try: client.head_object(Bucket=bucket, Key=key) - except botocore.exceptions.ClientError: + except botocore.exceptions.ClientError as e: + logger.info(e) raise Exception(f"Key: {key} does not exist inside {bucket}") @keyword('S3 Key Should Not Exist') @@ -340,13 +358,13 @@ def s3_key_should_not_exist(self, bucket, key): | =Arguments= | =Description= | | ``bucket`` | The bucket name. | - | ``key`` | complete s3 filepath. | + | ``key`` | Complete s3 filepath. | *Examples:* | S3 Key Should Not Exist | bucket_name | s3_file.txt | | S3 Key Should Not Exist | bucket_name | folder/s3_file.txt | """ - client = self.library.session.client('s3') + client = self.library.session.client('s3', endpoint_url=self.endpoint_url) try: response = client.head_object(Bucket=bucket, Key=key) if response['ResponseMetadata']['HTTPStatusCode'] == 200: @@ -354,3 +372,61 @@ def s3_key_should_not_exist(self, bucket, key): except botocore.exceptions.ClientError as e: # noqa if e.response['ResponseMetadata']['HTTPStatusCode'] != 404: raise Exception(e) + + @keyword('S3 Get File Content') + def s3_get_content(self, bucket, key): + """ Get the file content in S3 bucket. + + | =Arguments= | =Description= | + | ``bucket`` | The bucket name. | + | ``key`` | Complete s3 filepath. 
| + + *Examples:* + | S3 Get File Content | bucket_name | s3_file.json | + | S3 Get File Content | bucket_name | folder_name/s3_file.txt | + """ + client = self.library.session.client('s3', endpoint_url=self.endpoint_url) + try: + s3_object = client.get_object(Bucket=bucket, Key=key) + except botocore.exceptions.ClientError as e: + logger.info(e) + raise Exception(f"Key: {key} does not exist inside {bucket}") + return s3_object['Body'].read() + + @keyword('S3 Get File Metadata') + def s3_get_metadata(self, bucket, key): + """ Get the file metadata in S3 bucket. + + | =Arguments= | =Description= | + | ``bucket`` | The bucket name. | + | ``key`` | Complete s3 filepath. | + + *Examples:* + | S3 Get File Metadata | bucket_name | s3_file.json | + | S3 Get File Metadata | bucket_name | folder_name/s3_file.txt | + """ + client = self.library.session.client('s3', endpoint_url=self.endpoint_url) + try: + metadata = client.head_object(Bucket=bucket, Key=key) + except botocore.exceptions.ClientError as e: + logger.info(e) + raise Exception(f"Key: {key} does not exist inside {bucket}") + return metadata + + @keyword('S3 Copy Between Buckets') + def s3_copy_file(self, source_bucket, source_key, destination_bucket, destination_key): + """ Copy a file from a S3 bucket to another bucket. + + | =Arguments= | =Description= | + | ``source_bucket`` | Source bucket name. | + | ``source_key`` | Complete source s3 filepath. | + | ``destination_bucket`` | Destination bucket name. | + | ``destination_key`` | Complete destination s3 filepath. 
| + + *Examples:* + | S3 Copy Between Buckets | source-bucket-name | file.json | destination-bucket-name | bkp_file.json | + | S3 Copy Between Buckets | source-bucket-name | folder/file.json | destination-bucket-name | backup_folder/bkp_file.json | + """ + client = self.library.session.client('s3', endpoint_url=self.endpoint_url) + copy_source = {'Bucket': source_bucket, 'Key': source_key} + client.copy(copy_source, destination_bucket, destination_key) diff --git a/src/AWSLibrary/keywords/session.py b/src/AWSLibrary/keywords/session.py index 748d077..9dcf785 100644 --- a/src/AWSLibrary/keywords/session.py +++ b/src/AWSLibrary/keywords/session.py @@ -73,7 +73,23 @@ def create_session_with_profile(self, region, profile): profile_name=profile, region_name=region ) - logger.info(f"Session created: {str(session)} using prifile: {profile}") + logger.info(f"Session created: {str(session)} using profile: {profile}") + self._cache.register(session, alias=region) + self.library.session = session + return session + + @keyword('Create Session With Role') + def create_session_with_role(self, region): + """ Create an AWS session in region using current role context. + + | =Arguments= | =Description= | + | ``region`` | The AWS region name. 
| + + *Examples:* + | Create Session With Role | eu-west-1 | + """ + session = boto3.Session(region_name=region) + logger.info(f"Session created: {str(session)} using current role context") self._cache.register(session, alias=region) self.library.session = session return session diff --git a/src/AWSLibrary/version.py b/src/AWSLibrary/version.py index 0e58ac0..3526155 100644 --- a/src/AWSLibrary/version.py +++ b/src/AWSLibrary/version.py @@ -1,4 +1,4 @@ -VERSION = '0.1.0' +VERSION = '0.2.0' def get_version(): diff --git a/tests/robot/static/downloaded_test.html b/tests/robot/static/downloaded_test.html deleted file mode 100644 index cb5fd7c..0000000 --- a/tests/robot/static/downloaded_test.html +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - Document - - -

This is a test

- - \ No newline at end of file diff --git a/tests/robot/static/test.html b/tests/robot/static/test.html deleted file mode 100644 index cb5fd7c..0000000 --- a/tests/robot/static/test.html +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - Document - - -

This is a test

- - \ No newline at end of file diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/unit/playground.py b/tests/unit/playground.py deleted file mode 100644 index 6f37d30..0000000 --- a/tests/unit/playground.py +++ /dev/null @@ -1,51 +0,0 @@ -# flake8: noqa - -from robot.utils import ConnectionCache -import os -from AWSLibrary import AWSLibrary -from AWSLibrary.keywords import SessionKeywords -import boto3 - -import unittest - -from AWSLibrary import AWSLibrary - - -class KeywordMethods: - - @classmethod - def setup(cls): - cls.aws = AWSLibrary() - - def kw_method_name(cls): - print(cls.aws.keywords) - -from os import getenv -a = getenv('ACCESS_KEY') -b = getenv('SECRET_KEY') - -def main(): - r = KeywordMethods.setup() - r.kw_method_name() - # lib = AWSLibrary.run_keyword(SessionKeywords, 'us-east-1', a, b) - # s = lib.create_session_with_keys("us-east-1", a, b) - - # lib.download_file_from_s3("zappastaticbin", "test.html", "static/downloaded_test.html") - - -if __name__ == "__main__": - main() - - - - -# class TestSession(unittest.TestCase): - -# def setUp(self): -# SessionKeywords.__bases__ = (Fake.imitate(AWSLibrary, LibraryComponent),) - -# def test_works(self): - -# def test_create_session_with_keys(self, monkeypatch): -# with patch('AWSLibrary.keywords.session.boto3') as mock_session: -# mock_session.Session = "Session(region_name='us-east-1')" diff --git a/tests/unit/test_kw_acceptance.py b/tests/unit/test_kw_acceptance.py deleted file mode 100644 index 811a6ec..0000000 --- a/tests/unit/test_kw_acceptance.py +++ /dev/null @@ -1,14 +0,0 @@ -import unittest - -from AWSLibrary import AWSLibrary - - -class TestKeywordMethods(unittest.TestCase): - - @classmethod - def setUpClass(cls): - cls.aws = AWSLibrary() - - def test_kw_method_name(self): - self.assertTrue(self.aws.keywords['Create Session With Keys']) - self.assertTrue(self.aws.attributes['create_session_with_keys']) \ No newline at 
end of file diff --git a/tests/unit/test_s3.py b/tests/unit/test_s3.py deleted file mode 100644 index 2f67319..0000000 --- a/tests/unit/test_s3.py +++ /dev/null @@ -1,12 +0,0 @@ -from AWSLibrary.keywords import S3Keywords -from AWSLibrary.base import LibraryComponent -from unittest.mock import patch, MagicMock -import unittest - - -# def test_upload_file_to_bucket(self): -# mock_gobject = MagicMock() -# mock_gobject.LibraryComponent.__bases__ = (object,) -# with patch.dict('sys.modules', gobject=mock_gobject): -# kw = S3Keywords(mock_gobject) -# kw.upload_file(self.bucket, self.key, self.path) \ No newline at end of file diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py deleted file mode 100644 index 4085d88..0000000 --- a/tests/unit/test_session.py +++ /dev/null @@ -1,42 +0,0 @@ -import unittest -import os -import sys -from unittest.mock import patch, MagicMock -testdir = os.path.dirname(__file__) -srcdir = '../src' -sys.path.insert(0, os.path.abspath(os.path.join(testdir, srcdir))) -from AWSLibrary import AWSLibrary -from AWSLibrary.keywords import SessionKeywords -from AWSLibrary.base import LibraryComponent, DynamicCore -from os import getenv -from boto3.session import Session - - -class TestSession(unittest.TestCase): - - def test_create_session_with_keys(self): - mock_gobject = MagicMock() - mock_gobject.LibraryComponent.__bases__ = (object,) - with patch.dict('sys.modules', gobject=mock_gobject): - kw = SessionKeywords(mock_gobject) - lib_session = kw.create_session_with_keys( - 'us-east-1', - getenv('ACCESS_KEY'), - getenv('ACCESS_KEY')) - - with patch('AWSLibrary.keywords.session.boto3') as mock_session: - ms = mock_session.Session = Session(region_name='us-east-1') - - self.assertEquals(str(lib_session), str(ms)) - - def test_create_session_with_profile(self): - mock_gobject = MagicMock() - mock_gobject.LibraryComponent.__bases__ = (object,) - with patch.dict('sys.modules', gobject=mock_gobject): - kw = SessionKeywords(mock_gobject) - 
lib_session = kw.create_session_with_profile( - 'us-east-1', - getenv('PROFILE')) - with patch('AWSLibrary.keywords.session.boto3') as mock_session: - ms = mock_session.Session = Session(region_name='us-east-1') - self.assertEquals(str(lib_session), str(ms)) \ No newline at end of file