From 985bf4f5a9f333b4248008f4023d6a239c473dcd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Przemys=C5=82aw=20D=C4=85bek?= <373530+szemek@users.noreply.github.com>
Date: Thu, 2 Feb 2023 13:44:53 +0100
Subject: [PATCH] Glue: batch_get_crawlers (#5896)

---
 moto/glue/models.py          |  7 +++++++
 moto/glue/responses.py       | 13 +++++++++++++
 tests/test_glue/test_glue.py | 13 +++++++++++++
 3 files changed, 33 insertions(+)

diff --git a/moto/glue/models.py b/moto/glue/models.py
index 26cfbbc0ee87..6b27c85dfef8 100644
--- a/moto/glue/models.py
+++ b/moto/glue/models.py
@@ -738,6 +738,13 @@ def batch_delete_partition(self, database_name, table_name, parts):
                 )
         return errors_output
 
+    def batch_get_crawlers(self, crawler_names):
+        crawlers = []
+        for crawler in self.get_crawlers():
+            if crawler.as_dict()["Name"] in crawler_names:
+                crawlers.append(crawler.as_dict())
+        return crawlers
+
 
 class FakeDatabase(BaseModel):
     def __init__(self, database_name, database_input):
diff --git a/moto/glue/responses.py b/moto/glue/responses.py
index 486e0bee7078..361848a3b752 100644
--- a/moto/glue/responses.py
+++ b/moto/glue/responses.py
@@ -498,3 +498,16 @@ def update_schema(self):
         description = self._get_param("Description")
         schema = self.glue_backend.update_schema(schema_id, compatibility, description)
         return json.dumps(schema)
+
+    def batch_get_crawlers(self):
+        crawler_names = self._get_param("CrawlerNames")
+        crawlers = self.glue_backend.batch_get_crawlers(crawler_names)
+        crawlers_not_found = list(
+            set(crawler_names) - set(map(lambda crawler: crawler["Name"], crawlers))
+        )
+        return json.dumps(
+            {
+                "Crawlers": crawlers,
+                "CrawlersNotFound": crawlers_not_found,
+            }
+        )
diff --git a/tests/test_glue/test_glue.py b/tests/test_glue/test_glue.py
index 5e3fcbfcef38..507b7affce8a 100644
--- a/tests/test_glue/test_glue.py
+++ b/tests/test_glue/test_glue.py
@@ -401,3 +401,16 @@ def test_untag_glue_crawler():
 
     resp = client.get_tags(ResourceArn=resource_arn)
     resp.should.have.key("Tags").equals({"key1": "value1", "key3": "value3"})
+
+
+@mock_glue
+def test_batch_get_crawlers():
+    client = create_glue_client()
+    crawler_name = create_test_crawler(client)
+
+    response = client.batch_get_crawlers(
+        CrawlerNames=[crawler_name, "crawler-not-found"]
+    )
+
+    response["Crawlers"].should.have.length_of(1)
+    response["CrawlersNotFound"].should.have.length_of(1)
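
Note (not part of the patch): a minimal usage sketch of the new mock exercised through boto3, assuming moto (with this change) and boto3 are installed. Only mock_glue, create_crawler, and batch_get_crawlers come from the libraries and the patch itself; the crawler name, IAM role ARN, and S3 target below are illustrative placeholders rather than values taken from the change.

import boto3
from moto import mock_glue


@mock_glue
def exercise_batch_get_crawlers():
    client = boto3.client("glue", region_name="us-east-1")

    # Register one crawler so the mock has something to return.
    # The role ARN and S3 path are placeholder values.
    client.create_crawler(
        Name="my-crawler",
        Role="arn:aws:iam::123456789012:role/my-glue-role",
        Targets={"S3Targets": [{"Path": "s3://my-bucket/prefix"}]},
    )

    response = client.batch_get_crawlers(
        CrawlerNames=["my-crawler", "crawler-not-found"]
    )

    # The known crawler comes back under "Crawlers"; the unknown name is
    # reported under "CrawlersNotFound", mirroring the real AWS API shape.
    assert [c["Name"] for c in response["Crawlers"]] == ["my-crawler"]
    assert response["CrawlersNotFound"] == ["crawler-not-found"]


if __name__ == "__main__":
    exercise_batch_get_crawlers()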