Merge branch 'release-1.29.15' into develop
* release-1.29.15:
  Bumping version to 1.29.15
  Update to latest partitions and endpoints
  Update to latest models
aws-sdk-python-automation committed Nov 22, 2022
2 parents 7b4b3bb + 4d972ec commit a942b57
Showing 13 changed files with 516 additions and 376 deletions.
22 changes: 22 additions & 0 deletions .changes/1.29.15.json
@@ -0,0 +1,22 @@
[
  {
    "category": "Endpoints",
    "description": "Resolve endpoint with default partition when no region is set",
    "type": "bugfix"
  },
  {
    "category": "s3",
    "description": "fixes missing x-amz-content-sha256 header for s3 object lambda",
    "type": "bugfix"
  },
  {
    "category": "``appflow``",
    "description": "Adding support for Amazon AppFlow to transfer the data to Amazon Redshift databases through Amazon Redshift Data API service. This feature will support the Redshift destination connector on both public and private accessible Amazon Redshift Clusters and Amazon Redshift Serverless.",
    "type": "api-change"
  },
  {
    "category": "``kinesisanalyticsv2``",
    "description": "Support for Apache Flink 1.15 in Kinesis Data Analytics.",
    "type": "api-change"
  }
]
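The ``s3`` bugfix in this list adds the ``x-amz-content-sha256`` header to requests signed for S3 Object Lambda, which previously could be rejected for the missing header. A minimal sketch of the kind of call affected, assuming a hypothetical Object Lambda access point ARN and object key:

```python
import boto3

# Assumes botocore >= 1.29.15, where SigV4 signing for S3 Object Lambda
# includes the x-amz-content-sha256 header. The ARN and key below are
# hypothetical placeholders.
s3 = boto3.client("s3", region_name="us-east-1")

response = s3.get_object(
    Bucket="arn:aws:s3-object-lambda:us-east-1:123456789012:accesspoint/my-olap",
    Key="example-object.txt",
)
print(response["Body"].read())
```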
5 changes: 0 additions & 5 deletions .changes/next-release/bugfix-Endpoints-16572.json

This file was deleted.

5 changes: 0 additions & 5 deletions .changes/next-release/bugfix-s3-26698.json

This file was deleted.

9 changes: 9 additions & 0 deletions CHANGELOG.rst
@@ -2,6 +2,15 @@
CHANGELOG
=========

1.29.15
=======

* bugfix:Endpoints: Resolve endpoint with default partition when no region is set
* bugfix:s3: fixes missing x-amz-content-sha256 header for s3 object lambda
* api-change:``appflow``: Adding support for Amazon AppFlow to transfer the data to Amazon Redshift databases through Amazon Redshift Data API service. This feature will support the Redshift destination connector on both public and private accessible Amazon Redshift Clusters and Amazon Redshift Serverless.
* api-change:``kinesisanalyticsv2``: Support for Apache Flink 1.15 in Kinesis Data Analytics.


1.29.14
=======

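The ``kinesisanalyticsv2`` entry above adds Apache Flink 1.15 as a selectable runtime. A minimal sketch of creating an application on the new runtime; the application name, role ARN, and code location are hypothetical placeholders, and only the ``RuntimeEnvironment`` value is tied to this release:

```python
import boto3

kda = boto3.client("kinesisanalyticsv2", region_name="us-east-1")

# Hypothetical names and ARNs; FLINK-1_15 is the runtime value added here.
kda.create_application(
    ApplicationName="example-flink-app",
    RuntimeEnvironment="FLINK-1_15",
    ServiceExecutionRole="arn:aws:iam::123456789012:role/example-kda-role",
    ApplicationConfiguration={
        "ApplicationCodeConfiguration": {
            "CodeContent": {
                "S3ContentLocation": {
                    "BucketARN": "arn:aws:s3:::example-bucket",
                    "FileKey": "flink-app.jar",
                }
            },
            "CodeContentType": "ZIPFILE",
        }
    },
)
```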
2 changes: 1 addition & 1 deletion botocore/__init__.py
@@ -16,7 +16,7 @@
import os
import re

__version__ = '1.29.14'
__version__ = '1.29.15'


class NullHandler(logging.Handler):
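A quick sketch (not part of the commit) to confirm that an installed botocore is at or past this release before relying on the fixes above:

```python
import botocore

# 1.29.15 is the version introduced by this commit.
version = tuple(int(part) for part in botocore.__version__.split(".")[:3])
assert version >= (1, 29, 15), f"botocore {botocore.__version__} predates 1.29.15"
```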
2 changes: 1 addition & 1 deletion botocore/data/appflow/2020-08-23/endpoint-rule-set-1.json
@@ -3,7 +3,7 @@
"parameters": {
"Region": {
"builtIn": "AWS::Region",
"required": false,
"required": true,
"documentation": "The AWS region used to dispatch the request.",
"type": "String"
},
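This rule-set change marks ``Region`` as required when resolving Amazon AppFlow endpoints, while the Endpoints bugfix above only covers the fallback to the default partition when no region is configured at all. A minimal sketch, assuming credentials come from the environment, of constructing the client with an explicit region so resolution never relies on that fallback:

```python
import boto3

# Region is passed explicitly; the appflow endpoint rule set now marks
# the Region parameter as required.
appflow = boto3.client("appflow", region_name="us-east-1")
print(appflow.meta.endpoint_url)
```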
54 changes: 43 additions & 11 deletions botocore/data/appflow/2020-08-23/service-2.json
@@ -683,6 +683,11 @@
"pattern":"\\S+",
"sensitive":true
},
"ClusterIdentifier":{
"type":"string",
"max":512,
"pattern":"\\S+"
},
"ConflictException":{
"type":"structure",
"members":{
@@ -1213,10 +1218,7 @@
},
"ConnectorProfileConfig":{
"type":"structure",
"required":[
"connectorProfileProperties",
"connectorProfileCredentials"
],
"required":["connectorProfileProperties"],
"members":{
"connectorProfileProperties":{
"shape":"ConnectorProfileProperties",
@@ -1802,13 +1804,23 @@
},
"documentation":"<p> The connector metadata specific to Amazon Connect Customer Profiles. </p>"
},
"DataApiRoleArn":{
"type":"string",
"max":512,
"pattern":"arn:aws:iam:.*:[0-9]+:.*"
},
"DataPullMode":{
"type":"string",
"enum":[
"Incremental",
"Complete"
]
},
"DatabaseName":{
"type":"string",
"max":512,
"pattern":"\\S+"
},
"DatabaseUrl":{
"type":"string",
"max":512,
@@ -3667,13 +3679,9 @@
},
"RedshiftConnectorProfileCredentials":{
"type":"structure",
"required":[
"username",
"password"
],
"members":{
"username":{
"shape":"Username",
"shape":"String",
"documentation":"<p> The name of the user. </p>"
},
"password":{
@@ -3686,7 +3694,6 @@
"RedshiftConnectorProfileProperties":{
"type":"structure",
"required":[
"databaseUrl",
"bucketName",
"roleArn"
],
@@ -3705,7 +3712,27 @@
},
"roleArn":{
"shape":"RoleArn",
"documentation":"<p> The Amazon Resource Name (ARN) of the IAM role. </p>"
"documentation":"<p> The Amazon Resource Name (ARN) of IAM role that grants Amazon Redshift read-only access to Amazon S3. For more information, and for the polices that you attach to this role, see <a href=\"https://docs.aws.amazon.com/appflow/latest/userguide/security_iam_service-role-policies.html#redshift-access-s3\">Allow Amazon Redshift to access your Amazon AppFlow data in Amazon S3</a>.</p>"
},
"dataApiRoleArn":{
"shape":"DataApiRoleArn",
"documentation":"<p>The Amazon Resource Name (ARN) of an IAM role that permits Amazon AppFlow to access your Amazon Redshift database through the Data API. For more information, and for the polices that you attach to this role, see <a href=\"https://docs.aws.amazon.com/appflow/latest/userguide/security_iam_service-role-policies.html#access-redshift\">Allow Amazon AppFlow to access Amazon Redshift databases with the Data API</a>.</p>"
},
"isRedshiftServerless":{
"shape":"Boolean",
"documentation":"<p>Indicates whether the connector profile defines a connection to an Amazon Redshift Serverless data warehouse.</p>"
},
"clusterIdentifier":{
"shape":"ClusterIdentifier",
"documentation":"<p>The unique ID that's assigned to an Amazon Redshift cluster.</p>"
},
"workgroupName":{
"shape":"WorkgroupName",
"documentation":"<p>The name of an Amazon Redshift workgroup.</p>"
},
"databaseName":{
"shape":"DatabaseName",
"documentation":"<p>The name of an Amazon Redshift database.</p>"
}
},
"documentation":"<p> The connector-specific profile properties when using Amazon Redshift. </p>"
@@ -5319,6 +5346,11 @@
"max":512,
"pattern":"[\\s\\w/!@#+=.-]*"
},
"WorkgroupName":{
"type":"string",
"max":512,
"pattern":"\\S+"
},
"WriteOperationType":{
"type":"string",
"documentation":"<p> The possible write operations in the destination connector. When this value is not provided, this defaults to the <code>INSERT</code> operation. </p>",
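The new Redshift profile properties in this diff (``dataApiRoleArn``, ``isRedshiftServerless``, ``clusterIdentifier``, ``workgroupName``, ``databaseName``) surface through ``CreateConnectorProfile``. A minimal sketch for a provisioned cluster, assuming hypothetical role ARNs, bucket, cluster, and database names; a Redshift Serverless profile would instead set ``isRedshiftServerless`` to ``True`` and supply ``workgroupName``:

```python
import boto3

appflow = boto3.client("appflow", region_name="us-east-1")

# All names and ARNs below are hypothetical placeholders. databaseUrl and
# the Redshift credentials are no longer required when the profile goes
# through the Data API (see the relaxed "required" lists in this diff).
appflow.create_connector_profile(
    connectorProfileName="example-redshift-profile",
    connectorType="Redshift",
    connectionMode="Public",
    connectorProfileConfig={
        "connectorProfileProperties": {
            "Redshift": {
                "bucketName": "example-appflow-staging-bucket",
                "roleArn": "arn:aws:iam::123456789012:role/example-redshift-s3-access",
                "dataApiRoleArn": "arn:aws:iam::123456789012:role/example-appflow-data-api",
                "isRedshiftServerless": False,
                "clusterIdentifier": "example-cluster",
                "databaseName": "dev",
            }
        }
    },
)
```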
