From bcec4ff4091de60703d9248ac740bc46cab8f507 Mon Sep 17 00:00:00 2001 From: AWS SDK for Ruby Date: Fri, 24 Jun 2022 18:07:58 +0000 Subject: [PATCH] Updated API models and rebuilt service gems. --- apis/glue/2017-03-31/api-2.json | 93 +++++++++ apis/glue/2017-03-31/docs-2.json | 82 +++++++- apis/rds-data/2018-08-01/docs-2.json | 12 +- gems/aws-partitions/CHANGELOG.md | 5 + gems/aws-partitions/VERSION | 2 +- gems/aws-partitions/partitions.json | 15 -- gems/aws-sdk-glue/CHANGELOG.md | 5 + gems/aws-sdk-glue/VERSION | 2 +- gems/aws-sdk-glue/lib/aws-sdk-glue.rb | 2 +- gems/aws-sdk-glue/lib/aws-sdk-glue/client.rb | 82 +++++++- .../lib/aws-sdk-glue/client_api.rb | 52 +++++ gems/aws-sdk-glue/lib/aws-sdk-glue/types.rb | 197 +++++++++++++++++- gems/aws-sdk-rdsdataservice/CHANGELOG.md | 5 + gems/aws-sdk-rdsdataservice/VERSION | 2 +- .../lib/aws-sdk-rdsdataservice.rb | 2 +- .../lib/aws-sdk-rdsdataservice/client.rb | 49 ++++- .../lib/aws-sdk-rdsdataservice/types.rb | 35 +++- 17 files changed, 591 insertions(+), 51 deletions(-) diff --git a/apis/glue/2017-03-31/api-2.json b/apis/glue/2017-03-31/api-2.json index e2fbc0cbaa4..81bba79329f 100644 --- a/apis/glue/2017-03-31/api-2.json +++ b/apis/glue/2017-03-31/api-2.json @@ -1953,6 +1953,20 @@ {"shape":"OperationTimeoutException"} ] }, + "ListCrawls":{ + "name":"ListCrawls", + "http":{ + "method":"POST", + "requestUri":"/" + }, + "input":{"shape":"ListCrawlsRequest"}, + "output":{"shape":"ListCrawlsResponse"}, + "errors":[ + {"shape":"EntityNotFoundException"}, + {"shape":"OperationTimeoutException"}, + {"shape":"InvalidInputException"} + ] + }, "ListCustomEntityTypes":{ "name":"ListCustomEntityTypes", "http":{ @@ -4191,6 +4205,7 @@ "LogStream":{"shape":"LogStream"} } }, + "CrawlId":{"type":"string"}, "CrawlList":{ "type":"list", "member":{"shape":"Crawl"} @@ -4231,6 +4246,34 @@ } }, "CrawlerConfiguration":{"type":"string"}, + "CrawlerHistory":{ + "type":"structure", + "members":{ + "CrawlId":{"shape":"CrawlId"}, + "State":{"shape":"CrawlerHistoryState"}, + "StartTime":{"shape":"Timestamp"}, + "EndTime":{"shape":"Timestamp"}, + "Summary":{"shape":"NameString"}, + "ErrorMessage":{"shape":"DescriptionString"}, + "LogGroup":{"shape":"LogGroup"}, + "LogStream":{"shape":"LogStream"}, + "MessagePrefix":{"shape":"MessagePrefix"}, + "DPUHour":{"shape":"NonNegativeDouble"} + } + }, + "CrawlerHistoryList":{ + "type":"list", + "member":{"shape":"CrawlerHistory"} + }, + "CrawlerHistoryState":{ + "type":"string", + "enum":[ + "RUNNING", + "COMPLETED", + "FAILED", + "STOPPED" + ] + }, "CrawlerLineageSettings":{ "type":"string", "enum":[ @@ -4316,6 +4359,18 @@ "DeltaTargets":{"shape":"DeltaTargetList"} } }, + "CrawlsFilter":{ + "type":"structure", + "members":{ + "FieldName":{"shape":"FieldName"}, + "FilterOperator":{"shape":"FilterOperator"}, + "FieldValue":{"shape":"GenericString"} + } + }, + "CrawlsFilterList":{ + "type":"list", + "member":{"shape":"CrawlsFilter"} + }, "CreateBlueprintRequest":{ "type":"structure", "required":[ @@ -5720,6 +5775,16 @@ "type":"string", "pattern":"[\\s\\S]*" }, + "FieldName":{ + "type":"string", + "enum":[ + "CRAWL_ID", + "STATE", + "START_TIME", + "END_TIME", + "DPU_HOUR" + ] + }, "FieldType":{"type":"string"}, "FillMissingValues":{ "type":"structure", @@ -5785,6 +5850,17 @@ "ISNULL" ] }, + "FilterOperator":{ + "type":"string", + "enum":[ + "GT", + "GE", + "LT", + "LE", + "EQ", + "NE" + ] + }, "FilterString":{ "type":"string", "max":2048, @@ -7689,6 +7765,23 @@ "NextToken":{"shape":"Token"} } }, + "ListCrawlsRequest":{ + 
"type":"structure", + "required":["CrawlerName"], + "members":{ + "CrawlerName":{"shape":"NameString"}, + "MaxResults":{"shape":"PageSize"}, + "Filters":{"shape":"CrawlsFilterList"}, + "NextToken":{"shape":"Token"} + } + }, + "ListCrawlsResponse":{ + "type":"structure", + "members":{ + "Crawls":{"shape":"CrawlerHistoryList"}, + "NextToken":{"shape":"Token"} + } + }, "ListCustomEntityTypesRequest":{ "type":"structure", "members":{ diff --git a/apis/glue/2017-03-31/docs-2.json b/apis/glue/2017-03-31/docs-2.json index 5926a2bd075..2051ffd6ad5 100644 --- a/apis/glue/2017-03-31/docs-2.json +++ b/apis/glue/2017-03-31/docs-2.json @@ -128,6 +128,7 @@ "ImportCatalogToGlue": "

Imports an existing Amazon Athena Data Catalog to Glue.

", "ListBlueprints": "

Lists all the blueprint names in an account.

", "ListCrawlers": "

Retrieves the names of all crawler resources in this Amazon Web Services account, or the resources with the specified tag. This operation allows you to see which resources are available in your account, and their names.

This operation takes the optional Tags field, which you can use as a filter on the response so that tagged resources can be retrieved as a group. If you choose to use tags filtering, only resources with the tag are retrieved.

", + "ListCrawls": "

Returns all the crawls of a specified crawler. Only crawls that have occurred since the launch date of the crawler history feature are returned, and only up to 12 months of crawls are retained. Older crawls will not be returned.

You may use this API to retrieve all the crawls of a specified crawler, to retrieve the crawls of a specified crawler within a limited count, to retrieve the crawls of a specified crawler in a specific time range, or to retrieve the crawls of a specified crawler with a particular state, crawl ID, or DPU hour value.

", "ListCustomEntityTypes": "

Lists all the custom patterns that have been created.

", "ListDevEndpoints": "

Retrieves the names of all DevEndpoint resources in this Amazon Web Services account, or the resources with the specified tag. This operation allows you to see which resources are available in your account, and their names.

This operation takes the optional Tags field, which you can use as a filter on the response so that tagged resources can be retrieved as a group. If you choose to use tags filtering, only resources with the tag are retrieved.

", "ListJobs": "

Retrieves the names of all job resources in this Amazon Web Services account, or the resources with the specified tag. This operation allows you to see which resources are available in your account, and their names.

This operation takes the optional Tags field, which you can use as a filter on the response so that tagged resources can be retrieved as a group. If you choose to use tags filtering, only resources with the tag are retrieved.

", @@ -1331,6 +1332,12 @@ "CrawlList$member": null } }, + "CrawlId": { + "base": null, + "refs": { + "CrawlerHistory$CrawlId": "

A UUID identifier for each crawl.

" + } + }, "CrawlList": { "base": null, "refs": { @@ -1359,6 +1366,24 @@ "UpdateCrawlerRequest$Configuration": "

Crawler configuration information. This versioned JSON string allows users to specify aspects of a crawler's behavior. For more information, see Configuring a Crawler.

" } }, + "CrawlerHistory": { + "base": "

Contains the information for a run of a crawler.

", + "refs": { + "CrawlerHistoryList$member": null + } + }, + "CrawlerHistoryList": { + "base": null, + "refs": { + "ListCrawlsResponse$Crawls": "

A list of CrawlerHistory objects representing the crawl runs that meet your criteria.

" + } + }, + "CrawlerHistoryState": { + "base": null, + "refs": { + "CrawlerHistory$State": "

The state of the crawl.

" + } + }, "CrawlerLineageSettings": { "base": null, "refs": { @@ -1436,6 +1461,18 @@ "UpdateCrawlerRequest$Targets": "

A list of targets to crawl.

" } }, + "CrawlsFilter": { + "base": "

A field, comparator, and value that you can use to filter the crawler runs for a specified crawler.

", + "refs": { + "CrawlsFilterList$member": null + } + }, + "CrawlsFilterList": { + "base": null, + "refs": { + "ListCrawlsRequest$Filters": "

Filters the crawls by the criteria you specify in a list of CrawlsFilter objects.

" + } + }, "CreateBlueprintRequest": { "base": null, "refs": { @@ -2130,6 +2167,7 @@ "ConnectionInput$Description": "

The description of the connection.

", "Crawl$ErrorMessage": "

The error message associated with the crawl.

", "Crawler$Description": "

A description of the crawler.

", + "CrawlerHistory$ErrorMessage": "

If an error occurred, the error message associated with the crawl.

", "CreateCrawlerRequest$Description": "

A description of the new crawler.

", "CreateJobRequest$Description": "

Description of the job being defined.

", "CreateMLTransformRequest$Description": "

A description of the machine learning transform that is being defined. The default is an empty string.

", @@ -2564,6 +2602,12 @@ "CustomCode$Code": "

The custom code that is used to perform the data transformation.

" } }, + "FieldName": { + "base": null, + "refs": { + "CrawlsFilter$FieldName": "

A key used to filter the crawler runs for a specified crawler. Valid field names are CRAWL_ID (a string representing the UUID identifier for a crawl), STATE (a string representing the state of the crawl), START_TIME and END_TIME (the epoch timestamp in milliseconds), and DPU_HOUR (the number of data processing unit (DPU) hours used for the crawl).

" + } + }, "FieldType": { "base": null, "refs": { @@ -2607,6 +2651,12 @@ "FilterExpression$Operation": "

The type of operation to perform in the expression.

" } }, + "FilterOperator": { + "base": null, + "refs": { + "CrawlsFilter$FilterOperator": "

A defined comparator that operates on the value. The available operators are GT (greater than), GE (greater than or equal to), LT (less than), LE (less than or equal to), EQ (equal to), and NE (not equal to).

" + } + }, "FilterString": { "base": null, "refs": { @@ -2706,6 +2756,7 @@ "AdditionalPlanOptionsMap$value": null, "Blueprint$BlueprintLocation": "

Specifies the path in Amazon S3 where the blueprint is published.

", "Blueprint$BlueprintServiceLocation": "

Specifies a path in Amazon S3 where the blueprint is copied when you call CreateBlueprint/UpdateBlueprint to register the blueprint in Glue.

", + "CrawlsFilter$FieldValue": "

The value provided for comparison on the crawl field.

", "CreateDevEndpointRequest$EndpointName": "

The name to be assigned to the new DevEndpoint.

", "CreateDevEndpointRequest$SubnetId": "

The subnet ID for the new DevEndpoint to use.

", "CreateDevEndpointRequest$PublicKey": "

The public key to be used by this DevEndpoint for authentication. This attribute is provided for backward compatibility because the recommended attribute to use is public keys.

", @@ -3992,11 +4043,11 @@ } }, "LakeFormationConfiguration": { - "base": "

Specifies AWS Lake Formation configuration settings for the crawler.

", + "base": "

Specifies Lake Formation configuration settings for the crawler.

", "refs": { - "Crawler$LakeFormationConfiguration": "

Specifies whether the crawler should use AWS Lake Formation credentials for the crawler instead of the IAM role credentials.

", - "CreateCrawlerRequest$LakeFormationConfiguration": null, - "UpdateCrawlerRequest$LakeFormationConfiguration": null + "Crawler$LakeFormationConfiguration": "

Specifies whether the crawler should use Lake Formation credentials for the crawler instead of the IAM role credentials.

", + "CreateCrawlerRequest$LakeFormationConfiguration": "

Specifies Lake Formation configuration settings for the crawler.

", + "UpdateCrawlerRequest$LakeFormationConfiguration": "

Specifies Lake Formation configuration settings for the crawler.

" } }, "Language": { @@ -4072,6 +4123,16 @@ "refs": { } }, + "ListCrawlsRequest": { + "base": null, + "refs": { + } + }, + "ListCrawlsResponse": { + "base": null, + "refs": { + } + }, "ListCustomEntityTypesRequest": { "base": null, "refs": { @@ -4212,6 +4273,7 @@ "base": null, "refs": { "Crawl$LogGroup": "

The log group associated with the crawl.

", + "CrawlerHistory$LogGroup": "

The log group associated with the crawl.

", "LastCrawlInfo$LogGroup": "

The log group for the last crawl.

" } }, @@ -4219,6 +4281,7 @@ "base": null, "refs": { "Crawl$LogStream": "

The log stream associated with the crawl.

", + "CrawlerHistory$LogStream": "

The log stream associated with the crawl.

", "LastCrawlInfo$LogStream": "

The log stream for the last crawl.

" } }, @@ -4364,6 +4427,7 @@ "MessagePrefix": { "base": null, "refs": { + "CrawlerHistory$MessagePrefix": "

The prefix for a CloudWatch message about this crawl.

", "LastCrawlInfo$MessagePrefix": "

The prefix for a message about this crawl.

" } }, @@ -4532,6 +4596,7 @@ "ConnectionPasswordEncryption$AwsKmsKeyId": "

A KMS key that is used to encrypt the connection password.

If connection password protection is enabled, the caller of CreateConnection and UpdateConnection needs at least kms:Encrypt permission on the specified KMS key, to encrypt passwords before storing them in the Data Catalog.

You can set the decrypt permission to enable or restrict access on the password key according to your security requirements.

", "ContextWords$member": null, "Crawler$Name": "

The name of the crawler.

", + "CrawlerHistory$Summary": "

A run summary for the specific crawl in JSON. Contains the catalog tables and partitions that were added, updated, or deleted.

", "CrawlerMetrics$CrawlerName": "

The name of the crawler.

", "CrawlerNameList$member": null, "CreateBlueprintResponse$Name": "

Returns the name of the blueprint that was registered.

", @@ -4680,6 +4745,7 @@ "JsonClassifier$Name": "

The name of the classifier.

", "KeyList$member": null, "KeySchemaElement$Name": "

The name of a partition key.

", + "ListCrawlsRequest$CrawlerName": "

The name of the crawler whose runs you want to retrieve.

", "ListStatementsRequest$SessionId": "

The Session ID of the statements.

", "ListTriggersRequest$DependentJobName": "

The name of the job for which to retrieve triggers. The trigger that can start this job is returned. If there is no such trigger, all triggers are returned.

", "MLTransform$Name": "

A user-defined name for the machine learning transform. Names are not guaranteed unique and can be changed at any time.

", @@ -4886,6 +4952,7 @@ "base": null, "refs": { "BinaryColumnStatisticsData$AverageLength": "

The average bit sequence length in the column.

", + "CrawlerHistory$DPUHour": "

The number of data processing unit (DPU) hours used for the crawl.

", "CrawlerMetrics$TimeLeftSeconds": "

The estimated time left to complete a running crawl.

", "CrawlerMetrics$LastRuntimeSeconds": "

The duration of the crawler's most recent run, in seconds.

", "CrawlerMetrics$MedianRuntimeSeconds": "

The median duration of this crawler's runs, in seconds.

", @@ -4984,7 +5051,7 @@ "GetWorkflowRequest$IncludeGraph": "

Specifies whether to include a graph when returning the workflow resource metadata.

", "GetWorkflowRunRequest$IncludeGraph": "

Specifies whether to include the workflow graph in the response.

", "GetWorkflowRunsRequest$IncludeGraph": "

Specifies whether to include the workflow graph in the response.

", - "LakeFormationConfiguration$UseLakeFormationCredentials": "

Specifies whether to use AWS Lake Formation credentials for the crawler instead of the IAM role credentials.

", + "LakeFormationConfiguration$UseLakeFormationCredentials": "

Specifies whether to use Lake Formation credentials for the crawler instead of the IAM role credentials.

", "MongoDBTarget$ScanAll": "

Indicates whether to scan all the records, or to sample rows from the table. Scanning all the records can take a long time when the table is not a high throughput table.

A value of true means to scan all records, while a value of false means to sample the records. If no value is specified, the value defaults to true.

", "UpdateCsvClassifierRequest$DisableValueTrimming": "

Specifies not to trim values before identifying the type of column values. The default value is true.

", "UpdateCsvClassifierRequest$AllowSingleColumn": "

Enables the processing of files that contain only one column.

" @@ -5214,6 +5281,7 @@ "GetWorkflowRunsRequest$MaxResults": "

The maximum number of workflow runs to be included in the response.

", "ListBlueprintsRequest$MaxResults": "

The maximum size of a list to return.

", "ListCrawlersRequest$MaxResults": "

The maximum size of a list to return.

", + "ListCrawlsRequest$MaxResults": "

The maximum number of results to return. The default is 20, and the maximum is 100.

", "ListCustomEntityTypesRequest$MaxResults": "

The maximum number of results to return.

", "ListDevEndpointsRequest$MaxResults": "

The maximum size of a list to return.

", "ListJobsRequest$MaxResults": "

The maximum size of a list to return.

", @@ -6765,6 +6833,8 @@ "Connection$LastUpdatedTime": "

The last time that this connection definition was updated.

", "Crawler$CreationTime": "

The time that the crawler was created.

", "Crawler$LastUpdated": "

The time that the crawler was last updated.

", + "CrawlerHistory$StartTime": "

The date and time on which the crawl started.

", + "CrawlerHistory$EndTime": "

The date and time on which the crawl ended.

", "CsvClassifier$CreationTime": "

The time that this classifier was registered.

", "CsvClassifier$LastUpdated": "

The time that this classifier was last updated.

", "Database$CreateTime": "

The time at which the metadata database was created in the catalog.

", @@ -6870,6 +6940,8 @@ "GetUserDefinedFunctionsResponse$NextToken": "

A continuation token, if the list of functions returned does not include the last requested function.

", "ListCrawlersRequest$NextToken": "

A continuation token, if this is a continuation request.

", "ListCrawlersResponse$NextToken": "

A continuation token, if the returned list does not contain the last crawler name available.

", + "ListCrawlsRequest$NextToken": "

A continuation token, if this is a continuation call.

", + "ListCrawlsResponse$NextToken": "

A continuation token for paginating the returned list of crawls, returned if the current segment of the list is not the last.

", "SearchTablesRequest$NextToken": "

A continuation token, included if this is a continuation call.

", "SearchTablesResponse$NextToken": "

A continuation token, present if the current list segment is not the last.

" } diff --git a/apis/rds-data/2018-08-01/docs-2.json b/apis/rds-data/2018-08-01/docs-2.json index bd6146ba1f8..4d34c07df21 100644 --- a/apis/rds-data/2018-08-01/docs-2.json +++ b/apis/rds-data/2018-08-01/docs-2.json @@ -2,11 +2,11 @@ "version": "2.0", "service": "

Amazon RDS Data Service

Amazon RDS provides an HTTP endpoint to run SQL statements on an Amazon Aurora Serverless DB cluster. To run these statements, you work with the Data Service API.

For more information about the Data Service API, see Using the Data API in the Amazon Aurora User Guide.

", "operations": { - "BatchExecuteStatement": "

Runs a batch SQL statement over an array of data.

You can run bulk update and insert operations for multiple records using a DML statement with different parameter sets. Bulk operations can provide a significant performance improvement over individual insert and update operations.

If a call isn't part of a transaction because it doesn't include the transactionID parameter, changes that result from the call are committed automatically.

", + "BatchExecuteStatement": "

Runs a batch SQL statement over an array of data.

You can run bulk update and insert operations for multiple records using a DML statement with different parameter sets. Bulk operations can provide a significant performance improvement over individual insert and update operations.

If a call isn't part of a transaction because it doesn't include the transactionID parameter, changes that result from the call are committed automatically.

There isn't a fixed upper limit on the number of parameter sets. However, the maximum size of the HTTP request submitted through the Data API is 4 MiB. If the request exceeds this limit, the Data API returns an error and doesn't process the request. This 4-MiB limit includes the size of the HTTP headers and the JSON notation in the request. Thus, the number of parameter sets that you can include depends on a combination of factors, such as the size of the SQL statement and the size of each parameter set.

The response size limit is 1 MiB. If the call returns more than 1 MiB of response data, the call is terminated.

", "BeginTransaction": "

Starts a SQL transaction.

A transaction can run for a maximum of 24 hours. A transaction is terminated and rolled back automatically after 24 hours. A transaction times out if no calls use its transaction ID in three minutes. If a transaction times out before it's committed, it's rolled back automatically. DDL statements inside a transaction cause an implicit commit. We recommend that you run each DDL statement in a separate ExecuteStatement call with continueAfterTimeout enabled.
", "CommitTransaction": "

Ends a SQL transaction started with the BeginTransaction operation and commits the changes.

", "ExecuteSql": "

Runs one or more SQL statements.

This operation is deprecated. Use the BatchExecuteStatement or ExecuteStatement operation.

", - "ExecuteStatement": "

Runs a SQL statement against a database.

If a call isn't part of a transaction because it doesn't include the transactionID parameter, changes that result from the call are committed automatically.

If the binary response data from the database is more than 1 MB, the call is terminated.

", + "ExecuteStatement": "

Runs a SQL statement against a database.

If a call isn't part of a transaction because it doesn't include the transactionID parameter, changes that result from the call are committed automatically.

If the binary response data from the database is more than 1 MB, the call is terminated.

", "RollbackTransaction": "

Performs a rollback of a transaction. Rolling back a transaction cancels its changes.

" }, "shapes": { @@ -19,15 +19,15 @@ "base": null, "refs": { "BatchExecuteStatementRequest$resourceArn": "

The Amazon Resource Name (ARN) of the Aurora Serverless DB cluster.

", - "BatchExecuteStatementRequest$secretArn": "

The name or ARN of the secret that enables access to the DB cluster.

", + "BatchExecuteStatementRequest$secretArn": "

The ARN of the secret that enables access to the DB cluster. Enter the database user name and password for the credentials in the secret.

For information about creating the secret, see Create a database secret.

", "BeginTransactionRequest$resourceArn": "

The Amazon Resource Name (ARN) of the Aurora Serverless DB cluster.

", "BeginTransactionRequest$secretArn": "

The name or ARN of the secret that enables access to the DB cluster.

", "CommitTransactionRequest$resourceArn": "

The Amazon Resource Name (ARN) of the Aurora Serverless DB cluster.

", "CommitTransactionRequest$secretArn": "

The name or ARN of the secret that enables access to the DB cluster.

", - "ExecuteSqlRequest$awsSecretStoreArn": "

The Amazon Resource Name (ARN) of the secret that enables access to the DB cluster.

", + "ExecuteSqlRequest$awsSecretStoreArn": "

The Amazon Resource Name (ARN) of the secret that enables access to the DB cluster. Enter the database user name and password for the credentials in the secret.

For information about creating the secret, see Create a database secret.

", "ExecuteSqlRequest$dbClusterOrInstanceArn": "

The ARN of the Aurora Serverless DB cluster.

", "ExecuteStatementRequest$resourceArn": "

The Amazon Resource Name (ARN) of the Aurora Serverless DB cluster.

", - "ExecuteStatementRequest$secretArn": "

The name or ARN of the secret that enables access to the DB cluster.

", + "ExecuteStatementRequest$secretArn": "

The ARN of the secret that enables access to the DB cluster. Enter the database user name and password for the credentials in the secret.

For information about creating the secret, see Create a database secret.

", "RollbackTransactionRequest$resourceArn": "

The Amazon Resource Name (ARN) of the Aurora Serverless DB cluster.

", "RollbackTransactionRequest$secretArn": "

The name or ARN of the secret that enables access to the DB cluster.

" } @@ -390,7 +390,7 @@ "SqlStatement": { "base": null, "refs": { - "BatchExecuteStatementRequest$sql": "

The SQL statement to run.

", + "BatchExecuteStatementRequest$sql": "

The SQL statement to run. Don't include a semicolon (;) at the end of the SQL statement.

", "ExecuteSqlRequest$sqlStatements": "

One or more SQL statements to run on the DB cluster.

You can separate SQL statements from each other with a semicolon (;). Any valid SQL statement is permitted, including data definition, data manipulation, and commit statements.

", "ExecuteStatementRequest$sql": "

The SQL statement to run.

" } diff --git a/gems/aws-partitions/CHANGELOG.md b/gems/aws-partitions/CHANGELOG.md index 7effca138ae..c4fe9edfa63 100644 --- a/gems/aws-partitions/CHANGELOG.md +++ b/gems/aws-partitions/CHANGELOG.md @@ -1,6 +1,11 @@ Unreleased Changes ------------------ +1.601.0 (2022-06-24) +------------------ + +* Feature - Updated the partitions source data the determines the AWS service regions and endpoints. + 1.600.0 (2022-06-17) ------------------ diff --git a/gems/aws-partitions/VERSION b/gems/aws-partitions/VERSION index dd2500bdf3e..ae61dab5f3d 100644 --- a/gems/aws-partitions/VERSION +++ b/gems/aws-partitions/VERSION @@ -1 +1 @@ -1.600.0 +1.601.0 diff --git a/gems/aws-partitions/partitions.json b/gems/aws-partitions/partitions.json index 5c173af813f..704a134e19f 100644 --- a/gems/aws-partitions/partitions.json +++ b/gems/aws-partitions/partitions.json @@ -9815,21 +9815,6 @@ } } }, - "redshift-serverless" : { - "endpoints" : { - "ap-northeast-1" : { }, - "ap-northeast-2" : { }, - "ap-southeast-1" : { }, - "ap-southeast-2" : { }, - "eu-central-1" : { }, - "eu-north-1" : { }, - "eu-west-1" : { }, - "eu-west-2" : { }, - "us-east-1" : { }, - "us-east-2" : { }, - "us-west-2" : { } - } - }, "rekognition" : { "endpoints" : { "ap-northeast-1" : { }, diff --git a/gems/aws-sdk-glue/CHANGELOG.md b/gems/aws-sdk-glue/CHANGELOG.md index 8d5ad0366f3..a01848d252c 100644 --- a/gems/aws-sdk-glue/CHANGELOG.md +++ b/gems/aws-sdk-glue/CHANGELOG.md @@ -1,6 +1,11 @@ Unreleased Changes ------------------ +1.113.0 (2022-06-24) +------------------ + +* Feature - This release enables the new ListCrawls API for viewing the AWS Glue Crawler run history. + 1.112.0 (2022-05-17) ------------------ diff --git a/gems/aws-sdk-glue/VERSION b/gems/aws-sdk-glue/VERSION index 628cac6f078..7bd0f252f61 100644 --- a/gems/aws-sdk-glue/VERSION +++ b/gems/aws-sdk-glue/VERSION @@ -1 +1 @@ -1.112.0 +1.113.0 diff --git a/gems/aws-sdk-glue/lib/aws-sdk-glue.rb b/gems/aws-sdk-glue/lib/aws-sdk-glue.rb index 1bffb253a32..c382999b64c 100644 --- a/gems/aws-sdk-glue/lib/aws-sdk-glue.rb +++ b/gems/aws-sdk-glue/lib/aws-sdk-glue.rb @@ -48,6 +48,6 @@ # @!group service module Aws::Glue - GEM_VERSION = '1.112.0' + GEM_VERSION = '1.113.0' end diff --git a/gems/aws-sdk-glue/lib/aws-sdk-glue/client.rb b/gems/aws-sdk-glue/lib/aws-sdk-glue/client.rb index 8c8e3e8a8e7..2dc0f4cb510 100644 --- a/gems/aws-sdk-glue/lib/aws-sdk-glue/client.rb +++ b/gems/aws-sdk-glue/lib/aws-sdk-glue/client.rb @@ -2265,7 +2265,7 @@ def create_connection(params = {}, options = {}) # Specifies data lineage configuration settings for the crawler. # # @option params [Types::LakeFormationConfiguration] :lake_formation_configuration - # Specifies AWS Lake Formation configuration settings for the crawler. + # Specifies Lake Formation configuration settings for the crawler. # # @option params [String] :configuration # Crawler configuration information. This versioned JSON string allows @@ -10918,6 +10918,82 @@ def list_crawlers(params = {}, options = {}) req.send_request(options) end + # Returns all the crawls of a specified crawler. Returns only the crawls + # that have occurred since the launch date of the crawler history + # feature, and only retains up to 12 months of crawls. Older crawls will + # not be returned. + # + # You may use this API to: + # + # * Retrive all the crawls of a specified crawler. + # + # * Retrieve all the crawls of a specified crawler within a limited + # count. + # + # * Retrieve all the crawls of a specified crawler in a specific time + # range. 
+ # + # * Retrieve all the crawls of a specified crawler with a particular + # state, crawl ID, or DPU hour value. + # + # @option params [required, String] :crawler_name + # The name of the crawler whose runs you want to retrieve. + # + # @option params [Integer] :max_results + # The maximum number of results to return. The default is 20, and + # maximum is 100. + # + # @option params [Array] :filters + # Filters the crawls by the criteria you specify in a list of + # `CrawlsFilter` objects. + # + # @option params [String] :next_token + # A continuation token, if this is a continuation call. + # + # @return [Types::ListCrawlsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods: + # + # * {Types::ListCrawlsResponse#crawls #crawls} => Array<Types::CrawlerHistory> + # * {Types::ListCrawlsResponse#next_token #next_token} => String + # + # @example Request syntax with placeholder values + # + # resp = client.list_crawls({ + # crawler_name: "NameString", # required + # max_results: 1, + # filters: [ + # { + # field_name: "CRAWL_ID", # accepts CRAWL_ID, STATE, START_TIME, END_TIME, DPU_HOUR + # filter_operator: "GT", # accepts GT, GE, LT, LE, EQ, NE + # field_value: "GenericString", + # }, + # ], + # next_token: "Token", + # }) + # + # @example Response structure + # + # resp.crawls #=> Array + # resp.crawls[0].crawl_id #=> String + # resp.crawls[0].state #=> String, one of "RUNNING", "COMPLETED", "FAILED", "STOPPED" + # resp.crawls[0].start_time #=> Time + # resp.crawls[0].end_time #=> Time + # resp.crawls[0].summary #=> String + # resp.crawls[0].error_message #=> String + # resp.crawls[0].log_group #=> String + # resp.crawls[0].log_stream #=> String + # resp.crawls[0].message_prefix #=> String + # resp.crawls[0].dpu_hour #=> Float + # resp.next_token #=> String + # + # @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/ListCrawls AWS API Documentation + # + # @overload list_crawls(params = {}) + # @param [Hash] params ({}) + def list_crawls(params = {}, options = {}) + req = build_request(:list_crawls, params) + req.send_request(options) + end + # Lists all the custom patterns that have been created. # # @option params [String] :next_token @@ -13327,7 +13403,7 @@ def update_connection(params = {}, options = {}) # Specifies data lineage configuration settings for the crawler. # # @option params [Types::LakeFormationConfiguration] :lake_formation_configuration - # Specifies AWS Lake Formation configuration settings for the crawler. + # Specifies Lake Formation configuration settings for the crawler. # # @option params [String] :configuration # Crawler configuration information. 
This versioned JSON string allows @@ -14974,7 +15050,7 @@ def build_request(operation_name, params = {}) params: params, config: config) context[:gem_name] = 'aws-sdk-glue' - context[:gem_version] = '1.112.0' + context[:gem_version] = '1.113.0' Seahorse::Client::Request.new(handlers, context) end diff --git a/gems/aws-sdk-glue/lib/aws-sdk-glue/client_api.rb b/gems/aws-sdk-glue/lib/aws-sdk-glue/client_api.rb index c87127281d3..71b303981dc 100644 --- a/gems/aws-sdk-glue/lib/aws-sdk-glue/client_api.rb +++ b/gems/aws-sdk-glue/lib/aws-sdk-glue/client_api.rb @@ -179,10 +179,14 @@ module ClientApi ConnectionsList = Shapes::StructureShape.new(name: 'ConnectionsList') ContextWords = Shapes::ListShape.new(name: 'ContextWords') Crawl = Shapes::StructureShape.new(name: 'Crawl') + CrawlId = Shapes::StringShape.new(name: 'CrawlId') CrawlList = Shapes::ListShape.new(name: 'CrawlList') CrawlState = Shapes::StringShape.new(name: 'CrawlState') Crawler = Shapes::StructureShape.new(name: 'Crawler') CrawlerConfiguration = Shapes::StringShape.new(name: 'CrawlerConfiguration') + CrawlerHistory = Shapes::StructureShape.new(name: 'CrawlerHistory') + CrawlerHistoryList = Shapes::ListShape.new(name: 'CrawlerHistoryList') + CrawlerHistoryState = Shapes::StringShape.new(name: 'CrawlerHistoryState') CrawlerLineageSettings = Shapes::StringShape.new(name: 'CrawlerLineageSettings') CrawlerList = Shapes::ListShape.new(name: 'CrawlerList') CrawlerMetrics = Shapes::StructureShape.new(name: 'CrawlerMetrics') @@ -195,6 +199,8 @@ module ClientApi CrawlerState = Shapes::StringShape.new(name: 'CrawlerState') CrawlerStoppingException = Shapes::StructureShape.new(name: 'CrawlerStoppingException') CrawlerTargets = Shapes::StructureShape.new(name: 'CrawlerTargets') + CrawlsFilter = Shapes::StructureShape.new(name: 'CrawlsFilter') + CrawlsFilterList = Shapes::ListShape.new(name: 'CrawlsFilterList') CreateBlueprintRequest = Shapes::StructureShape.new(name: 'CreateBlueprintRequest') CreateBlueprintResponse = Shapes::StructureShape.new(name: 'CreateBlueprintResponse') CreateClassifierRequest = Shapes::StructureShape.new(name: 'CreateClassifierRequest') @@ -361,6 +367,7 @@ module ClientApi ExistCondition = Shapes::StringShape.new(name: 'ExistCondition') ExportLabelsTaskRunProperties = Shapes::StructureShape.new(name: 'ExportLabelsTaskRunProperties') ExtendedString = Shapes::StringShape.new(name: 'ExtendedString') + FieldName = Shapes::StringShape.new(name: 'FieldName') FieldType = Shapes::StringShape.new(name: 'FieldType') FillMissingValues = Shapes::StructureShape.new(name: 'FillMissingValues') Filter = Shapes::StructureShape.new(name: 'Filter') @@ -368,6 +375,7 @@ module ClientApi FilterExpressions = Shapes::ListShape.new(name: 'FilterExpressions') FilterLogicalOperator = Shapes::StringShape.new(name: 'FilterLogicalOperator') FilterOperation = Shapes::StringShape.new(name: 'FilterOperation') + FilterOperator = Shapes::StringShape.new(name: 'FilterOperator') FilterString = Shapes::StringShape.new(name: 'FilterString') FilterValue = Shapes::StructureShape.new(name: 'FilterValue') FilterValueType = Shapes::StringShape.new(name: 'FilterValueType') @@ -589,6 +597,8 @@ module ClientApi ListBlueprintsResponse = Shapes::StructureShape.new(name: 'ListBlueprintsResponse') ListCrawlersRequest = Shapes::StructureShape.new(name: 'ListCrawlersRequest') ListCrawlersResponse = Shapes::StructureShape.new(name: 'ListCrawlersResponse') + ListCrawlsRequest = Shapes::StructureShape.new(name: 'ListCrawlsRequest') + ListCrawlsResponse = 
Shapes::StructureShape.new(name: 'ListCrawlsResponse') ListCustomEntityTypesRequest = Shapes::StructureShape.new(name: 'ListCustomEntityTypesRequest') ListCustomEntityTypesResponse = Shapes::StructureShape.new(name: 'ListCustomEntityTypesResponse') ListDevEndpointsRequest = Shapes::StructureShape.new(name: 'ListDevEndpointsRequest') @@ -1600,6 +1610,20 @@ module ClientApi Crawler.add_member(:lake_formation_configuration, Shapes::ShapeRef.new(shape: LakeFormationConfiguration, location_name: "LakeFormationConfiguration")) Crawler.struct_class = Types::Crawler + CrawlerHistory.add_member(:crawl_id, Shapes::ShapeRef.new(shape: CrawlId, location_name: "CrawlId")) + CrawlerHistory.add_member(:state, Shapes::ShapeRef.new(shape: CrawlerHistoryState, location_name: "State")) + CrawlerHistory.add_member(:start_time, Shapes::ShapeRef.new(shape: Timestamp, location_name: "StartTime")) + CrawlerHistory.add_member(:end_time, Shapes::ShapeRef.new(shape: Timestamp, location_name: "EndTime")) + CrawlerHistory.add_member(:summary, Shapes::ShapeRef.new(shape: NameString, location_name: "Summary")) + CrawlerHistory.add_member(:error_message, Shapes::ShapeRef.new(shape: DescriptionString, location_name: "ErrorMessage")) + CrawlerHistory.add_member(:log_group, Shapes::ShapeRef.new(shape: LogGroup, location_name: "LogGroup")) + CrawlerHistory.add_member(:log_stream, Shapes::ShapeRef.new(shape: LogStream, location_name: "LogStream")) + CrawlerHistory.add_member(:message_prefix, Shapes::ShapeRef.new(shape: MessagePrefix, location_name: "MessagePrefix")) + CrawlerHistory.add_member(:dpu_hour, Shapes::ShapeRef.new(shape: NonNegativeDouble, location_name: "DPUHour")) + CrawlerHistory.struct_class = Types::CrawlerHistory + + CrawlerHistoryList.member = Shapes::ShapeRef.new(shape: CrawlerHistory) + CrawlerList.member = Shapes::ShapeRef.new(shape: Crawler) CrawlerMetrics.add_member(:crawler_name, Shapes::ShapeRef.new(shape: NameString, location_name: "CrawlerName")) @@ -1636,6 +1660,13 @@ module ClientApi CrawlerTargets.add_member(:delta_targets, Shapes::ShapeRef.new(shape: DeltaTargetList, location_name: "DeltaTargets")) CrawlerTargets.struct_class = Types::CrawlerTargets + CrawlsFilter.add_member(:field_name, Shapes::ShapeRef.new(shape: FieldName, location_name: "FieldName")) + CrawlsFilter.add_member(:filter_operator, Shapes::ShapeRef.new(shape: FilterOperator, location_name: "FilterOperator")) + CrawlsFilter.add_member(:field_value, Shapes::ShapeRef.new(shape: GenericString, location_name: "FieldValue")) + CrawlsFilter.struct_class = Types::CrawlsFilter + + CrawlsFilterList.member = Shapes::ShapeRef.new(shape: CrawlsFilter) + CreateBlueprintRequest.add_member(:name, Shapes::ShapeRef.new(shape: OrchestrationNameString, required: true, location_name: "Name")) CreateBlueprintRequest.add_member(:description, Shapes::ShapeRef.new(shape: Generic512CharString, location_name: "Description")) CreateBlueprintRequest.add_member(:blueprint_location, Shapes::ShapeRef.new(shape: OrchestrationS3Location, required: true, location_name: "BlueprintLocation")) @@ -3285,6 +3316,16 @@ module ClientApi ListCrawlersResponse.add_member(:next_token, Shapes::ShapeRef.new(shape: Token, location_name: "NextToken")) ListCrawlersResponse.struct_class = Types::ListCrawlersResponse + ListCrawlsRequest.add_member(:crawler_name, Shapes::ShapeRef.new(shape: NameString, required: true, location_name: "CrawlerName")) + ListCrawlsRequest.add_member(:max_results, Shapes::ShapeRef.new(shape: PageSize, location_name: "MaxResults")) + 
ListCrawlsRequest.add_member(:filters, Shapes::ShapeRef.new(shape: CrawlsFilterList, location_name: "Filters")) + ListCrawlsRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: Token, location_name: "NextToken")) + ListCrawlsRequest.struct_class = Types::ListCrawlsRequest + + ListCrawlsResponse.add_member(:crawls, Shapes::ShapeRef.new(shape: CrawlerHistoryList, location_name: "Crawls")) + ListCrawlsResponse.add_member(:next_token, Shapes::ShapeRef.new(shape: Token, location_name: "NextToken")) + ListCrawlsResponse.struct_class = Types::ListCrawlsResponse + ListCustomEntityTypesRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: PaginationToken, location_name: "NextToken")) ListCustomEntityTypesRequest.add_member(:max_results, Shapes::ShapeRef.new(shape: PageSize, location_name: "MaxResults")) ListCustomEntityTypesRequest.struct_class = Types::ListCustomEntityTypesRequest @@ -6455,6 +6496,17 @@ module ClientApi ) end) + api.add_operation(:list_crawls, Seahorse::Model::Operation.new.tap do |o| + o.name = "ListCrawls" + o.http_method = "POST" + o.http_request_uri = "/" + o.input = Shapes::ShapeRef.new(shape: ListCrawlsRequest) + o.output = Shapes::ShapeRef.new(shape: ListCrawlsResponse) + o.errors << Shapes::ShapeRef.new(shape: EntityNotFoundException) + o.errors << Shapes::ShapeRef.new(shape: OperationTimeoutException) + o.errors << Shapes::ShapeRef.new(shape: InvalidInputException) + end) + api.add_operation(:list_custom_entity_types, Seahorse::Model::Operation.new.tap do |o| o.name = "ListCustomEntityTypes" o.http_method = "POST" diff --git a/gems/aws-sdk-glue/lib/aws-sdk-glue/types.rb b/gems/aws-sdk-glue/lib/aws-sdk-glue/types.rb index ec2c1e65a02..cab4753dc61 100644 --- a/gems/aws-sdk-glue/lib/aws-sdk-glue/types.rb +++ b/gems/aws-sdk-glue/lib/aws-sdk-glue/types.rb @@ -4124,8 +4124,8 @@ class Crawl < Struct.new( # @return [String] # # @!attribute [rw] lake_formation_configuration - # Specifies whether the crawler should use AWS Lake Formation - # credentials for the crawler instead of the IAM role credentials. + # Specifies whether the crawler should use Lake Formation credentials + # for the crawler instead of the IAM role credentials. # @return [Types::LakeFormationConfiguration] # # @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/Crawler AWS API Documentation @@ -4155,6 +4155,67 @@ class Crawler < Struct.new( include Aws::Structure end + # Contains the information for a run of a crawler. + # + # @!attribute [rw] crawl_id + # A UUID identifier for each crawl. + # @return [String] + # + # @!attribute [rw] state + # The state of the crawl. + # @return [String] + # + # @!attribute [rw] start_time + # The date and time on which the crawl started. + # @return [Time] + # + # @!attribute [rw] end_time + # The date and time on which the crawl ended. + # @return [Time] + # + # @!attribute [rw] summary + # A run summary for the specific crawl in JSON. Contains the catalog + # tables and partitions that were added, updated, or deleted. + # @return [String] + # + # @!attribute [rw] error_message + # If an error occurred, the error message associated with the crawl. + # @return [String] + # + # @!attribute [rw] log_group + # The log group associated with the crawl. + # @return [String] + # + # @!attribute [rw] log_stream + # The log stream associated with the crawl. + # @return [String] + # + # @!attribute [rw] message_prefix + # The prefix for a CloudWatch message about this crawl. 
+ # @return [String] + # + # @!attribute [rw] dpu_hour + # The number of data processing units (DPU) used in hours for the + # crawl. + # @return [Float] + # + # @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/CrawlerHistory AWS API Documentation + # + class CrawlerHistory < Struct.new( + :crawl_id, + :state, + :start_time, + :end_time, + :summary, + :error_message, + :log_group, + :log_stream, + :message_prefix, + :dpu_hour) + SENSITIVE = [] + include Aws::Structure + end + # Metrics for a specified crawler. # # @!attribute [rw] crawler_name @@ -4352,6 +4413,64 @@ class CrawlerTargets < Struct.new( include Aws::Structure end + # A list of fields, comparators and value that you can use to filter the + # crawler runs for a specified crawler. + # + # @note When making an API call, you may pass CrawlsFilter + # data as a hash: + # + # { + # field_name: "CRAWL_ID", # accepts CRAWL_ID, STATE, START_TIME, END_TIME, DPU_HOUR + # filter_operator: "GT", # accepts GT, GE, LT, LE, EQ, NE + # field_value: "GenericString", + # } + # + # @!attribute [rw] field_name + # A key used to filter the crawler runs for a specified crawler. Valid + # values for each of the field names are: + # + # * `CRAWL_ID`\: A string representing the UUID identifier for a + # crawl. + # + # * `STATE`\: A string representing the state of the crawl. + # + # * `START_TIME` and `END_TIME`\: The epoch timestamp in milliseconds. + # + # * `DPU_HOUR`\: The number of data processing unit (DPU) hours used + # for the crawl. + # @return [String] + # + # @!attribute [rw] filter_operator + # A defined comparator that operates on the value. The available + # operators are: + # + # * `GT`\: Greater than. + # + # * `GE`\: Greater than or equal to. + # + # * `LT`\: Less than. + # + # * `LE`\: Less than or equal to. + # + # * `EQ`\: Equal to. + # + # * `NE`\: Not equal to. + # @return [String] + # + # @!attribute [rw] field_value + # The value provided for comparison on the crawl field. + # @return [String] + # + # @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/CrawlsFilter AWS API Documentation + # + class CrawlsFilter < Struct.new( + :field_name, + :filter_operator, + :field_value) + SENSITIVE = [] + include Aws::Structure + end + # @note When making an API call, you may pass CreateBlueprintRequest # data as a hash: # @@ -4652,7 +4771,7 @@ class CreateConnectionResponse < Aws::EmptyStructure; end # @return [Types::LineageConfiguration] # # @!attribute [rw] lake_formation_configuration - # Specifies AWS Lake Formation configuration settings for the crawler. + # Specifies Lake Formation configuration settings for the crawler. # @return [Types::LakeFormationConfiguration] # # @!attribute [rw] configuration @@ -16246,7 +16365,7 @@ class LabelingSetGenerationTaskRunProperties < Struct.new( include Aws::Structure end - # Specifies AWS Lake Formation configuration settings for the crawler. + # Specifies Lake Formation configuration settings for the crawler. # # @note When making an API call, you may pass LakeFormationConfiguration # data as a hash: @@ -16257,8 +16376,8 @@ class LabelingSetGenerationTaskRunProperties < Struct.new( # } # # @!attribute [rw] use_lake_formation_credentials - # Specifies whether to use AWS Lake Formation credentials for the - # crawler instead of the IAM role credentials. + # Specifies whether to use Lake Formation credentials for the crawler + # instead of the IAM role credentials. 
# @return [Boolean] # # @!attribute [rw] account_id @@ -16480,6 +16599,70 @@ class ListCrawlersResponse < Struct.new( include Aws::Structure end + # @note When making an API call, you may pass ListCrawlsRequest + # data as a hash: + # + # { + # crawler_name: "NameString", # required + # max_results: 1, + # filters: [ + # { + # field_name: "CRAWL_ID", # accepts CRAWL_ID, STATE, START_TIME, END_TIME, DPU_HOUR + # filter_operator: "GT", # accepts GT, GE, LT, LE, EQ, NE + # field_value: "GenericString", + # }, + # ], + # next_token: "Token", + # } + # + # @!attribute [rw] crawler_name + # The name of the crawler whose runs you want to retrieve. + # @return [String] + # + # @!attribute [rw] max_results + # The maximum number of results to return. The default is 20, and + # maximum is 100. + # @return [Integer] + # + # @!attribute [rw] filters + # Filters the crawls by the criteria you specify in a list of + # `CrawlsFilter` objects. + # @return [Array] + # + # @!attribute [rw] next_token + # A continuation token, if this is a continuation call. + # @return [String] + # + # @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/ListCrawlsRequest AWS API Documentation + # + class ListCrawlsRequest < Struct.new( + :crawler_name, + :max_results, + :filters, + :next_token) + SENSITIVE = [] + include Aws::Structure + end + + # @!attribute [rw] crawls + # A list of `CrawlerHistory` objects representing the crawl runs that + # meet your criteria. + # @return [Array] + # + # @!attribute [rw] next_token + # A continuation token for paginating the returned list of tokens, + # returned if the current segment of the list is not the last. + # @return [String] + # + # @see http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/ListCrawlsResponse AWS API Documentation + # + class ListCrawlsResponse < Struct.new( + :crawls, + :next_token) + SENSITIVE = [] + include Aws::Structure + end + # @note When making an API call, you may pass ListCustomEntityTypesRequest # data as a hash: # @@ -24045,7 +24228,7 @@ class UpdateConnectionResponse < Aws::EmptyStructure; end # @return [Types::LineageConfiguration] # # @!attribute [rw] lake_formation_configuration - # Specifies AWS Lake Formation configuration settings for the crawler. + # Specifies Lake Formation configuration settings for the crawler. 
# @return [Types::LakeFormationConfiguration] # # @!attribute [rw] configuration diff --git a/gems/aws-sdk-rdsdataservice/CHANGELOG.md b/gems/aws-sdk-rdsdataservice/CHANGELOG.md index c9e8312f790..63a65213c31 100644 --- a/gems/aws-sdk-rdsdataservice/CHANGELOG.md +++ b/gems/aws-sdk-rdsdataservice/CHANGELOG.md @@ -1,6 +1,11 @@ Unreleased Changes ------------------ +1.36.0 (2022-06-24) +------------------ + +* Feature - Documentation updates for RDS Data API + 1.35.0 (2022-04-25) ------------------ diff --git a/gems/aws-sdk-rdsdataservice/VERSION b/gems/aws-sdk-rdsdataservice/VERSION index 2aeaa11ee27..39fc130ef85 100644 --- a/gems/aws-sdk-rdsdataservice/VERSION +++ b/gems/aws-sdk-rdsdataservice/VERSION @@ -1 +1 @@ -1.35.0 +1.36.0 diff --git a/gems/aws-sdk-rdsdataservice/lib/aws-sdk-rdsdataservice.rb b/gems/aws-sdk-rdsdataservice/lib/aws-sdk-rdsdataservice.rb index 96d0d370000..993a7ec853d 100644 --- a/gems/aws-sdk-rdsdataservice/lib/aws-sdk-rdsdataservice.rb +++ b/gems/aws-sdk-rdsdataservice/lib/aws-sdk-rdsdataservice.rb @@ -48,6 +48,6 @@ # @!group service module Aws::RDSDataService - GEM_VERSION = '1.35.0' + GEM_VERSION = '1.36.0' end diff --git a/gems/aws-sdk-rdsdataservice/lib/aws-sdk-rdsdataservice/client.rb b/gems/aws-sdk-rdsdataservice/lib/aws-sdk-rdsdataservice/client.rb index bf2772a7117..217196cb613 100644 --- a/gems/aws-sdk-rdsdataservice/lib/aws-sdk-rdsdataservice/client.rb +++ b/gems/aws-sdk-rdsdataservice/lib/aws-sdk-rdsdataservice/client.rb @@ -362,6 +362,18 @@ def initialize(*args) # `transactionID` parameter, changes that result from the call are # committed automatically. # + # There isn't a fixed upper limit on the number of parameter sets. + # However, the maximum size of the HTTP request submitted through the + # Data API is 4 MiB. If the request exceeds this limit, the Data API + # returns an error and doesn't process the request. This 4-MiB limit + # includes the size of the HTTP headers and the JSON notation in the + # request. Thus, the number of parameter sets that you can include + # depends on a combination of factors, such as the size of the SQL + # statement and the size of each parameter set. + # + # The response size limit is 1 MiB. If the call returns more than 1 MiB + # of response data, the call is terminated. + # # @option params [String] :database # The name of the database. # @@ -388,10 +400,19 @@ def initialize(*args) # The name of the database schema. # # @option params [required, String] :secret_arn - # The name or ARN of the secret that enables access to the DB cluster. + # The ARN of the secret that enables access to the DB cluster. Enter the + # database user name and password for the credentials in the secret. + # + # For information about creating the secret, see [Create a database + # secret][1]. + # + # + # + # [1]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/create_database_secret.html # # @option params [required, String] :sql - # The SQL statement to run. + # The SQL statement to run. Don't include a semicolon (;) at the end of + # the SQL statement. # # @option params [String] :transaction_id # The identifier of a transaction that was started by using the @@ -559,7 +580,15 @@ def commit_transaction(params = {}, options = {}) # # @option params [required, String] :aws_secret_store_arn # The Amazon Resource Name (ARN) of the secret that enables access to - # the DB cluster. + # the DB cluster. Enter the database user name and password for the + # credentials in the secret. 
+ # + # For information about creating the secret, see [Create a database + # secret][1]. + # + # + # + # [1]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/create_database_secret.html # # @option params [String] :database # The name of the database. @@ -641,7 +670,7 @@ def execute_sql(params = {}, options = {}) # `transactionID` parameter, changes that result from the call are # committed automatically. # - # If the binary response data from the database is more than 1 MB, the + # If the binary response data from the database is more than 1 MB, the # call is terminated. # # @option params [Boolean] :continue_after_timeout @@ -695,7 +724,15 @@ def execute_sql(params = {}, options = {}) # # # @option params [required, String] :secret_arn - # The name or ARN of the secret that enables access to the DB cluster. + # The ARN of the secret that enables access to the DB cluster. Enter the + # database user name and password for the credentials in the secret. + # + # For information about creating the secret, see [Create a database + # secret][1]. + # + # + # + # [1]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/create_database_secret.html # # @option params [required, String] :sql # The SQL statement to run. @@ -873,7 +910,7 @@ def build_request(operation_name, params = {}) params: params, config: config) context[:gem_name] = 'aws-sdk-rdsdataservice' - context[:gem_version] = '1.35.0' + context[:gem_version] = '1.36.0' Seahorse::Client::Request.new(handlers, context) end diff --git a/gems/aws-sdk-rdsdataservice/lib/aws-sdk-rdsdataservice/types.rb b/gems/aws-sdk-rdsdataservice/lib/aws-sdk-rdsdataservice/types.rb index 2069c2ca37b..ed5c28268ce 100644 --- a/gems/aws-sdk-rdsdataservice/lib/aws-sdk-rdsdataservice/types.rb +++ b/gems/aws-sdk-rdsdataservice/lib/aws-sdk-rdsdataservice/types.rb @@ -156,11 +156,21 @@ class BadRequestException < Struct.new( # @return [String] # # @!attribute [rw] secret_arn - # The name or ARN of the secret that enables access to the DB cluster. + # The ARN of the secret that enables access to the DB cluster. Enter + # the database user name and password for the credentials in the + # secret. + # + # For information about creating the secret, see [Create a database + # secret][1]. + # + # + # + # [1]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/create_database_secret.html # @return [String] # # @!attribute [rw] sql - # The SQL statement to run. + # The SQL statement to run. Don't include a semicolon (;) at the end + # of the SQL statement. # @return [String] # # @!attribute [rw] transaction_id @@ -400,7 +410,15 @@ class CommitTransactionResponse < Struct.new( # # @!attribute [rw] aws_secret_store_arn # The Amazon Resource Name (ARN) of the secret that enables access to - # the DB cluster. + # the DB cluster. Enter the database user name and password for the + # credentials in the secret. + # + # For information about creating the secret, see [Create a database + # secret][1]. + # + # + # + # [1]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/create_database_secret.html # @return [String] # # @!attribute [rw] database @@ -556,7 +574,16 @@ class ExecuteSqlResponse < Struct.new( # @return [String] # # @!attribute [rw] secret_arn - # The name or ARN of the secret that enables access to the DB cluster. + # The ARN of the secret that enables access to the DB cluster. Enter + # the database user name and password for the credentials in the + # secret. 
+ # + # For information about creating the secret, see [Create a database + # secret][1]. + # + # + # + # [1]: https://docs.aws.amazon.com/secretsmanager/latest/userguide/create_database_secret.html # @return [String] # # @!attribute [rw] sql