diff --git a/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md index 2323a45153c7e..70224ee7bee10 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-1_2-12/CHANGELOG.md @@ -3,6 +3,7 @@ ### 4.8.0-beta.1 (Unreleased) #### Features Added +* Added an API to determine installed version of the Cosmos Spark connector (`CosmosItemsDataSource.version`/`CosmosChangeFeedDataSource.version`). See [PR 27709](https://github.com/Azure/azure-sdk-for-java/pull/27709) #### Breaking Changes diff --git a/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md index a5780c5d2766e..8c6c47ccc72e0 100644 --- a/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md +++ b/sdk/cosmos/azure-cosmos-spark_3-2_2-12/CHANGELOG.md @@ -3,6 +3,7 @@ ### 4.8.0-beta.1 (Unreleased) #### Features Added +* Added an API to determine installed version of the Cosmos Spark connector (`CosmosItemsDataSource.version`/`CosmosChangeFeedDataSource.version`). See [PR 27709](https://github.com/Azure/azure-sdk-for-java/pull/27709) #### Breaking Changes diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosChangeFeedDataSource.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosChangeFeedDataSource.scala index 7476982004611..2afa9320723e3 100644 --- a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosChangeFeedDataSource.scala +++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosChangeFeedDataSource.scala @@ -76,3 +76,13 @@ class CosmosChangeFeedDataSource */ override def supportsExternalMetadata(): Boolean = true } + +object CosmosChangeFeedDataSource { + /** + * Easy way to validate the version of the Cosmos Data Source + * @return the version of the Cosmos Data Source + */ + def version : String = { + 
CosmosConstants.currentVersion + } +} \ No newline at end of file diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosItemsDataSource.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosItemsDataSource.scala index bc8a23a64770f..1f9cc5d437d59 100644 --- a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosItemsDataSource.scala +++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosItemsDataSource.scala @@ -27,31 +27,32 @@ class CosmosItemsDataSource extends DataSourceRegister with TableProvider with B private lazy val sparkSession = SparkSession.active /** - * Infer the schema of the table identified by the given options. - * @param options an immutable case-insensitive string-to-string - * @return StructType inferred schema - */ + * Infer the schema of the table identified by the given options. + * + * @param options an immutable case-insensitive string-to-string + * @return StructType inferred schema + */ override def inferSchema(options: CaseInsensitiveStringMap): StructType = { new ItemsTable(sparkSession, Array.empty, None, None, options).schema() } /** - * Represents the format that this data source provider uses. - */ + * Represents the format that this data source provider uses. + */ override def shortName(): String = CosmosConstants.Names.ItemsDataSourceShortName /** - * Return a `Table` instance with the specified table schema, partitioning and properties - * to do read/write. The returned table should report the same schema and partitioning with the - * specified ones, or Spark may fail the operation. - * - * @param schema The specified table schema. - * @param partitioning The specified table partitioning. - * @param properties The specified table properties. It's case preserving (contains exactly what - * users specified) and implementations are free to use it case sensitively or - * insensitively. 
It should be able to identify a table, e.g. file path, Kafka - * topic name, etc. - */ + * Return a `Table` instance with the specified table schema, partitioning and properties + * to do read/write. The returned table should report the same schema and partitioning with the + * specified ones, or Spark may fail the operation. + * + * @param schema The specified table schema. + * @param partitioning The specified table partitioning. + * @param properties The specified table properties. It's case preserving (contains exactly what + * users specified) and implementations are free to use it case sensitively or + * insensitively. It should be able to identify a table, e.g. file path, Kafka + * topic name, etc. + */ override def getTable(schema: StructType, partitioning: Array[Transform], properties: util.Map[String, String]): Table = { val diagnostics = DiagnosticsConfig.parseDiagnosticsConfig(properties.asScala.toMap) // getTable - This is used for loading table with user specified schema and other transformations. @@ -65,9 +66,19 @@ class CosmosItemsDataSource extends DataSourceRegister with TableProvider with B } /** - * Returns true if the source has the ability of accepting external table metadata when getting - * tables. The external table metadata includes user-specified schema from - * `DataFrameReader`/`DataStreamReader` and schema/partitioning stored in Spark catalog. - */ + * Returns true if the source has the ability of accepting external table metadata when getting + * tables. The external table metadata includes user-specified schema from + * `DataFrameReader`/`DataStreamReader` and schema/partitioning stored in Spark catalog. 
+ */ override def supportsExternalMetadata(): Boolean = true } + +object CosmosItemsDataSource { + /** + * Easy way to validate the version of the Cosmos Data Source + * @return the version of the Cosmos Data Source + */ + def version : String = { + CosmosConstants.currentVersion + } +} diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala index d1f7e95d427c5..e04a69ec114fd 100644 --- a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala +++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EChangeFeedITest.scala @@ -21,6 +21,10 @@ class SparkE2EChangeFeedITest this.reinitializeContainer() } + "spark change feed DataSource version" can "be determined" in { + CosmosChangeFeedDataSource.version shouldEqual CosmosConstants.currentVersion + } + "spark change feed query (incremental)" can "use default schema" in { val cosmosEndpoint = TestConfigurations.HOST val cosmosMasterKey = TestConfigurations.MASTER_KEY diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EQueryITest.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EQueryITest.scala index 7e79ff8351a80..f710ec3f68fae 100644 --- a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EQueryITest.scala +++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/test/scala/com/azure/cosmos/spark/SparkE2EQueryITest.scala @@ -4,7 +4,7 @@ package com.azure.cosmos.spark import java.util.UUID import com.azure.cosmos.implementation.{TestConfigurations, Utils} -import com.azure.cosmos.models.{CosmosItemResponse, PartitionKey} +import com.azure.cosmos.models.PartitionKey import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.databind.node.ObjectNode 
@@ -33,6 +33,10 @@ class SparkE2EQueryITest // to ensure we don't do sub-range feed-range // once emulator fixed switch back to default partitioning. + "spark items DataSource version" can "be determined" in { + CosmosItemsDataSource.version shouldEqual CosmosConstants.currentVersion + } + "spark query" can "basic nested query" in { val cosmosEndpoint = TestConfigurations.HOST val cosmosMasterKey = TestConfigurations.MASTER_KEY