From 59cd91f5cf69ff9a290d7cec10f38a4b5c8d8fde Mon Sep 17 00:00:00 2001
From: Alexandre Dutra
Date: Tue, 1 Aug 2023 14:58:03 +0200
Subject: [PATCH] Fix broken assertion in AbstractNessieSparkSqlExtensionTest

The assertion was modified in #7308 with a workaround to avoid
compilation failures. This commit reintroduces the right assertion,
which became possible when Iceberg updated Nessie to 0.61+.
---
 .../AbstractNessieSparkSqlExtensionTest.java  |  16 +--
 .../plans/logical/NessieCommandOutputs.scala  |   0
 .../plans/logical/NessieCommandOutputs.scala  | 126 +++++++++++++++++
 .../plans/logical/NessieCommandOutputs.scala  | 126 +++++++++++++++++
 .../plans/logical/NessieCommandOutputs.scala  | 126 +++++++++++++++++
 .../plans/logical/NessieCommandOutputs.scala  | 129 ++++++++++++++++++
 6 files changed, 509 insertions(+), 14 deletions(-)
 rename integrations/{spark-extensions-base => spark-extensions/v3.1}/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala (100%)
 create mode 100644 integrations/spark-extensions/v3.2/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
 create mode 100644 integrations/spark-extensions/v3.3/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
 create mode 100644 integrations/spark-extensions/v3.4/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
 create mode 100644 integrations/spark-extensions/v3.5/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala

diff --git a/integrations/spark-extensions-basetests/src/main/java/org/projectnessie/spark/extensions/AbstractNessieSparkSqlExtensionTest.java b/integrations/spark-extensions-basetests/src/main/java/org/projectnessie/spark/extensions/AbstractNessieSparkSqlExtensionTest.java
index 0798c61d59e..50b200769fd 100644
--- a/integrations/spark-extensions-basetests/src/main/java/org/projectnessie/spark/extensions/AbstractNessieSparkSqlExtensionTest.java
+++ b/integrations/spark-extensions-basetests/src/main/java/org/projectnessie/spark/extensions/AbstractNessieSparkSqlExtensionTest.java
@@ -270,13 +270,7 @@ additionalRefName, defaultBranch(), currentHash))
                     "ASSIGN BRANCH %s TO %s AT %s IN nessie",
                     additionalRefName, defaultBranch(), invalidHash))
         .isInstanceOf(IllegalArgumentException.class)
-        // TODO enable this when Nessie lib will be >= 0.61
-        // .hasMessage(
-        //     Validation.HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE
-        //         + " - but was: "
-        //         + invalidHash)
-        .hasMessageContaining(Validation.HASH_RULE)
-        .hasMessageContaining(" - but was: " + invalidHash);
+        .hasMessage(Validation.HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE + " - but was: " + invalidHash);
     assertThatThrownBy(
             () ->
                 sql(
@@ -324,13 +318,7 @@ additionalRefName, defaultBranch(), currentHash))
                     "ASSIGN TAG %s TO %s AT %s IN nessie",
                     additionalRefName, defaultBranch(), invalidHash))
         .isInstanceOf(IllegalArgumentException.class)
-        // TODO enable this when Nessie lib will be >= 0.61
-        // .hasMessage(
-        //     Validation.HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE
-        //         + " - but was: "
-        //         + invalidHash)
-        .hasMessageContaining(Validation.HASH_RULE)
-        .hasMessageContaining(" - but was: " + invalidHash);
+        .hasMessage(Validation.HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE + " - but was: " + invalidHash);
     assertThatThrownBy(
             () ->
                 sql(
diff --git a/integrations/spark-extensions-base/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala b/integrations/spark-extensions/v3.1/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
similarity index 100%
rename from integrations/spark-extensions-base/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
rename to integrations/spark-extensions/v3.1/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
diff --git a/integrations/spark-extensions/v3.2/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala b/integrations/spark-extensions/v3.2/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
new file mode 100644
index 00000000000..14609755b03
--- /dev/null
+++ b/integrations/spark-extensions/v3.2/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2022 Dremio
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.catalyst.plans.logical
+
+import org.apache.spark.sql.catalyst.expressions.AttributeReference
+import org.apache.spark.sql.types.{DataTypes, Metadata, StructField, StructType}
+
+object NessieCommandOutputs {
+  def referenceOutput(): Seq[AttributeReference] = new StructType(
+    Array[StructField](
+      StructField(
+        "refType",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "name",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "hash",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ).toAttributes
+
+  def simpleReferenceOutput(): Seq[AttributeReference] = new StructType(
+    Array[StructField](
+      StructField(
+        "name",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "hash",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ).toAttributes
+
+  def dropReferenceOutput(): Seq[AttributeReference] = new StructType(
+    Array[StructField](
+      StructField(
+        "status",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ).toAttributes
+
+  def showLogOutput(): Seq[AttributeReference] = new StructType(
+    Array[StructField](
+      StructField(
+        "author",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "committer",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "hash",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "message",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "signedOffBy",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "authorTime",
+        DataTypes.TimestampType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "committerTime",
+        DataTypes.TimestampType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "properties",
+        DataTypes
+          .createMapType(DataTypes.StringType, DataTypes.StringType, false),
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ).toAttributes
+}
diff --git a/integrations/spark-extensions/v3.3/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala b/integrations/spark-extensions/v3.3/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
new file mode 100644
index 00000000000..14609755b03
--- /dev/null
+++ b/integrations/spark-extensions/v3.3/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2022 Dremio
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.catalyst.plans.logical
+
+import org.apache.spark.sql.catalyst.expressions.AttributeReference
+import org.apache.spark.sql.types.{DataTypes, Metadata, StructField, StructType}
+
+object NessieCommandOutputs {
+  def referenceOutput(): Seq[AttributeReference] = new StructType(
+    Array[StructField](
+      StructField(
+        "refType",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "name",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "hash",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ).toAttributes
+
+  def simpleReferenceOutput(): Seq[AttributeReference] = new StructType(
+    Array[StructField](
+      StructField(
+        "name",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "hash",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ).toAttributes
+
+  def dropReferenceOutput(): Seq[AttributeReference] = new StructType(
+    Array[StructField](
+      StructField(
+        "status",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ).toAttributes
+
+  def showLogOutput(): Seq[AttributeReference] = new StructType(
+    Array[StructField](
+      StructField(
+        "author",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "committer",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "hash",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "message",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "signedOffBy",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "authorTime",
+        DataTypes.TimestampType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "committerTime",
+        DataTypes.TimestampType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "properties",
+        DataTypes
+          .createMapType(DataTypes.StringType, DataTypes.StringType, false),
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ).toAttributes
+}
diff --git a/integrations/spark-extensions/v3.4/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala b/integrations/spark-extensions/v3.4/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
new file mode 100644
index 00000000000..14609755b03
--- /dev/null
+++ b/integrations/spark-extensions/v3.4/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2022 Dremio
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.catalyst.plans.logical
+
+import org.apache.spark.sql.catalyst.expressions.AttributeReference
+import org.apache.spark.sql.types.{DataTypes, Metadata, StructField, StructType}
+
+object NessieCommandOutputs {
+  def referenceOutput(): Seq[AttributeReference] = new StructType(
+    Array[StructField](
+      StructField(
+        "refType",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "name",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "hash",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ).toAttributes
+
+  def simpleReferenceOutput(): Seq[AttributeReference] = new StructType(
+    Array[StructField](
+      StructField(
+        "name",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "hash",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ).toAttributes
+
+  def dropReferenceOutput(): Seq[AttributeReference] = new StructType(
+    Array[StructField](
+      StructField(
+        "status",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ).toAttributes
+
+  def showLogOutput(): Seq[AttributeReference] = new StructType(
+    Array[StructField](
+      StructField(
+        "author",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "committer",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "hash",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "message",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "signedOffBy",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "authorTime",
+        DataTypes.TimestampType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "committerTime",
+        DataTypes.TimestampType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "properties",
+        DataTypes
+          .createMapType(DataTypes.StringType, DataTypes.StringType, false),
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ).toAttributes
+}
diff --git a/integrations/spark-extensions/v3.5/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala b/integrations/spark-extensions/v3.5/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
new file mode 100644
index 00000000000..8dfb85a6514
--- /dev/null
+++ b/integrations/spark-extensions/v3.5/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/NessieCommandOutputs.scala
@@ -0,0 +1,129 @@
+/*
+ * Copyright (C) 2022 Dremio
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql.catalyst.plans.logical
+
+import org.apache.spark.sql.catalyst.expressions.AttributeReference
+import org.apache.spark.sql.types.{DataTypes, Metadata, StructField, StructType}
+
+object NessieCommandOutputs {
+  private def structToAttributes(struct: StructType): Seq[AttributeReference] =
+    struct.map(f => AttributeReference(f.name, f.dataType, f.nullable, f.metadata)())
+
+  def referenceOutput(): Seq[AttributeReference] = structToAttributes(new StructType(
+    Array[StructField](
+      StructField(
+        "refType",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "name",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "hash",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ))
+
+  def simpleReferenceOutput(): Seq[AttributeReference] = structToAttributes(new StructType(
+    Array[StructField](
+      StructField(
+        "name",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "hash",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ))
+
+  def dropReferenceOutput(): Seq[AttributeReference] = structToAttributes(new StructType(
+    Array[StructField](
+      StructField(
+        "status",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ))
+
+  def showLogOutput(): Seq[AttributeReference] = structToAttributes(new StructType(
+    Array[StructField](
+      StructField(
+        "author",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "committer",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "hash",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "message",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "signedOffBy",
+        DataTypes.StringType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "authorTime",
+        DataTypes.TimestampType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "committerTime",
+        DataTypes.TimestampType,
+        nullable = false,
+        Metadata.empty
+      ),
+      StructField(
+        "properties",
+        DataTypes
+          .createMapType(DataTypes.StringType, DataTypes.StringType, false),
+        nullable = false,
+        Metadata.empty
+      )
+    )
+  ))
+}
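Note (not part of the patch above): the following self-contained Java sketch illustrates the two assertion styles touched by the hunks in AbstractNessieSparkSqlExtensionTest. The old workaround only checked message fragments via hasMessageContaining, while the reinstated assertion pins the exact message via hasMessage. The constant and the assignBranchAt() helper below are hypothetical stand-ins, not Nessie's real Validation class or test code; only the AssertJ calls mirror the diff.

    // Hypothetical, minimal sketch (assumes AssertJ on the classpath).
    import static org.assertj.core.api.Assertions.assertThatThrownBy;

    public class HashAssertionSketch {

      // Stand-in for Validation.HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE (illustrative text only).
      static final String HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE =
          "Hash must be a valid hash or relative commit spec";

      // Stand-in for the SQL path that rejects an invalid hash in the real test.
      static void assignBranchAt(String hash) {
        throw new IllegalArgumentException(
            HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE + " - but was: " + hash);
      }

      public static void main(String[] args) {
        String invalidHash = "not-a-valid-hash";

        // Old workaround: passes as long as the fragments appear somewhere in the message.
        assertThatThrownBy(() -> assignBranchAt(invalidHash))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessageContaining(" - but was: " + invalidHash);

        // Reinstated assertion: requires the exact, complete message.
        assertThatThrownBy(() -> assignBranchAt(invalidHash))
            .isInstanceOf(IllegalArgumentException.class)
            .hasMessage(HASH_OR_RELATIVE_COMMIT_SPEC_MESSAGE + " - but was: " + invalidHash);
      }
    }

Both assertions pass against this stand-in; the difference is that hasMessage would fail if the real message gained or lost any surrounding text, which is the stricter check the workaround had relaxed.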