Skip to content

Commit

Permalink
[SPARK-49451][SQL][FOLLOW-UP] Improve duplicate key exception test
Browse files Browse the repository at this point in the history
### What changes were proposed in this pull request?

This PR improves a unit test case in which JSON strings with duplicate keys are prohibited, by checking the cause of the exception in addition to the top-level exception.

### Why are the changes needed?

Earlier, the test only checked the top-level error class but not the cause of the error, which should be `VARIANT_DUPLICATE_KEY`.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

### Was this patch authored or co-authored using generative AI tooling?

NA

Closes apache#48464 from harshmotw-db/harshmotw-db/minor_test_fix.

Authored-by: Harsh Motwani <harsh.motwani@databricks.com>
Signed-off-by: Max Gekk <max.gekk@gmail.com>
  • Loading branch information
harshmotw-db authored and MaxGekk committed Oct 15, 2024
1 parent 217e0da commit c3176a7
Showing 1 changed file with 18 additions and 10 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
*/
package org.apache.spark.sql

import org.apache.spark.SparkThrowable
import org.apache.spark.{SparkException, SparkRuntimeException}
import org.apache.spark.sql.QueryTest.sameRows
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Cast, Literal}
Expand Down Expand Up @@ -359,16 +359,24 @@ class VariantEndToEndSuite extends QueryTest with SharedSparkSession {
val expectedMetadata: Array[Byte] = Array(VERSION, 3, 0, 1, 2, 3, 'a', 'b', 'c')
assert(actual === new VariantVal(expectedValue, expectedMetadata))
}
withSQLConf(SQLConf.VARIANT_ALLOW_DUPLICATE_KEYS.key -> "false") {
val df = Seq(json).toDF("j")
.selectExpr("from_json(j,'variant')")
checkError(
exception = intercept[SparkThrowable] {
// Check whether the parse_json and from_json expressions throw the correct exception.
Seq("from_json(j, 'variant')", "parse_json(j)").foreach { expr =>
withSQLConf(SQLConf.VARIANT_ALLOW_DUPLICATE_KEYS.key -> "false") {
val df = Seq(json).toDF("j").selectExpr(expr)
val exception = intercept[SparkException] {
df.collect()
},
condition = "MALFORMED_RECORD_IN_PARSING.WITHOUT_SUGGESTION",
parameters = Map("badRecord" -> json, "failFastMode" -> "FAILFAST")
)
}
checkError(
exception = exception,
condition = "MALFORMED_RECORD_IN_PARSING.WITHOUT_SUGGESTION",
parameters = Map("badRecord" -> json, "failFastMode" -> "FAILFAST")
)
checkError(
exception = exception.getCause.asInstanceOf[SparkRuntimeException],
condition = "VARIANT_DUPLICATE_KEY",
parameters = Map("key" -> "a")
)
}
}
}
}

0 comments on commit c3176a7

Please sign in to comment.