From 185ce014ac0ae11a59bb90cfe1e59a48039f4f67 Mon Sep 17 00:00:00 2001
From: Edward Cho <114528615+eycho-am@users.noreply.github.com>
Date: Wed, 21 Feb 2024 10:49:02 -0500
Subject: [PATCH] Skip SparkTableMetricsRepositoryTest iceberg test when
 SupportsRowLevelOperations is not available (#536)

---
 .../SparkTableMetricsRepositoryTest.scala | 35 ++++++++++---------
 1 file changed, 19 insertions(+), 16 deletions(-)

diff --git a/src/test/scala/com/amazon/deequ/repository/sparktable/SparkTableMetricsRepositoryTest.scala b/src/test/scala/com/amazon/deequ/repository/sparktable/SparkTableMetricsRepositoryTest.scala
index 667b5b50..8e0d0aac 100644
--- a/src/test/scala/com/amazon/deequ/repository/sparktable/SparkTableMetricsRepositoryTest.scala
+++ b/src/test/scala/com/amazon/deequ/repository/sparktable/SparkTableMetricsRepositoryTest.scala
@@ -101,21 +101,24 @@ class SparkTableMetricsRepositoryTest extends AnyWordSpec
     }
     "save and load to iceberg a single metric" in withSparkSessionIcebergCatalog { spark => {
-      val resultKey = ResultKey(System.currentTimeMillis(), Map("tag" -> "value"))
-      val metric = DoubleMetric(Entity.Column, "m1", "", Try(100))
-      val context = AnalyzerContext(Map(analyzer -> metric))
-
-      val repository = new SparkTableMetricsRepository(spark, "local.metrics_table")
-      // Save the metric
-      repository.save(resultKey, context)
-
-      // Load the metric
-      val loadedContext = repository.loadByKey(resultKey)
-
-      assert(loadedContext.isDefined)
-      assert(loadedContext.get.metric(analyzer).contains(metric))
-    }
-
-    }
+      // The SupportsRowLevelOperations class is available from spark 3.3
+      // We should skip this test for lower spark versions
+      val className = "org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations"
+      if (Try(Class.forName(className)).isSuccess) {
+        val resultKey = ResultKey(System.currentTimeMillis(), Map("tag" -> "value"))
+        val metric = DoubleMetric(Entity.Column, "m1", "", Try(100))
+        val context = AnalyzerContext(Map(analyzer -> metric))
+
+        val repository = new SparkTableMetricsRepository(spark, "local.metrics_table")
+        // Save the metric
+        repository.save(resultKey, context)
+
+        // Load the metric
+        val loadedContext = repository.loadByKey(resultKey)
+
+        assert(loadedContext.isDefined)
+        assert(loadedContext.get.metric(analyzer).contains(metric))
+      }
+    }
+    }
     }
   }
 }
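
The guard in this change works because org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations only ships with Spark 3.3 and later, so a reflective classpath lookup doubles as a version probe with no compile-time dependency on the class. Below is a minimal, self-contained sketch of the same pattern; the SparkFeatureGate object and its member names are illustrative, not part of this change or the deequ codebase.

import scala.util.Try

// Probes the classpath for a marker class that only exists on Spark >= 3.3.
object SparkFeatureGate {
  private val MarkerClass =
    "org.apache.spark.sql.connector.catalog.SupportsRowLevelOperations"

  // Class.forName throws ClassNotFoundException when the class is absent;
  // wrapping it in Try turns that failure into a plain Boolean check.
  def supportsRowLevelOperations: Boolean =
    Try(Class.forName(MarkerClass)).isSuccess
}

// Usage mirroring the test: run version-specific logic only when the
// feature is available; otherwise fall through, effectively skipping it.
object Demo extends App {
  if (SparkFeatureGate.supportsRowLevelOperations) {
    println("Spark >= 3.3 detected: Iceberg-dependent test body would run")
  } else {
    println("Spark < 3.3 detected: Iceberg-dependent test body skipped")
  }
}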