From 10fd22bc99a15b959dc9020f9bed80982dba05c9 Mon Sep 17 00:00:00 2001
From: Enrico Minack
Date: Tue, 23 Jan 2024 20:51:47 +0100
Subject: [PATCH] Fix JDBC+Observation test

---
 .../sql/execution/datasources/jdbc/JdbcUtils.scala | 11 +++++------
 1 file changed, 5 insertions(+), 6 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index 7090b9ca078b4..e8fc21a0b7996 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -968,9 +968,8 @@ object JdbcUtils extends Logging with SQLConfHelper {
       case Some(n) if n < df.rdd.getNumPartitions => df.coalesce(n)
       case _ => df
     }
-    repartitionedDF.rdd.foreachPartition { iterator =>
-      savePartition(None, iterator, rddSchema, insertStmt, batchSize, dialect,
-        isolationLevel, options)
+    repartitionedDF.foreachPartition { iterator: Iterator[Row] => savePartition(
+      None, iterator, rddSchema, insertStmt, batchSize, dialect, isolationLevel, options)
     }
   }
 
@@ -1008,9 +1007,9 @@ object JdbcUtils extends Logging with SQLConfHelper {
       case Some(n) if n < df.rdd.getNumPartitions => df.coalesce(n)
       case _ => df
     }
-    repartitionedDF.rdd.foreachPartition { iterator => upsertPartition(
-      table, iterator, rddSchema, tableSchema, isCaseSensitive, batchSize,
-      dialectWithMerge, isolationLevel, options)
+    repartitionedDF.foreachPartition { iterator: Iterator[Row] => upsertPartition(
+      table, iterator, rddSchema, tableSchema, isCaseSensitive, batchSize, dialectWithMerge,
+      isolationLevel, options)
     }
   }
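
Reviewer note, not part of the patch: the change replaces the RDD-level action on repartitionedDF.rdd with Dataset.foreachPartition, so the JDBC write runs as an action on the DataFrame itself. Below is a minimal sketch of why that matters for a DataFrame that carries an Observation; it assumes a local SparkSession, and the column name "id" and the metric names "rows_written"/"rows" are illustrative only, not taken from the test this patch fixes.

import org.apache.spark.sql.{Observation, Row, SparkSession}
import org.apache.spark.sql.functions.count

// Minimal sketch (not part of the patch): metrics of an Observation attached to
// a DataFrame are reported once an action runs on the Dataset itself, which is
// what the patched code now does.
object ObservationWriteSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("observation-sketch")
      .getOrCreate()

    val observation = new Observation("rows_written")  // hypothetical metric name
    val df = spark.range(0, 100).toDF("id")
      .observe(observation, count("id").as("rows"))

    // Dataset-level action, analogous to the patched JdbcUtils code: the
    // observation completes and its metrics can be read back.
    df.foreachPartition { (_: Iterator[Row]) => () }
    println(observation.get)  // e.g. Map(rows -> 100)

    // The pre-patch code ran the equivalent RDD-level action instead:
    //   df.rdd.foreachPartition(_ => ())
    // which still computes the rows but, per the intent of this fix, may leave
    // the observation's metrics unreported.

    spark.stop()
  }
}

The explicit Iterator[Row] annotation in the patch mirrors the sketch: Dataset.foreachPartition is overloaded (a Scala Iterator[T] => Unit function and a Java ForeachPartitionFunction[T]), and annotating the parameter type keeps the call unambiguous for a Dataset[Row].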