-
Notifications
You must be signed in to change notification settings - Fork 3
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Implement generic Stream Reader + tests for Iceberg and Delta compatibility
- Loading branch information
1 parent
7c40774
commit d7bd30a
Showing
2 changed files
with
73 additions
and
16 deletions.
There are no files selected for viewing
43 changes: 33 additions & 10 deletions
43
src/main/scala/com/metabolic/data/core/services/spark/reader/table/GenericReader.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,27 +1,50 @@ | ||
package com.metabolic.data.core.services.spark.reader.table | ||
|
||
import com.metabolic.data.core.services.spark.reader.DataframeUnifiedReader | ||
import org.apache.logging.log4j.scala.Logging | ||
import org.apache.spark.sql.delta.implicits.stringEncoder | ||
import org.apache.spark.sql.functions.col | ||
import org.apache.spark.sql.{DataFrame, SparkSession} | ||
|
||
/**
 * Table reader that works across Delta Lake and Iceberg tables, addressed by a
 * fully-qualified table name (e.g. "catalog.database.table").
 *
 * The storage provider is resolved from the session catalog at read time, so
 * the same reader can be used for both formats in batch and streaming mode.
 */
class GenericReader(fqn: String) extends DataframeUnifiedReader with Logging {

  override val input_identifier: String = fqn

  /** How far back a newly-started Iceberg stream reads: one hour, in millis.
    * NOTE(review): consider making this lookback configurable by the caller. */
  private val StreamLookbackMillis: Long = 60L * 60L * 1000L

  /**
   * Resolves the table's storage provider ("delta", "iceberg", ...) from the
   * catalog via DESCRIBE FORMATTED.
   *
   * @param spark the active session used to query the catalog
   * @return the provider name, trimmed and lower-cased so callers can match
   *         on it case-insensitively
   * @throws NoSuchElementException if the DESCRIBE output has no Provider row
   *                                (previously surfaced as a cryptic
   *                                "next on empty iterator" from first())
   */
  private def getTableProvider(spark: SparkSession): String = {
    val providerRows = spark.sql(s"DESCRIBE FORMATTED $input_identifier")
      .filter(col("col_name").contains("Provider"))
      .select("data_type")
      .as[String]
      .collect()

    providerRows.headOption match {
      case Some(provider) => provider.trim.toLowerCase
      case None =>
        throw new NoSuchElementException(
          s"Could not determine table provider for $input_identifier")
    }
  }

  /** Batch read: spark.table resolves both Delta and Iceberg tables by fqn. */
  override def readBatch(spark: SparkSession): DataFrame =
    spark.table(input_identifier)

  /**
   * Streaming read, dispatching on the table's provider.
   *
   * @param spark the active session
   * @return a streaming DataFrame for Iceberg/Delta sources; an empty (batch)
   *         DataFrame for unsupported providers, after logging a warning —
   *         preserved best-effort behavior rather than failing hard
   */
  override def readStream(spark: SparkSession): DataFrame = {
    getTableProvider(spark) match {
      case "iceberg" =>
        logger.info(s"Reading Iceberg Table source $input_identifier")
        spark.readStream
          .format("iceberg")
          // Start consuming from one hour in the past rather than only-new rows.
          .option("stream-from-timestamp",
            (System.currentTimeMillis() - StreamLookbackMillis).toString)
          .load(input_identifier)

      case "delta" =>
        logger.info(s"Reading Delta Table source $input_identifier")
        spark.readStream
          .format("delta")
          .table(input_identifier)

      case unknown =>
        // Fix: log the matched binding instead of re-reading `provider`;
        // the old code bound `unknown` and never used it.
        logger.warn(s"Table source $unknown not supported for table $input_identifier")
        spark.emptyDataFrame
    }
  }

}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters