
Commit

Keep updated versions of libraries
pflooky committed Oct 17, 2024
1 parent 1d661af commit ce32ae6
Showing 3 changed files with 17 additions and 18 deletions.
4 changes: 2 additions & 2 deletions app/build.gradle.kts
@@ -186,9 +186,9 @@ dependencies {
     basicImpl("com.globalmentor:hadoop-bare-naked-local-fs:0.1.0")
 
     // misc
-    basicImpl("joda-time:joda-time:2.12.5")
+    basicImpl("joda-time:joda-time:2.12.7")
     basicImpl("com.google.guava:guava:33.2.1-jre")
-    basicImpl("com.github.pureconfig:pureconfig_$scalaVersion:0.17.2") {
+    basicImpl("com.github.pureconfig:pureconfig_$scalaVersion:0.17.6") {
         exclude(group = "org.scala-lang")
     }
     basicImpl("com.fasterxml.jackson.core:jackson-databind:2.15.3") {
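Of the bumped libraries, pureconfig (0.17.2 to 0.17.6) is the Scala-facing one: it decodes HOCON, such as the application.conf shown below, into typed config classes. A minimal sketch of that usage, assuming pureconfig's Scala 2 auto derivation; the ServiceSettings case class and the "service" block are hypothetical, not this project's types (ConfigParser further down binds config with Jackson instead):

import pureconfig._
import pureconfig.error.ConfigReaderFailures
import pureconfig.generic.auto._

// Hypothetical settings type; the real project defines its own config classes.
final case class ServiceSettings(host: String, port: Int, enableSsl: Boolean)

object PureconfigSketch extends App {
  // Inline HOCON so the sketch is self-contained; normally this would come
  // from application.conf on the classpath via ConfigSource.default.
  val source = ConfigSource.string(
    """
      |service {
      |  host = "localhost"
      |  port = 8080
      |  enable-ssl = true
      |}
      |""".stripMargin
  )

  // load returns Either, so bad keys or type mismatches surface as failures.
  val settings: Either[ConfigReaderFailures, ServiceSettings] =
    source.at("service").load[ServiceSettings]

  settings match {
    case Right(s)    => println(s"Loaded: $s")
    case Left(fails) => println(s"Could not read config: ${fails.prettyPrint()}")
  }
}

By default pureconfig maps kebab-case keys (enable-ssl) onto camelCase fields (enableSsl), which is why the sketch needs no extra configuration.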
30 changes: 15 additions & 15 deletions app/src/main/resources/application.conf
@@ -82,21 +82,21 @@ runtime {
   master = "local[*]"
   master = ${?DATA_CATERER_MASTER}
   config {
-    "spark.sql.cbo.enabled" = "true"
-    "spark.sql.adaptive.enabled" = "true"
-    "spark.sql.cbo.planStats.enabled" = "true"
-    "spark.sql.legacy.allowUntypedScalaUDF" = "true"
-    "spark.sql.legacy.allowParameterlessCount" = "true"
-    "spark.sql.statistics.histogram.enabled" = "true"
-    "spark.sql.shuffle.partitions" = "10"
-    "spark.sql.catalog.postgres" = ""
-    "spark.sql.catalog.cassandra" = "com.datastax.spark.connector.datasource.CassandraCatalog"
-    "spark.sql.catalog.iceberg" = "org.apache.iceberg.spark.SparkCatalog"
-    "spark.sql.catalog.iceberg.type" = "hadoop"
-    "spark.hadoop.fs.s3a.directory.marker.retention" = "keep"
-    "spark.hadoop.fs.s3a.bucket.all.committer.magic.enabled" = "true"
-    "spark.hadoop.fs.hdfs.impl" = "org.apache.hadoop.hdfs.DistributedFileSystem"
-    "spark.hadoop.fs.file.impl" = "com.globalmentor.apache.hadoop.fs.BareLocalFileSystem"
+    "spark.sql.cbo.enabled" = "true",
+    "spark.sql.adaptive.enabled" = "true",
+    "spark.sql.cbo.planStats.enabled" = "true",
+    "spark.sql.legacy.allowUntypedScalaUDF" = "true",
+    "spark.sql.legacy.allowParameterlessCount" = "true",
+    "spark.sql.statistics.histogram.enabled" = "true",
+    "spark.sql.shuffle.partitions" = "10",
+    "spark.sql.catalog.postgres" = "",
+    "spark.sql.catalog.cassandra" = "com.datastax.spark.connector.datasource.CassandraCatalog",
+    "spark.sql.catalog.iceberg" = "org.apache.iceberg.spark.SparkCatalog",
+    "spark.sql.catalog.iceberg.type" = "hadoop",
+    "spark.hadoop.fs.s3a.directory.marker.retention" = "keep",
+    "spark.hadoop.fs.s3a.bucket.all.committer.magic.enabled" = "true",
+    "spark.hadoop.fs.hdfs.impl" = "org.apache.hadoop.hdfs.DistributedFileSystem",
+    "spark.hadoop.fs.file.impl" = "com.globalmentor.apache.hadoop.fs.BareLocalFileSystem",
     "spark.sql.extensions" = "io.delta.sql.DeltaSparkSessionExtension,org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions"
   }
 }
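Everything under runtime.config is a plain Spark SQL or Hadoop property; the trailing commas added in this commit are optional separators in HOCON, so the old and new blocks parse to the same object. As a rough sketch of how such a key/value map can end up on a session (hypothetical wiring, not the project's actual startup code):

import org.apache.spark.sql.SparkSession

object SparkConfigSketch extends App {
  // Hypothetical subset of the runtime.config block above; the real project
  // presumably loads these from application.conf rather than hard-coding them.
  val runtimeConfig: Map[String, String] = Map(
    "spark.sql.cbo.enabled" -> "true",
    "spark.sql.adaptive.enabled" -> "true",
    "spark.sql.shuffle.partitions" -> "10"
  )

  // Fold every entry onto the builder before creating the session.
  val builder = runtimeConfig.foldLeft(SparkSession.builder().master("local[*]")) {
    case (b, (key, value)) => b.config(key, value)
  }
  val spark: SparkSession = builder.getOrCreate()

  println(spark.conf.get("spark.sql.shuffle.partitions")) // prints "10"
  spark.stop()
}

Folding over the builder keeps all settings in one map, which mirrors how a config block like the one above can be applied wholesale instead of one hard-coded .config call per key.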
1 change: 0 additions & 1 deletion in the file defining ConfigParser (path not shown in this view)
@@ -15,7 +15,6 @@ object ConfigParser {
 
   private val LOGGER = Logger.getLogger(getClass.getName)
 
-  lazy val applicationType: String = "advanced"
   lazy val config: Config = getConfig
   lazy val flagsConfig: FlagsConfig = ObjectMapperUtil.jsonObjectMapper.convertValue(config.getObject("flags").unwrapped(), classOf[FlagsConfig])
   lazy val foldersConfig: FoldersConfig = ObjectMapperUtil.jsonObjectMapper.convertValue(config.getObject("folders").unwrapped(), classOf[FoldersConfig])
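The deleted applicationType value aside, the surrounding lines show ConfigParser's binding pattern: load a Typesafe Config, unwrap a section into a java.util.Map, and let Jackson map it onto a config class (ObjectMapperUtil.jsonObjectMapper is presumably a Scala-aware ObjectMapper). A self-contained sketch of the same pattern, with a hypothetical FlagsSketch case class standing in for the real FlagsConfig and jackson-module-scala assumed on the classpath:

import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.module.scala.DefaultScalaModule
import com.typesafe.config.{Config, ConfigFactory}

// Hypothetical flags class; the real FlagsConfig defines its own fields.
final case class FlagsSketch(enableCount: Boolean, enableGenerateData: Boolean)

object ConfigParserSketch extends App {
  // Stand-in for the HOCON that would normally come from application.conf.
  val config: Config = ConfigFactory.parseString(
    """
      |flags {
      |  enableCount = true
      |  enableGenerateData = false
      |}
      |""".stripMargin
  )

  // Same pattern as ConfigParser: unwrap the section to a java.util.Map,
  // then let a Scala-aware Jackson mapper bind it onto the case class.
  val mapper = new ObjectMapper().registerModule(DefaultScalaModule)
  val flags: FlagsSketch =
    mapper.convertValue(config.getObject("flags").unwrapped(), classOf[FlagsSketch])

  println(flags) // FlagsSketch(true,false)
}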
