diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index ccb1b28..ec2854a 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -15,10 +15,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- - name: Set up JDK 11
- uses: actions/setup-java@v1
+ - name: Set up JDK 17
+ uses: actions/setup-java@v3
with:
- java-version: 11
+ java-version: 17
+ distribution: "temurin"
- name: Set current version
run: mvn -B versions:set -DnewVersion=${GITHUB_REF##*/} -DprocessAllModules -DgenerateBackupPoms=false
- uses: s4u/maven-settings-action@v2.8.0
diff --git a/Dockerfile b/Dockerfile
index 53a2611..be89091 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM openjdk:11-jre-slim
+FROM openjdk:17-jdk-slim
EXPOSE 8080
COPY target/lib /lib/
@@ -13,4 +13,4 @@ COPY target/lib/postgis-jdbc-*.jar /postgis-jdbc.jar
COPY target/trevas-lab*.jar.original /lib/trevas-lab.jar
-ENTRYPOINT ["java", "-cp", "/lib/*", "fr.insee.trevas.lab.TrevasLabApplication"]
\ No newline at end of file
+ENTRYPOINT ["java", "-cp", "/lib/*", "--add-exports", "java.base/sun.nio.ch=ALL-UNNAMED", "fr.insee.trevas.lab.TrevasLabApplication"]
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 5e1644f..6f65f32 100644
--- a/pom.xml
+++ b/pom.xml
@@ -5,12 +5,12 @@
org.springframework.boot
spring-boot-starter-parent
- 2.4.1
+ 2.7.14
fr.insee
trevas-lab
- 0.4.1
+ 0.4.2
Trevas Lab
VTL API calling Trevas
@@ -22,8 +22,9 @@
- 11
- 1.1.1
+ 17
+ 1.2.0
+ 2.16.0
@@ -31,7 +32,7 @@
org.codehaus.janino
janino
- 3.0.16
+ 3.1.10
@@ -117,13 +118,19 @@
org.apache.spark
spark-kubernetes_2.12
- 3.3.1
+ 3.5.0
+
+
+ org.apache.logging.log4j
+ log4j-slf4j2-impl
+
+
org.postgresql
postgresql
- 42.6.0
+ 42.7.0
net.postgis
@@ -132,35 +139,35 @@
- mysql
- mysql-connector-java
- 8.0.33
+ com.mysql
+ mysql-connector-j
+ 8.2.0
com.fasterxml.jackson.core
jackson-core
- 2.15.2
+ ${jackson.version}
com.fasterxml.jackson.core
jackson-databind
- 2.15.2
+ ${jackson.version}
com.fasterxml.jackson.core
jackson-annotations
- 2.15.2
+ ${jackson.version}
com.fasterxml.jackson.module
jackson-module-scala_2.12
- 2.15.2
+ ${jackson.version}
com.fasterxml.jackson.datatype
jackson-datatype-jsr310
- 2.15.2
+ ${jackson.version}
@@ -216,6 +223,11 @@
org.springframework.boot
spring-boot-maven-plugin
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+ 3.2.2
+
maven-dependency-plugin
@@ -230,6 +242,17 @@
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+ 3.11.0
+
+
+ --add-exports
+ java.base/sun.nio.ch=ALL-UNNAMED
+
+
+
diff --git a/src/main/java/fr/insee/trevas/lab/utils/Utils.java b/src/main/java/fr/insee/trevas/lab/utils/Utils.java
index 4d498ba..69d199d 100644
--- a/src/main/java/fr/insee/trevas/lab/utils/Utils.java
+++ b/src/main/java/fr/insee/trevas/lab/utils/Utils.java
@@ -4,6 +4,7 @@
import fr.insee.trevas.lab.model.QueriesForBindingsToSave;
import fr.insee.trevas.lab.model.S3ForBindings;
import fr.insee.vtl.model.InMemoryDataset;
+import fr.insee.vtl.model.PersistentDataset;
import fr.insee.vtl.spark.SparkDataset;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
@@ -40,7 +41,7 @@ public static ScriptEngine initEngineWithSpark(Bindings bindings, SparkSession s
public static Bindings getBindings(Bindings input) {
Bindings output = new SimpleBindings();
input.forEach((k, v) -> {
- if (!k.startsWith("$")) output.put(k, v);
+ if (v instanceof PersistentDataset) output.put(k, v);
});
return output;
}
@@ -68,9 +69,10 @@ public static SparkConf loadSparkConfig(String stringPath) {
public static Bindings getSparkBindings(Bindings input, Integer limit) {
Bindings output = new SimpleBindings();
input.forEach((k, v) -> {
- if (!k.startsWith("$")) {
- if (v instanceof SparkDataset) {
- Dataset sparkDs = ((SparkDataset) v).getSparkDataset();
+ if (v instanceof PersistentDataset) {
+ fr.insee.vtl.model.Dataset ds = ((PersistentDataset) v).getDelegate();
+ if (ds instanceof SparkDataset) {
+ Dataset sparkDs = ((SparkDataset) ds).getSparkDataset();
if (limit != null) {
SparkDataset sparkDataset = new SparkDataset(sparkDs.limit(limit));
InMemoryDataset im = new InMemoryDataset(
@@ -78,7 +80,7 @@ public static Bindings getSparkBindings(Bindings input, Integer limit) {
sparkDataset.getDataStructure());
output.put(k, im);
} else output.put(k, new SparkDataset(sparkDs)); // useless
- } else output.put(k, v);
+ }
}
});
return output;