diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 000000000..5edd289f2 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,149 @@ +stages: + - build + +variables: + # This will suppress any download for dependencies and plugins or upload messages which would clutter the console log. + # `showDateTime` will show the passed time in milliseconds. You need to specify `--batch-mode` to make this work. + MAVEN_OPTS: "-Dhttp.proxyHost=${http_proxy_host} -Dhttp.proxyPort=${http_proxy_port} -Dhttps.proxyHost=${http_proxy_host} -Dhttps.proxyPort=${http_proxy_port} -Dhttps.protocols=TLSv1.2 -Dmaven.repo.local=$CI_PROJECT_DIR/.m2/repository -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=WARN -Dorg.slf4j.simpleLogger.showDateTime=true -Djava.awt.headless=true -XX:ReservedCodeCacheSize=512m" + # As of Maven 3.3.0 instead of this you may define these options in `.mvn/maven.config` so the same config is used + # when running from the command line. + # `installAtEnd` and `deployAtEnd` are only effective with recent version of the corresponding plugins. + MAVEN_CLI_OPTS: "--batch-mode --errors --fail-at-end --show-version -DinstallAtEnd=true -DdeployAtEnd=true" + +image: dimajix/maven-npm:latest + +# Cache downloaded dependencies and plugins between builds. +cache: + key: flowman-${CI_JOB_NAME} + paths: + - .m2/repository + - .npm + + +# Build site and reports +build-site: + stage: build + script: 'mvn ${MAVEN_CLI_OPTS} clean install -DskipTests && mvn ${MAVEN_CLI_OPTS} site' + artifacts: + name: "flowman-site" + paths: + - target/site + - flowman-*/target/site + - flowman-plugins/*/target/site + expire_in: 5 days + + +# Default build variant +build-default: + stage: build + script: 'mvn ${MAVEN_CLI_OPTS} clean package' + except: + - pushes + artifacts: + name: "flowman-dist-default" + paths: + - flowman-dist/target/flowman-dist-*-bin.tar.gz + expire_in: 5 days + +# List additional build variants (some of them will be built on pushes) +build-hadoop2.6-spark2.3: + stage: build + script: 'mvn ${MAVEN_CLI_OPTS} clean package -Phadoop-2.6 -Pspark-2.3' + artifacts: + name: "flowman-dist-hadoop2.6-spark2.3" + paths: + - flowman-dist/target/flowman-dist-*-bin.tar.gz + expire_in: 5 days + +build-hadoop2.6-spark2.4: + stage: build + except: + - pushes + script: 'mvn ${MAVEN_CLI_OPTS} clean package -Phadoop-2.6 -Pspark-2.4' + artifacts: + name: "flowman-dist-hadoop2.6-spark2.4" + paths: + - flowman-dist/target/flowman-dist-*-bin.tar.gz + expire_in: 5 days + +build-hadoop2.9-spark2.4: + stage: build + script: 'mvn ${MAVEN_CLI_OPTS} clean package -Phadoop-2.9 -Pspark-2.4' + artifacts: + name: "flowman-dist-hadoop2.9-spark2.4" + paths: + - flowman-dist/target/flowman-dist-*-bin.tar.gz + expire_in: 5 days + +build-hadoop3.1-spark2.4: + stage: build + except: + - pushes + script: 'mvn ${MAVEN_CLI_OPTS} clean package -Phadoop-3.1 -Pspark-2.4' + artifacts: + name: "flowman-dist-hadoop3.1-spark2.4" + paths: + - flowman-dist/target/flowman-dist-*-bin.tar.gz + expire_in: 5 days + +build-hadoop2.9-spark3.0: + stage: build + except: + - pushes + script: 'mvn ${MAVEN_CLI_OPTS} clean package -Phadoop-2.9 -Pspark-3.0' + artifacts: + name: "flowman-dist-hadoop2.9-spark3.0" + paths: + - flowman-dist/target/flowman-dist-*-bin.tar.gz + expire_in: 5 days + +build-hadoop3.1-spark3.0: + stage: build + except: + - pushes + script: 'mvn ${MAVEN_CLI_OPTS} clean package -Phadoop-3.1 -Pspark-3.0' + artifacts: + name: "flowman-dist-hadoop3.1-spark3.0" + paths: + - 
flowman-dist/target/flowman-dist-*-bin.tar.gz + expire_in: 5 days + +build-hadoop3.2-spark3.0: + stage: build + script: 'mvn ${MAVEN_CLI_OPTS} clean package -Phadoop-3.2 -Pspark-3.0' + artifacts: + name: "flowman-dist-hadoop3.2-spark3.0" + paths: + - flowman-dist/target/flowman-dist-*-bin.tar.gz + expire_in: 5 days + +build-hadoop3.2-spark3.1: + stage: build + script: 'mvn ${MAVEN_CLI_OPTS} clean package -Phadoop-3.2 -Pspark-3.1' + artifacts: + name: "flowman-dist-hadoop3.2-spark3.1" + paths: + - flowman-dist/target/flowman-dist-*-bin.tar.gz + expire_in: 5 days + +build-cdh5.15: + stage: build + except: + - pushes + script: 'mvn ${MAVEN_CLI_OPTS} clean package -PCDH-5.15' + artifacts: + name: "flowman-dist-cdh5.15" + paths: + - flowman-dist/target/flowman-dist-*-bin.tar.gz + expire_in: 5 days + +build-cdh6.3: + stage: build + except: + - pushes + script: 'mvn ${MAVEN_CLI_OPTS} clean package -PCDH-6.3' + artifacts: + name: "flowman-dist-cdh6.3" + paths: + - flowman-dist/target/flowman-dist-*-bin.tar.gz + expire_in: 5 days diff --git a/.travis.yml b/.travis.yml index e81943aaf..b61822ef5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -63,6 +63,10 @@ jobs: jdk: openjdk8 script: mvn clean install -Phadoop-3.2 -Pspark-3.0 + - name: Hadoop 3.2 with Spark 3.1 + jdk: openjdk8 + script: mvn clean install -Phadoop-3.2 -Pspark-3.1 + - name: CDH 5.15 jdk: openjdk8 script: mvn clean install -PCDH-5.15 diff --git a/BUILDING.md b/BUILDING.md index 22fd6851d..69fc5814f 100644 --- a/BUILDING.md +++ b/BUILDING.md @@ -7,7 +7,9 @@ is installed on the build machine. Building Flowman with the default settings (i.e. Hadoop and Spark version) is as easy as - mvn clean install +```shell +mvn clean install +``` ## Main Artifacts @@ -28,11 +30,15 @@ You should also configure git such that all files are checked out using "LF" end some unittests may fail and Docker images might not be useable. This can be done by setting the git configuration value "core.autocrlf" to "input" - git config --global core.autocrlf input - +```shell +git config --global core.autocrlf input +``` + You might also want to skip unittests (the HBase plugin is currently failing under windows) - mvn clean install -DskipTests +```shell +mvn clean install -DskipTests +``` It may well be the case that some unittests fail on Windows - don't panic, we focus on Linux systems and ensure that the `master` branch really builds clean with all unittests passing on Linux. @@ -42,13 +48,17 @@ the `master` branch really builds clean with all unittests passing on Linux. Per default, Flowman will be built for fairly recent versions of Spark (2.4.5 as of this writing) and Hadoop (2.8.5). But of course you can also build for a different version by either using a profile - - mvn install -Pspark2.3 -Phadoop2.7 -DskipTests - + +```shell +mvn install -Pspark2.3 -Phadoop2.7 -DskipTests +``` + This will always select the latest bugfix version within the minor version. You can also specify versions explicitly as follows: - mvn install -Dspark.version=2.2.1 -Dhadoop.version=2.7.3 +```shell +mvn install -Dspark.version=2.2.1 -Dhadoop.version=2.7.3 +``` Note that using profiles is the preferred way, as this guarantees that also dependencies are selected using the correct version. The following profiles are available: @@ -56,6 +66,7 @@ using the correct version. 
The following profiles are available: * spark-2.3 * spark-2.4 * spark-3.0 +* spark-3.1 * hadoop-2.6 * hadoop-2.7 * hadoop-2.8 @@ -69,63 +80,92 @@ With these profiles it is easy to build Flowman to match your environment. ## Building for Open Source Hadoop and Spark -Spark 2.3 and Hadoop 2.6: +### Spark 2.3 and Hadoop 2.6: - mvn clean install -Pspark-2.3 -Phadoop-2.6 - -Spark 2.3 and Hadoop 2.7: - - mvn clean install -Pspark-2.3 -Phadoop-2.7 +```shell +mvn clean install -Pspark-2.3 -Phadoop-2.6 +``` -Spark 2.3 and Hadoop 2.8: +### Spark 2.3 and Hadoop 2.7: - mvn clean install -Pspark-2.3 -Phadoop-2.8 +```shell +mvn clean install -Pspark-2.3 -Phadoop-2.7 +``` -Spark 2.3 and Hadoop 2.9: +### Spark 2.3 and Hadoop 2.8: - mvn clean install -Pspark-2.3 -Phadoop-2.9 +```shell +mvn clean install -Pspark-2.3 -Phadoop-2.8 +``` -Spark 2.4 and Hadoop 2.6: +### Spark 2.3 and Hadoop 2.9: - mvn clean install -Pspark-2.4 -Phadoop-2.6 - -Spark 2.4 and Hadoop 2.7: +```shell +mvn clean install -Pspark-2.3 -Phadoop-2.9 +``` - mvn clean install -Pspark-2.4 -Phadoop-2.7 +### Spark 2.4 and Hadoop 2.6: -Spark 2.4 and Hadoop 2.8: +```shell +mvn clean install -Pspark-2.4 -Phadoop-2.6 +``` - mvn clean install -Pspark-2.4 -Phadoop-2.8 +### Spark 2.4 and Hadoop 2.7: -Spark 2.4 and Hadoop 2.9: +```shell +mvn clean install -Pspark-2.4 -Phadoop-2.7 +``` - mvn clean install -Pspark-2.4 -Phadoop-2.9 +### Spark 2.4 and Hadoop 2.8: -Spark 3.0 and Hadoop 3.1 +```shell +mvn clean install -Pspark-2.4 -Phadoop-2.8 +``` - mvn clean install -Pspark-3.0 -Phadoop-3.1 +### Spark 2.4 and Hadoop 2.9: -Spark 3.0 and Hadoop 3.2 +```shell +mvn clean install -Pspark-2.4 -Phadoop-2.9 +``` - mvn clean install -Pspark-3.0 -Phadoop-3.2 +### Spark 3.0 and Hadoop 3.1 + +```shell +mvn clean install -Pspark-3.0 -Phadoop-3.1 +``` + +### Spark 3.0 and Hadoop 3.2 + +```shell +mvn clean install -Pspark-3.0 -Phadoop-3.2 +``` + +### Spark 3.1 and Hadoop 3.2 + +```shell +mvn clean install -Pspark-3.1 -Phadoop-3.2 +``` ## Building for Cloudera The Maven project also contains preconfigured profiles for Cloudera. - mvn clean install -Pspark-2.3 -PCDH-5.15 -DskipTests +```shell +mvn clean install -Pspark-2.3 -PCDH-5.15 -DskipTests +``` Or for Cloudera 6.3 - mvn clean install -Pspark-2.4 -PCDH-6.3 -DskipTests - - -## Skipping Docker Image +```shell +mvn clean install -Pspark-2.4 -PCDH-6.3 -DskipTests +``` -Part of the build also is a Docker image. Since you might not want to use it, because you are using different base -images, you can skip the building of the Docker image via `-Ddockerfile.skip` +# Coverage Analysis +```shell +mvn scoverage:report +``` -## Building Documentation +# Building Documentation Flowman also contains Markdown documentation which is processed by Sphinx to generate the online HTML documentation. diff --git a/CHANGELOG.md b/CHANGELOG.md index 5dce3994f..7927408cb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,38 @@ -# Version 0.14.2 +# Version 0.15.0 - 2021-03-23 + +* New configuration variable `floman.default.target.rebalance` +* New configuration variable `floman.default.target.parallelism` +* Changed behaviour: The `mergeFile` target now does not assume any more that the `target` is local. If you already + use `mergeFiles` with a local file, you need to prefix the target file name with `file://`. 
+* Add new `-t` argument for selectively building a subset of targets +* Remove example-plugin +* Add quickstart guide +* Add new "flowman-parent" BOM for projects using Flowman +* Move `com.dimajix.flowman.annotations` package to `com.dimajix.flowman.spec.annotations` +* Add new log redaction +* Integrate Scala scode coverage analysis +* `assemble` will fail when trying to use non-existing columns +* Move `swagger` and `json` schema support into separate plugins +* Change default build to Spark 3.0 and Hadoop 3.2 +* Update Spark to 3.0.2 +* Rename class `Executor` to `Execution` - watch your plugins! +* Implement new configurable `Executor` class for executing build targets. +* Add build profile for Spark 3.1.x +* Update ScalaTest to 3.2.5 - watch your unittests for changed ScalaTest API! +* Add new `case` mapping +* Add new `--dry-run` command line option +* Add new `mock` and `null` mapping types +* Add new `mock` relation +* Add new `values` mapping +* Add new `values` dataset +* Implement new testing capabilities +* Rename `update` mapping to `upsert` mapping, which better describes its functionality +* Introduce new `VALIDATE` phase, which is executed even before `CREATE` phase +* Implement new `validate` and `verify` targets +* Implement new `deptree` command in Flowman shell + + +# Version 0.14.2 - 2020-10-12 * Upgrade to Spark 2.4.7 and Spark 3.0.1 * Clean up dependencies diff --git a/INSTALLING.md b/INSTALLING.md index 89441aa5f..d9c9e46f5 100644 --- a/INSTALLING.md +++ b/INSTALLING.md @@ -15,7 +15,7 @@ Note that Flowman can be built for different Hadoop and Spark versions, and the needs to match the ones of your platform -## Downloading Flowman +# Downloading Flowman Currently since version 0.14.1, prebuilt releases are provided on [GitHub](https://github.com/dimajix/flowman/releases). This probably is the simplest way to grab a working Flowman package. Note that for each release, there are different @@ -34,7 +34,6 @@ https://github.com/dimajix/flowman/releases/download/0.14.1/flowman-dist-0.14.1- ``` - ## Building Flowman As an alternative to downloading a pre-built distribution of Flowman, you might also want to @@ -42,7 +41,7 @@ As an alternative to downloading a pre-built distribution of Flowman, you might has basic experience with Maven. -## Local Installation +# Local Installation Flowman is distributed as a `tar.gz` file, which simply needs to be extracted at some location on your computer or server. This can be done via @@ -87,7 +86,7 @@ tar xvzf flowman-dist-X.Y.Z-bin.tar.gz * The `examples` directory contains some example projects -## Configuration +# Configuration You probably need to perform some basic global configuration of Flowman. The relevant files are stored in the `conf` directory. diff --git a/NOTICE b/NOTICE index 963f71adf..788ef2858 100644 --- a/NOTICE +++ b/NOTICE @@ -107,3 +107,9 @@ Ganymed SSH2 * license/LICENSE-ganymed-ssh2.txt (BSD License) * HOMEPAGE * http://www.ganymed.ethz.ch/ssh2/ + +Vuetify + * LICENSE + * license/LICENSE-vuetify.txt (MIT License) + * HOMEPAGE + * https://vuetifyjs.com diff --git a/QUICKSTART.md b/QUICKSTART.md new file mode 100644 index 000000000..d2f2a183a --- /dev/null +++ b/QUICKSTART.md @@ -0,0 +1,177 @@ +# Flowman Quickstart Guide + +# 1. Installation + +In order to run the example, you need to have valid access credentials to AWS, since we will be using some data +stored in S3. 
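+
+Besides the AWS credentials, this guide also assumes that a Java runtime is available on your machine, since both
+Spark and Flowman run on the JVM (for Spark 3.0, Java 8 or Java 11 should work). A quick way to check:
+
+```shell
+# Print the version of the default Java runtime (Java 8 or 11 is assumed here for Spark 3.0)
+java -version
+```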
+ +## 1.1 Install Spark + +Although Flowman directly builds upon the power of Apache Spark, it does not provide a working Hadoop or Spark +environment — and there is a good reason for that: In many environments (specifically in companies using Hadoop +distributions) a Hadoop/Spark environment is already provided by some platform team. And Flowman tries its best not +to mess this up and instead requires a working Spark installation. + +Fortunately, Spark is rather simple to install locally on your machine: + +### Download & Install Spark + +As of this writing, the latest release of Flowman is 0.14.2 and is available prebuilt for Spark 3.0.1 on the Spark +homepage. So we download the appropriate Spark distribution from the Apache archive and unpack it. + +```shell +# Create a nice playground which doesn't mess up your system +mkdir playground +cd playground + +# Download and unpack Spark & Hadoop +curl -L https://archive.apache.org/dist/spark/spark-3.0.1/spark-3.0.1-bin-hadoop3.2.tgz | tar xvzf - + +# Create a nice link +ln -snf spark-3.0.1-bin-hadoop3.2 spark +``` + +The Spark package already contains Hadoop, so with this single download you already have both installed and integrated with each other. + +## 1.2 Install Flowman + +You can find prebuilt Flowman packages on the corresponding release page on GitHub. For this quickstart, we chose +`flowman-dist-0.14.2-oss-spark3.0-hadoop3.2-bin.tar.gz` which nicely fits the Spark package we just downloaded before. + +```shell +# Download and unpack Flowman +curl -L https://github.com/dimajix/flowman/releases/download/0.14.2/flowman-dist-0.14.2-oss-spark3.0-hadoop3.2-bin.tar.gz | tar xvzf - + +# Create a nice link +ln -snf flowman-0.14.2 flowman +``` + +### Flowman Configuration + +Now before you can use Flowman, you need to tell it where it can find the Spark home directory which we just created +in the previous step. This can either be done by providing a valid configuration file in +`flowman/conf/flowman-env.sh` (a template can be found at `flowman/conf/flowman-env.sh.template`), or we can simply +set an environment variable. For the sake of simplicity, we follow the second approach. + +```shell +# This assumes that we are still in the directory "playground" +export SPARK_HOME=$(pwd)/spark +``` + +In order to access S3 in the example below, we also need to provide a default namespace which contains some basic +plugin configurations. We simply copy the provided template as follows: + +```shell +# Copy default namespace +cp flowman/conf/default-namespace.yml.template flowman/conf/default-namespace.yml +cp flowman/conf/flowman-env.sh.template flowman/conf/flowman-env.sh + +export AWS_ACCESS_KEY_ID= +export AWS_SECRET_ACCESS_KEY= +``` +That’s all we need to run the Flowman example. + + +# 2. Flowman Shell + +The example data is stored in an S3 bucket provided by me. In order to access the data, you need to provide valid +AWS credentials in your environment: + +```shell +$ export AWS_ACCESS_KEY_ID= +$ export AWS_SECRET_ACCESS_KEY= +``` + +## 2.1 Start interactive Flowman shell + +We start Flowman by running the interactive Flowman shell. While this is not the tool that would be used in automatic +batch processing (`flowexec` is the right tool for that scenario), it gives us a good idea of how ETL projects in Flowman +are organized. + +```shell +cd flowman +bin/flowshell -f examples/weather +``` + +## 2.2 Inspecting Relations + +Now we can inspect some of the relations defined in the project.
First we list all relations +``` +flowman:weather> relation list +``` + +Now we can peek inside the relations `stations-raw` and `measurements-raw`. Since the second relation is partitioned +by years, we explicitly specify the year via the option `-p year=2011` +``` +flowman:weather> relation show stations-raw +flowman:weather> relation show measurements-raw -p year=2011 +``` + +## 2.3 Running a Job + +Now we want to execute the projects main job. Again the job is parametrized by year, so we need to specify the year +that we'd like to process. +``` +flowman:weather> job build main year=2011 +``` + +## 2.4 Inspecting Mappings + +Now we'd like to inspect some of the mappings which have been used during job execution. Since some mappings depend +on job-specific variables, we need to create a *job context*, which can be done by `job enter ` +as follows: +``` +flowman:weather> job enter main year=2011 +``` +Note how the prompt has changed and will now include the job name. Now we can inspect some mappings: +``` +flowman:weather/main> mapping list +flowman:weather/main> mapping show measurements-raw +flowman:weather/main> mapping show measurements-extracted +flowman:weather/main> mapping show stations-raw +``` +Finally we'd like to leave the job context again. +``` +flowman:weather/main> job leave +``` + + +## 2.5 Inspect Results + +The job execution has written its results into some relations again. We can now inspect them again +``` +flowman:weather> relation show stations +flowman:weather> relation show measurements +flowman:weather> relation show aggregates -p year=2011 +``` + +## 2.6 History + +Flowman also provides an execution history. In the trivial deployment, this information is stored locally in a +Derby database, but other databases like MySQL, MariaDB etc are also supported. +``` +flowman:weather> history job search +flowman:weather> history target search -J 1 +``` + +## 2.7 Quitting + +Finally we quit the Flowman shell via the `quit` command. +``` +flowman:weather> quit +``` + +# 3. Flowman Execution + +So far we have only used the Flowman shell for interactive work with projects. Actually, the shell was developed as a +second step to help analyzing problems and debugging data flows. The primary command for working with Flowman projects +is `flowexec` which is used for non-interactive batch execution, for example within cron-jobs. + +It shares a lot of code with the Flowman shell, so the commands are often exactly the same. The main difference is +that with `flowexec` you specify the commands on the command line while `flowshell` provides its own prompt. + +For example for running the “build” lifecycle of the weather project for the year 2014, you only need to run: +```shell +bin/flowexec -f examples/weather job build main year=2014 +``` + +# 4. Closing + +A very special *Thank You!* goes to all of you who try to follow the example hands-on on your local machine. If you have +problems with following the example, please leave me a note — it’s always difficult to streamline such a process, and +I might have overseen some issues. diff --git a/docs/building.md b/docs/building.md index 161c7cab6..e479e6d58 100644 --- a/docs/building.md +++ b/docs/building.md @@ -1,14 +1,33 @@ # Building Flowman Since Flowman depends on libraries like Spark and Hadoop, which are commonly provided by a platform environment like -Cloudera or EMR, you currently need to build Flowman yourself to match the correct versions. Prebuilt Flowman -distributions are planned, but not available yet. 
+Cloudera or EMR, you currently need to build Flowman yourself to match the correct versions. + +## Download Prebuilt Distribution + +As an alternative to building Flowman yourself, prebuilt Flowman distributions are are provided on +[GitHub](https://github.com/dimajix/flowman/releases). This probably is the simplest way to grab a working Flowman +package. Note that for each release, there are different packages being provided, for different Spark and Hadoop +versions. The naming is very simple: +``` +flowman-dist--oss-spark-hadoop-bin.tar.gz +``` +You simply have to use the package which fits to the Spark and Hadoop versions of your environment. For example the +package of Flowman 0.14.1 and for Spark 3.0 and Hadoop 3.2 would be +``` +flowman-dist-0.14.1-oss-spark30-hadoop32-bin.tar.gz +``` +and the full URL then would be +``` +https://github.com/dimajix/flowman/releases/download/0.14.1/flowman-dist-0.14.1-oss-spark3.0-hadoop3.2-bin.tar.gz +``` The whole project is built using Maven. The build also includes a Docker image, which requires that Docker is installed on the build machine - building the Docker image can be disabled (see below). ## Build with Maven +When you decide against downloading a prebuilt Flowman distribution, you can simply built it yourself with Maven. Building Flowman with the default settings (i.e. Hadoop and Spark version) is as easy as mvn clean install @@ -57,6 +76,7 @@ using the correct version. The following profiles are available: * spark-2.3 * spark-2.4 * spark-3.0 +* spark-3.1 * hadoop-2.6 * hadoop-2.7 * hadoop-2.8 @@ -110,6 +130,11 @@ Spark 3.0 and Hadoop 3.2 mvn clean install -Pspark-3.0 -Phadoop-3.2 +Spark 3.1 and Hadoop 3.2 + + mvn clean install -Pspark-3.1 -Phadoop-3.2 + + ### Building for Cloudera The Maven project also contains preconfigured profiles for Cloudera. @@ -121,11 +146,6 @@ Or for Cloudera 6.3 mvn clean install -Pspark-2.4 -PCDH-6.3 -DskipTests -## Skipping Docker Image - -Part of the build also is a Docker image. Since you might not want to use it, because you are using different base -images, you can skip the building of the Docker image via `-Ddockerfile.skip` - ## Building Documentation Flowman also contains Markdown documentation which is processed by Sphinx to generate the online HTML documentation. diff --git a/docs/cli/flowshell.md b/docs/cli/flowshell.md index c695efe4b..1a54f288a 100644 --- a/docs/cli/flowshell.md +++ b/docs/cli/flowshell.md @@ -14,7 +14,7 @@ * `--spark-name ` Sets the Spark application name -# Commands +## Commands All commands within the Flowman Shell mimic the commands of [flowexec](flowexec.md). The main difference to multiple invocations of `flowexec` is the fact that the project is loaded only once and some additional commands are provided. diff --git a/docs/cli/index.md b/docs/cli/index.md index fc960fd19..60e948115 100644 --- a/docs/cli/index.md +++ b/docs/cli/index.md @@ -2,8 +2,6 @@ Flowman provides a small set of executables for working with projects. -## Executables - ```eval_rst .. toctree:: :maxdepth: 1 diff --git a/docs/conf.py b/docs/conf.py index 7a5997622..944f49bbe 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -50,7 +50,7 @@ # General information about the project. project = 'Flowman' -copyright = '2020, Kaya Kupferschmidt' +copyright = '2021, Kaya Kupferschmidt' author = 'Kaya Kupferschmidt' github_doc_root = 'https://github.com/dimajix/flowman/tree/master/docs/' @@ -60,9 +60,9 @@ # built documents. # # The short X.Y version. 
-version = '0.14' +version = '0.15' # The full version, including alpha/beta/rc tags. -release = '0.14.2-SNAPSHOT' +release = '0.15.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/docs/config.md b/docs/config.md index 8b6d3ae53..53fac5dce 100644 --- a/docs/config.md +++ b/docs/config.md @@ -28,12 +28,26 @@ variable `FLOWMAN_PLUGIN_DIR` or `FLOWMAN_HOME`. - `flowman.execution.target.forceDirty` *(type: boolean)* *(default:false)* When enabled (i.e. set to `true`), then Flowman will treat all targets as being dirty. Otherwise Flowman will check the existence of targets to decide if a rebuild is required. + +- `flowman.execution.executor.class` *(type: class)* *(default: `com.dimajix.flowman.execution.SimpleExecutor`)* +Configure the executor to use. The default `SimpleExecutor` will process all targets in the correct order +sequentially. - `flowman.default.target.outputMode` *(type: string)* *(default:OVERWRITE)* -Possible values are +Sets the default target output mode. Possible values are - *`OVERWRITE`*: Will overwrite existing data. Only supported in batch output. - *`APPEND`*: Will append new records to existing data - *`UPDATE`*: Will update existing data. Only supported in streaming output. - *`IGNORE_IF_EXISTS`*: Silently skips the output if it already exists. - *`ERROR_IF_EXISTS`*: Throws an error if the output already exists +Note that you can still explicitly specify a different output mode in each target. +- `floman.default.target.rebalance` *(type: boolean)* *(default:false)* +If set to `true`, Flowman will try to write a similar records per each output file. Rebelancing might be an expensive +operation since it will invoke a Spark network shuffle. Note that you can still explicitly use different settings per +target. + +- `floman.default.target.parallelism` *(type: int)* *(default:16)* +Sets the default number of output files per target. If set to zero or a negative value, the number of output files is +implicitly determined by the number of internal Spark partitions, i.e. no explicit change will be performed. Note that +you can still explicitly use different settings per target. diff --git a/docs/index.md b/docs/index.md index 796d45c21..8f693533e 100644 --- a/docs/index.md +++ b/docs/index.md @@ -29,6 +29,10 @@ and schema information) in a single place managed by a single program. ## Where to go from here +## Quickstart +A small [quickstart guide](quickstart.md) will lead you through a simple example. + + ### Installation * [Flowman Installation](installation.md): Installation Guide * [Configuration](config.md): Configuration settings @@ -60,6 +64,7 @@ More detail on all these items is described in the following sections: ### Cookbooks * [Testing](cookbook/testing.md) How to implement tests in Flowman +* [Kerberos](cookbook/kerberos.md) How to use Flowman in Kerberized environments ## Table of Contents @@ -69,6 +74,7 @@ More detail on all these items is described in the following sections: :maxdepth: 1 :glob: + quickstart building installation lifecycle diff --git a/docs/lifecycle.md b/docs/lifecycle.md index 7f01bc3d9..b590542cd 100644 --- a/docs/lifecycle.md +++ b/docs/lifecycle.md @@ -7,36 +7,40 @@ multiple different phases, each of them representing one stage of the whole life The full lifecycle consists out of specific phases, as follows: -1. **CREATE**. +1. **VALIDATE**. +This first phase is used for validation and any error will stop the next steps. 
A validation step might for example +check preconditions on data sources which are a hard requirement. + +2. **CREATE**. This will create all relations (tables and directories) specified as targets. The tables will not contain any data, they only provide an empty hull. If a target already exists, a migration will be started instead. This will migrate a relation (table or directory) to a new schema, if required. Note that this is not supported by all target types, and even if a target supports migration in general, it may not be possible due to unmigratable changes. -2. **BUILD**. +3. **BUILD**. The *build* phase will actually create all records and fill them into the specified relations. -3. **VERIFY**. +4. **VERIFY**. The *verify* phase will perform simple checks (for example if a specific Hive partition exists), or may also include some specific user defined tests that compare data. If verification fails, the build process stops. -4. **TRUNCATE**. +5. **TRUNCATE**. *Truncate* is the first of two phases responsible for cleanup. *Truncate* will only remove individual partitions from tables (i.e. it will delete data), but it will keep tables alive. -5. **DESTROY**. +6. **DESTROY**. The final phase *destroy* is used to physically remove relations including their data. This will also remove table definitions, views and directories. It performs the opposite operation to the *create* phase. -## Built In Lifecycles: +## Built In Lifecycles Some of the execution phases can be performed in a meaningful way one after the other. Such a sequence of phases is called *lifecycle*. Flowman has the following lifecycles built in: ### Build -The first lifecycle contains the three phases *CREATE* and *BUILD*. +The first lifecycle contains the four phases *VALIDATE*, *CREATE*, *BUILD* and *VERIFY*. ### Truncate diff --git a/docs/quickstart.md b/docs/quickstart.md new file mode 100644 index 000000000..88d52a4df --- /dev/null +++ b/docs/quickstart.md @@ -0,0 +1,175 @@ +# Flowman Quickstart Guide + +In order to run the example, you need to have valid access credentials to AWS, since we will be using some data +stored in S3. + +## 1. Install Spark + +Although Flowman directly builds upon the power of Apache Spark, it does not provide a working Hadoop or Spark +environment — and there is a good reason for that: In many environments (specifically in companies using Hadoop +distributions) a Hadoop/Spark environment is already provided by some platform team. And Flowman tries its best not +to mess this up and instead requires a working Spark installation. + +Fortunately, Spark is rather simple to install locally on your machine: + +### Download & Install Spark + +As of this writing, the latest release of Flowman is 0.14.2 and is available prebuilt for Spark 3.0.1 on the Spark +homepage. So we download the appropriate Spark distribution from the Apache archive and unpack it. + +```shell +# Create a nice playground which doesn't mess up your system +mkdir playground +cd playground + +# Download and unpack Spark & Hadoop +curl -L https://archive.apache.org/dist/spark/spark-3.0.1/spark-3.0.1-bin-hadoop3.2.tgz | tar xvzf - + +# Create a nice link +ln -snf spark-3.0.1-bin-hadoop3.2 spark +``` + +The Spark package already contains Hadoop, so with this single download you already have both installed and integrated with each other. + +## 2. Install Flowman + +You can find prebuilt Flowman packages on the corresponding release page on GitHub.
For this quickstart, we chose +`flowman-dist-0.14.2-oss-spark3.0-hadoop3.2-bin.tar.gz` which nicely fits to the Spark package we just downloaded before. + +```shell +# Download and unpack Flowman +curl -L https://github.com/dimajix/flowman/releases/download/0.14.2/flowman-dist-0.14.2-oss-spark3.0-hadoop3.2-bin.tar.gz | tar xvzf -# Create a nice link +ln -snf flowman-0.14.2 flowman +``` + +### Flowman Configuration + +Now before you can use Flowman, you need to tell it where it can find the Spark home directory which we just created +in the previous step. This can be either done by providing a valid configuration file in +`flowman/conf/flowman-env.sh` (a template can be found at `flowman/conf/flowman-env.sh.template` ), or we can simply +set an environment variable. For the sake of simplicity, we follow the second approach + +```shell +# This assumes that we are still in the directory "playground" +export SPARK_HOME=$(pwd)/spark +``` + +In order to access S3 in the example below, we also need to provide a default namespace which contains some basic +plugin configurations. We simply copy the provided template as follows: + +```shell +# Copy default namespace +cp flowman/conf/default-namespace.yml.template flowman/conf/default-namespace.yml +cp flowman/conf/flowman-env.sh.template flowman/conf/flowman-env.sh + +export AWS_ACCESS_KEY_ID= +export AWS_SECRET_ACCESS_KEY= +``` +That’s all we need to run the Flowman example. + + +## 3. Flowman Shell + +The example data is stored in a S3 bucket provided by myself. In order to access the data, you need to provide valid +AWS credentials in your environment: + +```shell +$ export AWS_ACCESS_KEY_ID= +$ export AWS_SECRET_ACCESS_KEY= +``` + +### Start interactive Flowman shell + +We start Flowman by running the interactive Flowman shell. While this is not the tool that would be used in automatic +batch processing ( flowexec is the right tool for that scenario), it gives us a good idea how ETL projects in Flowman +are organized. + +```shell +cd flowman +bin/flowshell -f examples/weather +``` + +### Inspecting Relations + +Now we can inspect some of the relations defined in the project. First we list all relations +``` +flowman:weather> relation list +``` + +Now we can peek inside the relations `stations-raw` and `measurements-raw`. Since the second relation is partitioned +by years, we explicitly specify the year via the option `-p year=2011` +``` +flowman:weather> relation show stations-raw +flowman:weather> relation show measurements-raw -p year=2011 +``` + +### Running a Job + +Now we want to execute the projects main job. Again the job is parametrized by year, so we need to specify the year +that we'd like to process. +``` +flowman:weather> job build main year=2011 +``` + +### Inspecting Mappings + +Now we'd like to inspect some of the mappings which have been used during job execution. Since some mappings depend +on job-specific variables, we need to create a *job context*, which can be done by `job enter ` +as follows: +``` +flowman:weather> job enter main year=2011 +``` +Note how the prompt has changed and will now include the job name. Now we can inspect some mappings: +``` +flowman:weather/main> mapping list +flowman:weather/main> mapping show measurements-raw +flowman:weather/main> mapping show measurements-extracted +flowman:weather/main> mapping show stations-raw +``` +Finally we'd like to leave the job context again. 
+``` +flowman:weather/main> job leave +``` + + +### Inspecting Results + +The job execution has written its results into some relations again. We can now inspect them again +``` +flowman:weather> relation show stations +flowman:weather> relation show measurements +flowman:weather> relation show aggregates -p year=2011 +``` + +### History + +Flowman also provides an execution history. In the trivial deployment, this information is stored locally in a +Derby database, but other databases like MySQL, MariaDB etc are also supported. +``` +flowman:weather> history job search +flowman:weather> history target search -J 1 +``` + +### Quitting + +Finally we quit the Flowman shell via the `quit` command. +``` +flowman:weather> quit +``` + +## 4. Flowman Batch Execution + +So far we have only used the Flowman shell for interactive work with projects. Actually, the shell was developed as a +second step to help analyzing problems and debugging data flows. The primary command for working with Flowman projects +is `flowexec` which is used for non-interactive batch execution, for example within cron-jobs. + +It shares a lot of code with the Flowman shell, so the commands are often exactly the same. The main difference is +that with `flowexec` you specify the commands on the command line while `flowshell` provides its own prompt. + +For example for running the “build” lifecycle of the weather project for the year 2014, you only need to run: +```shell +bin/flowexec -f examples/weather job build main year=2014 +``` + +## 5. Congratulations! + +A very special *Thank You!* goes to all of you who try to follow the example hands-on on your local machine. If you have +problems with following the example, please leave me a note — it’s always difficult to streamline such a process, and +I might have overseen some issues. diff --git a/docs/spec/assertion/index.md b/docs/spec/assertion/index.md new file mode 100644 index 000000000..ad7e416d5 --- /dev/null +++ b/docs/spec/assertion/index.md @@ -0,0 +1,15 @@ +# Assertions + +Assertions are an important building block of test cases and validation/verification steps. Assertions are not a top +level entity like [targets](../target/index.md), [mappings](../mapping/index.md), [job](../job/index.md), etc... Instead +assertions are used as part of [tests](../test/index.md) and [validation](../target/validate.md) and +[verification](../target/verify.md) targets. + +## Assertion Types +```eval_rst +.. toctree:: + :maxdepth: 1 + :glob: + + * +``` diff --git a/docs/spec/assertion/sql.md b/docs/spec/assertion/sql.md new file mode 100644 index 000000000..4b18bd990 --- /dev/null +++ b/docs/spec/assertion/sql.md @@ -0,0 +1,61 @@ +## SQL Assertion + +One of the most flexible assertions is a *SQL query* assertion, which simply executed some SQL query, which can access +all mappings as inputs and then compares the result to some expected result. 
+ +## Example: + +```yaml +kind: sql +description: "Assert the correct number of records in measurements_extracted" +query: "SELECT COUNT(*) FROM measurements_extracted" +expected: 2 +``` + +You can also specify multiple assertions within a single `sql` assertion block: +```yaml +kind: sql +tests: + - query: "SELECT id,count(*) FROM output_table GROUP BY id HAVING count(*) > 0" + expected: [] + - query: "SELECT id,count(*) FROM output_cube GROUP BY id HAVING count(*) > 0" + expected: [] +``` + +Or a more complete example when used in conjunction with a [`verify`](../target/verify.md) target: +```yaml +targets: + verify_output: + kind: verify + assertions: + assert_primary_key: + kind: sql + tests: + - query: "SELECT id,count(*) FROM output_table GROUP BY id HAVING count(*) > 0" + expected: [] + - query: "SELECT id,count(*) FROM output_cube GROUP BY id HAVING count(*) > 0" + expected: [] + + assert_measurement_count: + kind: sql + query: "SELECT COUNT(*) FROM measurements_extracted" + expected: 2 +``` + +## Fields + +* `kind` **(mandatory)** *(type: string)*: `sql` + +* `description` **(optional)** *(type: string)*: + A textual description of the assertion + +* `query` **(optional)** *(type: string)*: + The SQL query to be executed. + +* `expected` **(optional)** *(type: list:record)*: + The expected answer. + +* `tests` **(optional)** *(type: map:test)*: + An optional map of individual test cases, each containing a `query` and a `expected` field. Note that you can + use the fields `query` together with `expected` for specifying a single check or you can specify multiple tests + in this list. You can also use a combination of both, although that might look strange. diff --git a/docs/spec/dataset/values.md b/docs/spec/dataset/values.md new file mode 100644 index 000000000..fd42f2e6c --- /dev/null +++ b/docs/spec/dataset/values.md @@ -0,0 +1,40 @@ +# Values Dataset + +## Example + +```yaml +kind: values +schema: + kind: embedded + fields: + - name: int_col + type: integer + - name: str_col + type: string +records: + - [1,"some_string"] + - [2,"cat"] +``` + +```yaml +kind: values +columns: + int_col: integer + str_col: string +records: + - [1,"some_string"] + - [2,"cat"] +``` + +## Fields + +* `kind` **(mandatory)** *(type: string)*: `values` or `const` + +* `records` **(optional)** *(type: list:array)* *(default: empty)*: + An optional list of records to be returned. + +* `columns` **(optional)** *(type: map:string)*: + Specifies the list of column names (key) with their type (value) + +* `schema` **(optional)** *(type: schema)*: + As an alternative of specifying a list of columns you can also directly specify a schema. diff --git a/docs/spec/index.md b/docs/spec/index.md index de8e1a11a..244b54eab 100644 --- a/docs/spec/index.md +++ b/docs/spec/index.md @@ -16,8 +16,6 @@ readability. 
These YAML files are read in by Flowman executables and data flows executed as specified on the command line (more on that in [Flowman CLI](../cli/flowexec.md)) -# Entity Documentation - Flowman has a couple of different main entities, which are documented seperately: ```eval_rst @@ -35,6 +33,7 @@ Flowman has a couple of different main entities, which are documented seperately dataset/index schema/index connection/index + test/index metric/index hooks/index ``` diff --git a/docs/spec/job/index.md b/docs/spec/job/index.md index 1b6a8a2a4..44bcdc0c2 100644 --- a/docs/spec/job/index.md +++ b/docs/spec/job/index.md @@ -6,7 +6,7 @@ automatically by Flowman by examining the artifacts being generated and required ## Example -``` +```yaml jobs: main: description: "Processes all outputs" diff --git a/docs/spec/mapping/case.md b/docs/spec/mapping/case.md new file mode 100644 index 000000000..012c2f121 --- /dev/null +++ b/docs/spec/mapping/case.md @@ -0,0 +1,51 @@ +# Case Mapping + +The `case` mapping can be used for a simple "switch/case" control flow, which selects a mapping depending on +given conditions. + +## Example +``` +mappings: + switch: + kind: case + cases: + - condition: ${processing_type == '1'} + input: mapping_1 + - condition: ${processing_type == '2'} + input: mapping_2:extra_output + - condition: true + input: default_mapping +``` + +## Fields +* `kind` **(mandatory)** *(string)*: `case` + +* `broadcast` **(optional)** *(type: boolean)* *(default: false)*: + Hint for broadcasting the result of this mapping for map-side joins. + +* `cache` **(optional)** *(type: string)* *(default: NONE)*: + Cache mode for the results of this mapping. Supported values are + * `NONE` - Disables caching of teh results of this mapping + * `DISK_ONLY` - Caches the results on disk + * `MEMORY_ONLY` - Caches the results in memory. If not enough memory is available, records will be uncached. + * `MEMORY_ONLY_SER` - Caches the results in memory in a serialized format. If not enough memory is available, records will be uncached. + * `MEMORY_AND_DISK` - Caches the results first in memory and then spills to disk. + * `MEMORY_AND_DISK_SER` - Caches the results first in memory in a serialized format and then spills to disk. + +* `cases` **(mandatory)** *(list)*: + List of `condition` and `input` tuples. The first entry where `condition` evaluates to `true` will be selected. + The inputs always refer to a specific mapping output. + + +## Outputs +* `main` - the only output of the mapping. + + +## Description + +The `case` mapping works as a "switch/case" control flow, which selects one of the given alternative as the output. +The cases are processed in the order of the definition, and the first entry, which evaluates to `true` will be selected. +A default case may be added as the last entry by directly using `true` as a condition. + +Note that all environment variables referenced in the conditions need to be defined, even if a case is not to be +selected. diff --git a/docs/spec/mapping/conform.md b/docs/spec/mapping/conform.md index e3386a550..e49682bd0 100644 --- a/docs/spec/mapping/conform.md +++ b/docs/spec/mapping/conform.md @@ -4,7 +4,7 @@ example you can replace all date columns by timestamp columns (this is required you can transform column names from camel case to snake case to better match SQL. 
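+
+As a rough sketch of these two use cases, such a mapping could look like the following. Note that the `naming` and
+`types` option names used here are assumptions for illustration; the example below shows an actual definition.
+
+```yaml
+mappings:
+  facts_conformed:
+    kind: conform
+    input: facts
+    # Assumption: rename all columns from camelCase to snake_case
+    naming: snakeCase
+    # Assumption: convert all date columns into timestamp columns
+    types:
+      date: timestamp
+```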
## Example -``` +```yaml mappings: partial_facts: kind: conform diff --git a/docs/spec/mapping/json-extract.md b/docs/spec/mapping/json-extract.md index bb2e2f81c..15b72503d 100644 --- a/docs/spec/mapping/json-extract.md +++ b/docs/spec/mapping/json-extract.md @@ -8,7 +8,7 @@ Since Flowman cannot infer the JSON schema in advance, you need to explicitly sp schema of the JSONs to be extracted. ## Example -``` +```yaml mappings: statement_item: kind: extractJson diff --git a/docs/spec/mapping/latest.md b/docs/spec/mapping/latest.md index 845e6f23a..eabb18822 100644 --- a/docs/spec/mapping/latest.md +++ b/docs/spec/mapping/latest.md @@ -6,7 +6,7 @@ when working with streams of change events and you only want to keep the newest event for each ID. ## Example -``` +```yaml mappings: latest_customer_updates: kind: latest diff --git a/docs/spec/mapping/mock.md b/docs/spec/mapping/mock.md new file mode 100644 index 000000000..73f1def48 --- /dev/null +++ b/docs/spec/mapping/mock.md @@ -0,0 +1,51 @@ +# Mock Mapping + +The `mock` mapping works similar to the [`null`](null.md) mapping in that it creates an empty output. But instead of +explicitly specifying a schema of the empty output, the `mock` mapping will fetch the schema from a different mapping. +This mapping is most useful to be used in tests. In addition it is also possible to manually specify records to be +returned, which makes this mapping even more convenient for mocking. + +## Example +```yaml +mappings: + # This will mock `some_mapping` + some_mapping: + kind: mock +``` + +```yaml +mappings: + empty_mapping: + kind: mock + mapping: some_mapping + records: + - [1,2,"some_string",""] + - [2,null,"cat","black"] +``` + +## Fields +* `kind` **(mandatory)** *(type: string)*: `mock` + +* `broadcast` **(optional)** *(type: boolean)* *(default: false)*: + Hint for broadcasting the result of this mapping for map-side joins. + +* `cache` **(optional)** *(type: string)* *(default: NONE)*: + Cache mode for the results of this mapping. Supported values are + * `NONE` + * `DISK_ONLY` + * `MEMORY_ONLY` + * `MEMORY_ONLY_SER` + * `MEMORY_AND_DISK` + * `MEMORY_AND_DISK_SER` + +* `mapping` **(optional)** *(type: string)*: + Specifies the name of the mapping to be mocked. If no name is given, the a mapping with the same name will be + mocked. Note that this will only work when used as an override mapping in test cases, otherwise an infinite loop + would be created by referencing to itself. + +* `records` **(optional)** *(type: list:array)* *(default: empty)*: + An optional list of records to be returned. + + +## Outputs +The `mock` mapping provides all outputs as the mocked mapping. diff --git a/docs/spec/mapping/null.md b/docs/spec/mapping/null.md new file mode 100644 index 000000000..2350eba67 --- /dev/null +++ b/docs/spec/mapping/null.md @@ -0,0 +1,57 @@ +# Null Mapping + +The `null` (or `empty`) mapping is a dummy mapping which produces and empty output but with a specified schema. This +is mainly useful for mocking other mappings in tests. + +## Example +```yaml +mappings: + empty_mapping: + kind: null + columns: + id: String + temperature: Float + wind_speed: Float +``` + +```yaml +mappings: + empty_mapping: + kind: empty + schema: + kind: embedded + fields: + - name: id + type: string + - name: amount + type: double +``` + +## Fields +* `kind` **(mandatory)** *(type: string)*: `null` or `empty` + +* `broadcast` **(optional)** *(type: boolean)* *(default: false)*: + Hint for broadcasting the result of this mapping for map-side joins. 
+ +* `cache` **(optional)** *(type: string)* *(default: NONE)*: + Cache mode for the results of this mapping. Supported values are + * `NONE` + * `DISK_ONLY` + * `MEMORY_ONLY` + * `MEMORY_ONLY_SER` + * `MEMORY_AND_DISK` + * `MEMORY_AND_DISK_SER` + +* `columns` **(optional)** *(type: map:string)*: + Specifies the list of column names (key) with their type (value) + +* `schema` **(optional)** *(type: schema)*: + As an alternative of specifying a list of columns you can also directly specify a schema. + +* `filter` **(optional)** *(type: string)* *(default: empty)*: + An optional SQL filter expression that is applied *after* schema operation. + + +## Outputs +* `main` - the only output of the mapping + diff --git a/docs/spec/mapping/read-relation.md b/docs/spec/mapping/read-relation.md index 882855d7d..708b55720 100644 --- a/docs/spec/mapping/read-relation.md +++ b/docs/spec/mapping/read-relation.md @@ -1,6 +1,8 @@ - # Read Mapping +The `read` (or `readRelation`, which is simply an alias) mapping is found in almost all Flowman projects, as it will +read data from relations. It doesn't have any other mappings as inputs, and therefore usually is the first mapping +in a data flow. ## Example ``` @@ -14,6 +16,7 @@ mappings: end: $end_year columns: raw_data: String + filter: "raw_data IS NOT NULL" ``` ## Fields @@ -44,7 +47,9 @@ will be applied to the records after they have been read and interpreted by the underlying source. * `filter` **(optional)** *(type: string)* *(default: empty)*: -An optional SQL filter expression that is applied for reading only a subset of records. +An optional SQL filter expression that is applied for reading only a subset of records. The filter is applied + *after* the schema as specified in `columns` is applied. This means that if you are using `columns`, then you + can only access these columns in the `filter` expression. ## Outputs diff --git a/docs/spec/mapping/recursive-sql.md b/docs/spec/mapping/recursive-sql.md index 4f0f0e055..06afd2ea3 100644 --- a/docs/spec/mapping/recursive-sql.md +++ b/docs/spec/mapping/recursive-sql.md @@ -1,4 +1,3 @@ - # Recursive SQL Mapping The `recursiveSql` mapping allows to execute recursive SQL transformation which contains Spark SQL code. diff --git a/docs/spec/mapping/schema.md b/docs/spec/mapping/schema.md index 5c9fcb20f..fd539cd5d 100644 --- a/docs/spec/mapping/schema.md +++ b/docs/spec/mapping/schema.md @@ -5,10 +5,10 @@ and also performs type conversions. This corresponds to a simple SQL `SELECT` wi `CAST` expressions. ## Example -``` +```yaml mappings: partial_facts: - kind: conform + kind: schema input: facts columns: id: String @@ -16,8 +16,23 @@ mappings: wind_speed: Float ``` +```yaml +mappings: + partial_facts: + kind: schema + input: facts + schema: + kind: embedded + fields: + - name: id + type: string + - name: amount + type: double +``` + + ## Fields -* `kind` **(mandatory)** *(type: string)*: `conform` +* `kind` **(mandatory)** *(type: string)*: `schema` * `broadcast` **(optional)** *(type: boolean)* *(default: false)*: Hint for broadcasting the result of this mapping for map-side joins. @@ -37,7 +52,7 @@ Specifies the name of the input mapping to be filtered. * `columns` **(optional)** *(type: map:string)*: Specifies the list of column names (key) with their type (value) -* `schema` **(optional)** *(type: string)*: +* `schema` **(optional)** *(type: schema)*: As an alternative of specifying a list of columns you can also directly specify a schema. 
* `filter` **(optional)** *(type: string)* *(default: empty)*: diff --git a/docs/spec/mapping/sql.md b/docs/spec/mapping/sql.md index d2c1a54e5..656aff551 100644 --- a/docs/spec/mapping/sql.md +++ b/docs/spec/mapping/sql.md @@ -1,4 +1,3 @@ - # SQL Mapping The `sql` mapping allows to execute any SQL transformation which contains Spark SQL code. diff --git a/docs/spec/mapping/update.md b/docs/spec/mapping/update.md deleted file mode 100644 index c369ab38d..000000000 --- a/docs/spec/mapping/update.md +++ /dev/null @@ -1,5 +0,0 @@ -# Update Mapping - - -## Outputs -* `main` - the only output of the mapping diff --git a/docs/spec/mapping/upsert.md b/docs/spec/mapping/upsert.md new file mode 100644 index 000000000..0d1011768 --- /dev/null +++ b/docs/spec/mapping/upsert.md @@ -0,0 +1,49 @@ +# Upsert Mapping + +The `upsert` mapping is used to merge two data sets using upsert logic. That means that updates are inserted into an +existing data set and replace existing entries. Entries are identified via primary key columns, which need to +be specified as part of this mapping + +## Example +```yaml +mappings: + merge_updates: + kind: upsert + input: previous_state + updates: state_updates + filter: "operation != 'DELETE'" + keyColumns: id +``` + + +## Fields +* `kind` **(mandatory)** *(type: string)*: `upsert` + +* `broadcast` **(optional)** *(type: boolean)* *(default: false)*: + Hint for broadcasting the result of this mapping for map-side joins. + +* `cache` **(optional)** *(type: string)* *(default: NONE)*: + Cache mode for the results of this mapping. Supported values are + * `NONE` - Disables caching of teh results of this mapping + * `DISK_ONLY` - Caches the results on disk + * `MEMORY_ONLY` - Caches the results in memory. If not enough memory is available, records will be uncached. + * `MEMORY_ONLY_SER` - Caches the results in memory in a serialized format. If not enough memory is available, records will be uncached. + * `MEMORY_AND_DISK` - Caches the results first in memory and then spills to disk. + * `MEMORY_AND_DISK_SER` - Caches the results first in memory in a serialized format and then spills to disk. + +* `input` **(required)** *(type: string)*: + Name of the input mapping containing the previous state without any updates. + +* `updates` **(required)** *(type: string)*: + Name of the additional mapping which contains updates and new entries. + +* `keyColumn` **(required)** *(type: list:string)* + List of column names which form a primary key used for merging. + +* `filter` **(optional)** *(type: string)* + Optional filter condition, which will be applied after the updates have been merged into the input data set. This + filter can be used to remove deleted entries, for example. + + +## Outputs +* `main` - the only output of the mapping diff --git a/docs/spec/mapping/values.md b/docs/spec/mapping/values.md new file mode 100644 index 000000000..4602d14e9 --- /dev/null +++ b/docs/spec/mapping/values.md @@ -0,0 +1,64 @@ +# Values Mapping + +A `values` mapping contains directly specified constant values. It is a good candidate to be used for mocking data in +tests. 
+ + +## Example + +```yaml +mappings: + fake_input: + kind: values + schema: + kind: embedded + fields: + - name: int_col + type: integer + - name: str_col + type: string + records: + - [1,"some_string"] + - [2,"cat"] +``` + +```yaml +mappings: + fake_input: + kind: values + columns: + int_col: integer + str_col: string + records: + - [1,"some_string"] + - [2,"cat"] +``` + + +## Fields +* `kind` **(mandatory)** *(type: string)*: `values` or `const` + +* `broadcast` **(optional)** *(type: boolean)* *(default: false)*: + Hint for broadcasting the result of this mapping for map-side joins. + +* `cache` **(optional)** *(type: string)* *(default: NONE)*: + Cache mode for the results of this mapping. Supported values are + * `NONE` + * `DISK_ONLY` + * `MEMORY_ONLY` + * `MEMORY_ONLY_SER` + * `MEMORY_AND_DISK` + * `MEMORY_AND_DISK_SER` + +* `records` **(optional)** *(type: list:array)* *(default: empty)*: + An optional list of records to be returned. + +* `columns` **(optional)** *(type: map:string)*: + Specifies the list of column names (key) with their type (value) + +* `schema` **(optional)** *(type: schema)*: + As an alternative of specifying a list of columns you can also directly specify a schema. + + +## Outputs +* `main` - the only output of the mapping diff --git a/docs/spec/relation/file.md b/docs/spec/relation/file.md index 78275912d..40a187e17 100644 --- a/docs/spec/relation/file.md +++ b/docs/spec/relation/file.md @@ -1,23 +1,42 @@ # File Relations +File relations are among the most simple relation types. They refer to data stored in individual files, typically on +a distributed and shared file system or object store like Hadoop HDFS or S3. + ## Example -``` +```yaml relations: csv_export: kind: file + # Specify the file format to use format: "csv" + # Specify the base directory where all data is stored. This location does not include the partition pattern location: "${export_dir}" + # Specify the pattern how to identify files and/or partitions. This pattern is relative to the `location` pattern: "${export_pattern}" + # Set format specific options options: delimiter: "," quote: "\"" escape: "\\" header: "true" compression: "gzip" + # Add partition column, which can be used in the `pattern` partitions: - name: datetime type: timestamp granularity: "P1D" + # Specify an optional schema here. It is always recommended to explicitly specify a schema for every relation + # and not just let data flow from a mapping into a target. + schema: + kind: embedded + fields: + - name: country + type: STRING + - name: min_wind_speed + type: FLOAT + - name: max_wind_speed + type: FLOAT ``` ## Fields @@ -54,8 +73,23 @@ relations: ## Description +When using `file` relations as data sinks in a [`relation` target](../target/relation.md), then Flowman will manage the +whole lifecycle of the directory for you. This means that +* The directory specified in `location` will be created during `create` phase +* The directory specified in `location` will be populated with records or partitioning subdirectories will be added + during `build` phase +* The directory specified in `location` will be truncated or individual partitions will be dropped during `clean` phase +* The directory specified in `location` tables will be removed during `destroy` phase + + ## Supported File Format +File relations support all file formats also supported by Spark. This includes simple text files, CSV files, +Parquet files, ORC files and Avro files. 
Each file format provides its own additional settings which can be specified +in the `options` section. + +### Text + ### CSV ### JSON diff --git a/docs/spec/relation/generic.md b/docs/spec/relation/generic.md new file mode 100644 index 000000000..b81a1a2d7 --- /dev/null +++ b/docs/spec/relation/generic.md @@ -0,0 +1,36 @@ +# Generic Relation + +A `generic` relation gives you access to Spark data relations otherwise not directly +supported by Flowman. + +## Example + +```yaml +relations: + advertiser_setting: + kind: generic + format: "csv" + schema: + kind: embedded + fields: + - name: id + type: Integer + - name: advertiser_setting_id + type: Integer +``` + +## Fields +* `kind` **(mandatory)** *(string)*: `generic` + +* `schema` **(optional)** *(schema)* *(default: empty)*: + Explicitly specifies the schema of the relation. + +* `description` **(optional)** *(string)* *(default: empty)*: + A description of the relation. This is purely for informational purpose. + +* `options` **(optional)** *(map:string)* *(default: empty)*: + All options are passed directly to the reader/writer backend and are specific to each + supported format. + +* `format` **(optional)** *(string)* *(default: empty)*: + Specifies the name of the Spark data source format to use. diff --git a/docs/spec/relation/hiveTable.md b/docs/spec/relation/hiveTable.md index 562d23751..70e016908 100644 --- a/docs/spec/relation/hiveTable.md +++ b/docs/spec/relation/hiveTable.md @@ -5,14 +5,22 @@ The `hiveTable` relation is used for managing Hive tables. ## Examples ### Parquet Example -``` +```yaml relations: parquet_relation: kind: hiveTable database: default table: financial_transactions + # Specify the physical location where the data files should be stored at. If you leave this out, the Hive + # default location will be used location: /warehouse/default/financial_transactions + # Specify the file format to use format: parquet + # Add partition column + partitions: + - name: business_date + type: string + # Specify a schema, which is mandatory for write operations schema: kind: inline fields: @@ -20,22 +28,23 @@ relations: type: string - name: amount type: double - partitions: - - name: business_date - type: string ``` ### CSV Example -``` +```yaml relations: csv_relation: kind: hiveTable database: default table: financial_transactions + # Chose `textfile` file format format: textfile + # Also specify a RowFormat via a Hive class rowFormat: org.apache.hadoop.hive.serde2.OpenCSVSerde + # Specify additional serialization/deserialization properties serdeProperties: separatorChar: "\t" + # Specify a schema, which is mandatory for write operations schema: kind: inline fields: @@ -115,3 +124,11 @@ relations: ## Description + +When using Hive tables as data sinks in a [`relation` target](../target/relation.md), then Flowman will manage the +whole lifecycle for you. 
This means that +* Hive tables will be created and migrated during `create` phase +* Hive tables will be populated with records and partitions will be added during `build` phase +* Hive tables will be truncated or individual partitions will be dropped during `clean` phase +* Hive tables will be removed during `destroy` phase + diff --git a/docs/spec/relation/hiveUnionTable.md b/docs/spec/relation/hiveUnionTable.md index 7ef3983c2..9e1a936ec 100644 --- a/docs/spec/relation/hiveUnionTable.md +++ b/docs/spec/relation/hiveUnionTable.md @@ -11,17 +11,28 @@ possibly multiple Hive tables (each of them having a different incompatible sche relations: some_table: kind: hiveUnionTable + # Specify the Hive database, where the UNION view will be created viewDatabase: "crm" + # Specify the name of the Hive UNION view view: "my_table" + # Specify the Hive database where the underlying tables are to be created tableDatabase: "crm" + # Specify the prefix of all Hive tables. Flowman will add numbers like 1,2,3,... to the prefix for + # different schema versions tablePrefix: "zz_my_table" + # Specify the location prefix of all Hive tables. Flowman will add numbers like 1,2,3,... to the prefix for + # different schema versions locationPrefix: "/hive/crm/zz_my_table" external: true + # Select file format format: parquet + # Add partition column partitions: - name: landing_date type: string description: "The date on which the contract event was generated" + # Explicitly specify the schema, which is mandatory for this relation type. In this case the schema is inferred + # from a mapping called `some_mapping` schema: kind: mapping mapping: some_mapping @@ -57,6 +68,10 @@ Name of the Hive database where the tables should be created in when Flowman is used to create the Hive table and is ignored otherwise. This corresponds to the `FORMAT` in a `CREATE TABLE` statement. +* `options` **(optional)** *(map:string)* *(default: empty)*: + All key-value pairs specified in *options* are directly passed to Apache spark for reading + and/or writing to this relation. + * `rowFormat` **(optional)** *(string)* *(default: empty)*: Specifies the row format of the files stored in this Hive table. This setting is only used when Flowman is used to create the Hive table and is ignored otherwise. This corresponds @@ -82,3 +97,14 @@ Name of the Hive database where the tables should be created in Specifies additional properties of the Hive table. This setting is only used when Flowman is used to create the Hive table and is ignored otherwise. This corresponds to the `TBLPROPERTIES` in a `CREATE TABLE` statement. + + +## Description + +When using Hive union tables as data sinks in a [`relation` target](../target/relation.md), then Flowman will manage the +whole lifecycle for you. This means that +* Hive tables will be created and migrated during `create` phase +* Hive tables will be populated with records and partitions will be added during `build` phase +* Hive tables will be truncated or individual partitions will be dropped during `clean` phase +* Hive tables will be removed during `destroy` phase + diff --git a/docs/spec/relation/hiveView.md b/docs/spec/relation/hiveView.md index 8aff19ed4..6cf57c021 100644 --- a/docs/spec/relation/hiveView.md +++ b/docs/spec/relation/hiveView.md @@ -25,8 +25,6 @@ relations: * `description` **(optional)** *(string)* *(default: empty)*: A description of the relation. This is purely for informational purpose. 
-* `options` **(optional)** *(map:string)* *(default: empty)*: - * `database` **(optional)** *(string)* *(default: empty)*: Defines the Hive database where the view is defined. When no database is specified, the table is accessed without any specific qualification, meaning that the default database will be used. diff --git a/docs/spec/relation/jdbc.md b/docs/spec/relation/jdbc.md index 71946e69a..860c9f09f 100644 --- a/docs/spec/relation/jdbc.md +++ b/docs/spec/relation/jdbc.md @@ -1,12 +1,33 @@ - # JDBC Relations +The JDBC relation allows you to access databases using a JDBC driver. Note that you need to put an appropriate JDBC +driver onto the classpath of Flowman. This can be done by using an appropriate plugin. + + ## Example -``` + +```yaml +# First specify a connection. This can be used by multiple JDBC relations +connections: + frontend: + driver: "$frontend_db_driver" + url: "$frontend_db_url" + username: "$frontend_db_username" + password: "$frontend_db_password" + +relations: + frontend_users: + kind: jdbc + # Specify the name of the connection to use + connection: frontend + # Specify the table + table: "users" ``` + ## Fields * `kind` **(mandatory)** *(type: string)*: `jdbc` + * `schema` **(optional)** *(type: schema)* *(default: empty)*: Explicitly specifies the schema of the JDBC source. Alternatively Flowman will automatically try to infer the schema. @@ -14,10 +35,6 @@ * `description` **(optional)** *(type: string)* *(default: empty)*: A description of the relation. This is purely for informational purpose. - * `options` **(optional)** *(type: map:string)* *(default: empty)*: - All key-value pairs specified in *options* are directly passed to Apache spark for reading - and/or writing to this relation. - * `connection` **(mandatory)** *(type: string)*: The *connection* field specifies the name of a [Connection](../connection/index.md) object which has to be defined elsewhere. diff --git a/docs/spec/relation/kafka.md b/docs/spec/relation/kafka.md new file mode 100644 index 000000000..f4e8f8d40 --- /dev/null +++ b/docs/spec/relation/kafka.md @@ -0,0 +1,51 @@ +# Kafka Relations + +The Kafak relation is provided via the `flowman-kafka` plugin. It allows you to access Kafka topics both in batch +and in stream processing, both as sources and as sinks + +## Example + +```yaml +relations: + some_kafka_relation: + kind: kafka + hosts: + - kafka-01 + - kafka-02 + topics: + - topic_a + - topic_b_.* + startOffset: earliest + endOffset: latest +``` + + +## Fields +* `kind` **(mandatory)** *(type: string)*: `kafka` + +* `options` **(optional)** *(map:string)* *(default: empty)*: + All options are passed directly to the reader/writer backend and are specific to each + supported format. + +* `schema` **(optional)** *(type: schema)* *(default: empty)*: + Explicitly specifies the schema of the JDBC source. Alternatively Flowman will automatically + try to infer the schema. + +* `description` **(optional)** *(type: string)* *(default: empty)*: + A description of the relation. This is purely for informational purpose. + +* `hosts` **(required)** *(type: list)* *(default: empty)*: +List of Kafka bootstrap servers to contact. This list does not need to be exhaustive, Flowman will automatically find + all other Kafka brokers of the Kafka cluster + +* `topics` **(required)** *(type: list)* *(default: empty)*: + List of Kafka topics. When reading, a topic may be specified as a regular expression for subscribing to multiple + topics. 
Writing only supports a single topic, and this may also not be a regular expression. + +* `startOffset` **(optional)** *(type: string)* *(default: earliest)*: + Flowman will only process messages starting from this offset. When you want to process all data available in Kafka, + you need to set this value to `earliest` (which is also the default). + +* `endOffset` **(optional)** *(type: string)* *(default: latest)*: + When reading from Kafka using batch processing, you can specify the latest offset to process. Per default this is set + to `latest` which means that messages including the latest one will be processed diff --git a/docs/spec/relation/local.md b/docs/spec/relation/local.md index edfe55995..ed1ad8da9 100644 --- a/docs/spec/relation/local.md +++ b/docs/spec/relation/local.md @@ -1,4 +1,3 @@ - # Local Relations In addition to working with file relations backed up by Hadoop compatible file systems (HDFS, S3, ...), Flowman also supports the local file system as backend for working with files. The @@ -6,15 +5,27 @@ implementation is independant of the normal Apache Spark data sources, therefore limited set of file formats are supported. ## Example -``` +```yaml +relations: + local: + kind: local + location: $outputPath + pattern: data.csv + format: csv + schema: + kind: inline + fields: + - name: str_col + type: string + - name: int_col + type: integer ``` ## Fields * `kind` **(mandatory)** *(string)*: `local` * `schema` **(optional)** *(schema)* *(default: empty)*: - Explicitly specifies the schema of the JDBC source. Alternatively Flowman will automatically - try to infer the schema. + Explicitly specifies the schema of the local relation. * `description` **(optional)** *(string)* *(default: empty)*: A description of the relation. This is purely for informational purpose. @@ -43,6 +54,14 @@ limited set of file formats are supported. ## Description +When using `local` relations as data sinks in a [`relation` target](../target/relation.md), then Flowman will manage the +whole lifecycle of the directory for you. This means that +* The directory specified in `location` will be created during `create` phase +* The directory specified in `location` will be populated with records or partitioning subdirectories will be added + during `build` phase +* The directory specified in `location` will be truncated or individual partitions will be dropped during `clean` phase +* The directory specified in `location` tables will be removed during `destroy` phase + ## Supported File Format ### CSV diff --git a/docs/spec/relation/mock.md b/docs/spec/relation/mock.md new file mode 100644 index 000000000..89303d5af --- /dev/null +++ b/docs/spec/relation/mock.md @@ -0,0 +1,26 @@ +# Mock Relation + +A `mock` relation works similar to a [`null`](null.md) relation in the sense that it does only return empty data. +The main difference is that a `mock` relation picks up the schema from a different relation. It's main use case is +within test cases where you want to replace physical data sources by empty mocked data sources with a minimum amount +of work. + +## Example +```yaml +relations: + mocked_relation: + kind: mock + relation: real_relation +``` + +## Fields +* `kind` **(mandatory)** *(string)*: `null` or `empty` + +* `relation` **(optional)** *(string)* *(default: empty)*: + Specify the base relation to be mocked. If no relation is specified, a relation with the same name will be mocked. + Of course this doesn't work within the same project on project level. 
But it works well when the `mock` relation + is created inside a test. + +* `records` **(optional)** *(type: list:array)* *(default: empty)*: + An optional list of records to be returned. Note that this list needs to include values for any partition columns + of the mocked relation. The partition values need to be appended at the end. diff --git a/docs/spec/relation/null.md b/docs/spec/relation/null.md index ae01241d8..cde452fbe 100644 --- a/docs/spec/relation/null.md +++ b/docs/spec/relation/null.md @@ -1 +1,32 @@ # Null Relation + +A `null` relation is a dummy relation, which can be used either for creating empty +data (but with a schema) during read operations or for throwing away records in +write operations. + +## Example +```yaml +relations: + empty: + kind: null + schema: + kind: embedded + fields: + - name: id + type: string + - name: amount + type: double +``` + +## Fields +* `kind` **(mandatory)** *(string)*: `null` or `empty` + +* `schema` **(optional)** *(schema)* *(default: empty)*: + Explicitly specifies the schema of the null relation. + +* `description` **(optional)** *(string)* *(default: empty)*: + A description of the relation. This is purely for informational purpose. + +* `partitions` **(optional)** *(list:partition)* *(default: empty)*: + Even though a `null` relation does not provide any physical storage, it still optionally + provides virtual partition columns. diff --git a/docs/spec/relation/template.md b/docs/spec/relation/template.md index b8c867ee4..ea71b2694 100644 --- a/docs/spec/relation/template.md +++ b/docs/spec/relation/template.md @@ -4,6 +4,7 @@ ```yaml relations: + # First define the template relation itself structured_macro: kind: hiveUnionTable viewDatabase: "dqm" @@ -20,6 +21,7 @@ relations: kind: mapping mapping: ${schema} + # Now use the template and replace some of the used variables fee: kind: template relation: structured_macro diff --git a/docs/spec/schema/avro.md b/docs/spec/schema/avro.md index 85b18ea03..e16b2d392 100644 --- a/docs/spec/schema/avro.md +++ b/docs/spec/schema/avro.md @@ -5,15 +5,37 @@ The *Avro schema* refers to a schema conforming to the Avro standard ```yaml kind: avro file: "${project.basedir}/test/data/results/${relation}/schema.json" +nullable: true +``` + +```yaml +kind: avro +spec: | + { + "type": "record", + "namespace": "", + "name": "test_schema", + "doc": "Some Documentation", + "fields": [ + { + "doc": "AccessDateTime as a string", + "type": "string", + "name": "AccessDateTime", + "order": "ignore" + } + ] + } ``` ## Fields * `kind` **(mandatory)** *(type: string)*: `avro` * `file` **(optional)** *(type: string)*: -Specifies the path of a schema file. + Specifies the path of a schema file. * `url` **(optional)** *(type: string)*: -Specifies the URL of a schema. + Specifies the URL of a schema. * `spec` **(optional)** *(type: string)*: -Specifies the schema itself as an embedded string + Specifies the schema itself as an embedded string +* `nullable` **(optional)** *(type: boolean)* *(default: false)*: + If set to true, all fields will be made *nullable*. Note that you can only use one of `file`, `url` or `spec`. diff --git a/docs/spec/target/copy.md b/docs/spec/target/copy.md index ff7ef2a6b..6f682e1a1 100644 --- a/docs/spec/target/copy.md +++ b/docs/spec/target/copy.md @@ -28,13 +28,26 @@ targets: * `kind` **(mandatory)** *(type: string)*: `copy` * `source` **(mandatory)** *(type: dataset)*: -Specifies the source data set to be copied +Specifies the source data set to be copied from. 
* `target` **(mandatory)** *(type: dataset)*: -Specifies the target data set to be copied +Specifies the target data set to be copied to. * `schema` **(optional)**: -Optionally specify a schema to be written. +Optionally specify a schema file to be written to. This file will be created in the `build` phase. The schema contains +two sub elements `format` and `file`. + +* `parallelism` **(optional)** *(type: integer)* *(default=16)*: +This specifies the parallelism to be used when writing data. The parallelism equals the number +of files being generated in HDFS output and also equals the maximum number of threads that are used in total in all +Spark executors to produce the output. If `parallelism` is set to zero or to a negative number, Flowman will not +coalesce any partitions and generate as many files as Spark partitions. The default value is controlled by the +Flowman config variable `floman.default.target.parallelism`. + +* `rebalance` **(optional)** *(type: bool)* *(default=false)*: +Enables rebalancing the size of all partitions by introducing an additional internal shuffle operation. Each partition +and output file will contain approximately the same number of records. The default value is controlled by the +Flowman config variable `floman.default.target.rebalance`. ## Supported Phases diff --git a/docs/spec/target/file.md b/docs/spec/target/file.md index 87129440f..ebbe95bc3 100644 --- a/docs/spec/target/file.md +++ b/docs/spec/target/file.md @@ -35,17 +35,21 @@ Specifies the behavior when data or table or partition already exists. Options i * `append`: append the data. * `ignore`: ignore the operation (i.e. no-op). * `error` or `errorifexists`: throw an exception at runtime . +The default value is controlled by the Flowman config variable `floman.default.target.outputMode`. * `partition` **(optional)** *(type: map:string)* *(default=empty)*: * `parallelism` **(optional)** *(type: integer)* *(default=16)*: This specifies the parallelism to be used when writing data. The parallelism equals the number -of files being generated in HDFS output and also equals the maximum number of threads that -are used in total in all Spark executors to produce the output. +of files being generated in HDFS output and also equals the maximum number of threads that are used in total in all +Spark executors to produce the output. If `parallelism` is set to zero or to a negative number, Flowman will not +coalesce any partitions and generate as many files as Spark partitions. The default value is controlled by the +Flowman config variable `floman.default.target.parallelism`. * `rebalance` **(optional)** *(type: bool)* *(default=false)*: -Enables rebalancing the size of all partitions by introducing an additional internal shuffle -operation. Each partition will contain approximately the same number of records. +Enables rebalancing the size of all partitions by introducing an additional internal shuffle operation. Each partition +and output file will contain approximately the same number of records. The default value is controlled by the +Flowman config variable `floman.default.target.rebalance`. ## Supported Phases diff --git a/docs/spec/target/merge-files.md b/docs/spec/target/merge-files.md index ca11afb8a..a512d96af 100644 --- a/docs/spec/target/merge-files.md +++ b/docs/spec/target/merge-files.md @@ -1 +1,28 @@ # Merge Files Target + +The `mergeFiles` target merges all files within a source directory to a single target file. 
This only makes sense if +the file format allows appending by simple bytewise concatenation. This is the case for textual files, like CSV and +TSV files. + +## Example: +```yaml +targets: + csv_merge: + kind: mergeFiles + source: "s3://my-bucket/my-spark-output/" + target: "file:///srv/exports/my-export.csv" + overwrite: true +``` + +## Fields + * `kind` **(mandatory)** *(string)*: `mergeFiles` + * `source` **(mandatory)** *(string)*: Source directory containing all files to be concatenated + * `target` **(optional)** *(string)*: Name of single target file + * `overwrite` **(optional)** *(boolean)* *(default: true)*: + + +## Supported Phases +* `BUILD` +* `VERIFY` +* `TRUNCATE` +* `DESTROY` diff --git a/docs/spec/target/relation.md b/docs/spec/target/relation.md index 7e92bb5f3..3c3727e0a 100644 --- a/docs/spec/target/relation.md +++ b/docs/spec/target/relation.md @@ -6,7 +6,7 @@ the physical location or connection, the format and so on. ## Example -``` +```yaml targets: stations: kind: relation @@ -35,23 +35,29 @@ Specifies the behavior when data or table or partition already exists. Options i * `append`: append the data. * `ignore`: ignore the operation (i.e. no-op). * `error` or `errorifexists`: throw an exception at runtime . +The default value is controlled by the Flowman config variable `floman.default.target.outputMode`. * `partition` **(optional)** *(type: map:string)* *(default=empty)*: * `parallelism` **(optional)** *(type: integer)* *(default=16)*: This specifies the parallelism to be used when writing data. The parallelism equals the number -of files being generated in HDFS output and also equals the maximum number of threads that -are used in total in all Spark executors to produce the output. +of files being generated in HDFS output and also equals the maximum number of threads that are used in total in all +Spark executors to produce the output. If `parallelism` is set to zero or to a negative number, Flowman will not +coalesce any partitions and generate as many files as Spark partitions. The default value is controlled by the +Flowman config variable `floman.default.target.parallelism`. * `rebalance` **(optional)** *(type: bool)* *(default=false)*: -Enables rebalancing the size of all partitions by introducing an additional internal shuffle -operation. Each partition will contain approximately the same number of records. +Enables rebalancing the size of all partitions by introducing an additional internal shuffle operation. Each partition +and output file will contain approximately the same number of records. The default value is controlled by the +Flowman config variable `floman.default.target.rebalance`. ## Description The `relation` target will write the output of a mapping specified via the `mapping` field into the relation specified -in `relation`. +in `relation`. If the `mapping` field is not specified, then Flowman will only perform actions for creating and removing +the relation during the `CREATE`, `TRUNCATE` and `DESTROY` phase. In this case, the `BUILD` phase is a no-op for this +target. ## Supported Phases diff --git a/docs/spec/target/validate.md b/docs/spec/target/validate.md new file mode 100644 index 000000000..a002778a8 --- /dev/null +++ b/docs/spec/target/validate.md @@ -0,0 +1,37 @@ +# Validate Target + +The `validate` target is used to execute a set of assertions in advance of the `CREATE` and `BUILD` phases. This is a +good place to validate any assumptions on the input data like primary key or record count. 
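+
+Like any other target, a `validate` target only takes part in execution when it is listed among the targets of a job.
+A minimal sketch (job and target names are placeholders):
+
+```yaml
+jobs:
+  main:
+    targets:
+      # Runs during the VALIDATE phase, before any CREATE or BUILD work is performed
+      - validate_input
+      # Regular build targets follow
+      - build_some_table
+```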
+ + +## Example + +```yaml +targets: + validate_input: + kind: validate + assertions: + assert_primary_key: + kind: sql + tests: + - query: "SELECT id,count(*) FROM source GROUP BY id HAVING count(*) > 0" + expected: [] + + assert_measurement_count: + kind: sql + tests: + - query: "SELECT COUNT(*) FROM measurements_extracted" + expected: 2 +``` + +## Fields + +* `kind` **(mandatory)** *(type: string)*: `validate` + +* `assertions` **(optional)** *(type: map:assertion)*: + Map of [assertions](../assertion/index.md) to be executed. The validation is marked as *failed* if a single + assertion fails. + + +## Supported Phases +* `VALIDATE` - The specified assertions will be run in the `VALIDATE` phase before the `CREATE` and `BUILD` phases. diff --git a/docs/spec/target/verify.md b/docs/spec/target/verify.md new file mode 100644 index 000000000..3792597a5 --- /dev/null +++ b/docs/spec/target/verify.md @@ -0,0 +1,39 @@ +# Verify Target + +The `verify` target is used to execute a set of assertions *after* all targets have been built. This can be used to +verify the results to ensure that all processing was correct. In most cases it is advisable to use the similar +[`validate`](validate.md) build target, which is executed in advance of the `CREATE` and `BUILD` phase and can be +used to validate any assumptions on the incoming data. + +## Example + +```yaml +targets: + verify_output: + kind: verify + assertions: + assert_primary_key: + kind: sql + tests: + - query: "SELECT id,count(*) FROM output_table GROUP BY id HAVING count(*) > 0" + expected: [] + - query: "SELECT id,count(*) FROM output_cube GROUP BY id HAVING count(*) > 0" + expected: [] + + assert_measurement_count: + kind: sql + query: "SELECT COUNT(*) FROM measurements_extracted" + expected: 2 +``` + +## Fields + +* `kind` **(mandatory)** *(type: string)*: `verify` + +* `assertions` **(optional)** *(type: map:assertion)*: + Map of [assertions](../assertion/index.md) to be executed. The verification is marked as *failed* if a single + assertion fails. + + +## Supported Phases +* `VERIDY` - The specified assertions will be run in the `VERIFY` phase after the `CREATE` and `BUILD` phases. diff --git a/docs/spec/test/execution.md b/docs/spec/test/execution.md new file mode 100644 index 000000000..996412021 --- /dev/null +++ b/docs/spec/test/execution.md @@ -0,0 +1,68 @@ +# Flowman Test Execution + +The different aspects of each test are execution in a specific order by Flowman. + +1. A test environment is setup by adding or modifying any variable specified in the `environment` section +2. All relations and mappings specified as overrides are created to replace and extend the original entities. +3. All `targets` and `fixtures` are executed. Data dependencies are used to determine a correct execution order. + The execution includes the `CREATE`, `BUILD` and `VERIFY` phases. +4. All `assertions` are executed +5. All `targets` and `fixtures` are cleaned up by executing the `DESTROY` phase. 
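+
+Tests are normally executed via the Flowman command line tools. The exact subcommand may differ between Flowman
+versions, so the following invocation is only an assumption and should be checked against `flowexec --help`:
+
+```shell
+# Assumed invocation: execute all tests defined in the weather example project
+flowexec -f examples/weather test run
+```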
+ + + +## Example +```yaml +tests: + common_fixtures: + overrideMappings: + mapping_a: + kind: mock + records: + - [1,"cat","black"] + - [2,"dog","brown"] + + overrideRelations: + raw_data: + kind: mock + records: + - "042599999963897201301010000I" + + test_aggregation: + description: "Test all aggregations" + extends: + - common_fixtures + + environment: + - some_value=12 + + targets: + - build_cube + + fixtures: + prepare_additional_data: + kind: relation + relation: additional_data + mapping: mapping_a + + assertions: + measurements_extracted: + kind: sql + description: "Measurements are extracted correctly" + tests: + - query: "SELECT * FROM measurements_extracted" + expected: + - [1,63897,999999,10.6,2013-01-01,0.9,1,124,CRN05,1,0000,H] + - [1,63897,999999,10.6,2013-01-01,1.5,1,124,CRN05,1,0005,H] + - query: "SELECT COUNT(*) FROM measurements_extracted" + expected: 2 +``` + +In the example above, the following steps are executed: + +1. A new execution environment is created containing all original variables plus the `some_value` variable. +2. The override mapping `mapping_a` and override relation `raw_data` are added to the execution environment. +3. The targets and fixtures `build_cube` and `prepare_additional_data` are executed with phases `CREATE`, `BUILD` and + `VERIFY`. +4. The assertion `measurement_extracted` is run with all tests executed. +5. The targets and fixtures `build_cube` and `prepare_additional_data` are cleaned up with phase `DESTROY`. diff --git a/docs/spec/test/index.md b/docs/spec/test/index.md new file mode 100644 index 000000000..1dd8ee640 --- /dev/null +++ b/docs/spec/test/index.md @@ -0,0 +1,90 @@ +# Tests + +Flowman supports test cases as first order entities. These are used for creating self contained unittests to verify +the correctness of the specified data flow. Flowman provides mocking capabilities for mappings and relations such that +physical relations can be replaced by mocked virtual relations and mappings. + +## Example +```yaml +tests: + common_fixtures: + overrideMappings: + mapping_a: + kind: mock + records: + - [1,"cat","black"] + - [2,"dog","brown"] + + overrideRelations: + raw_data: + kind: mock + records: + - "042599999963897201301010000I" + + test_aggregation: + description: "Test all aggregations" + extends: + - common_fixtures + + environment: + - some_value=12 + + targets: + - build_cube + + fixtures: + prepare_additional_data: + kind: relation + relation: additional_data + mapping: mapping_a + + assertions: + measurements_extracted: + kind: sql + description: "Measurements are extracted correctly" + tests: + - query: "SELECT * FROM measurements_extracted" + expected: + - [1,63897,999999,10.6,2013-01-01,0.9,1,124,CRN05,1,0000,H] + - [1,63897,999999,10.6,2013-01-01,1.5,1,124,CRN05,1,0005,H] + - query: "SELECT COUNT(*) FROM measurements_extracted" + expected: 2 +``` + +## Fields +* `description` **(optional)** *(type: string)*: + A textual description of the test + +* `environment` **(optional)** *(type: list:string)*: + A list of `key=value` pairs for defining or overriding environment variables which can be accessed in expressions. + +* `targets`: **(optional)** *(type: list:string)*: + List of targets to be built as part of the test. The targets need to be defined in the regular `targets` section. + These targets represent the entities to be tested. + +* `fixtures`: **(optional)** *(type: map:target)*: + List of additional targets to be executed as *test fixtures*. 
These targets are defined directly within the test case + and are typically used to produce phyiscal data, which then is pciked up by some test cases. + +* `overrideMappings`: **(optional)** *(type: map:mapping)*: + This section allows you to override existing mappings with new definitions. Typically this is used for mocking the + output of some mappings by replacing those with [`values`](../mapping/values.md) or [`mock`](../mapping/mock.md) + mappings. You can also specify new mappings in this section. + +* `overrideRelations`: **(optional)** *(type: map:relation)*: + This section allows you to override existing relations with new definitions. You can also specify new relations in + this section. + +* `assertions`: **(optional)** *(type: map:assertion)*: + This section contains the set of [assertions](../assertion/index.md) to be executed. The test is considered to have + failed if a single assertion has failed. + + +## Sub Pages +```eval_rst +.. toctree:: + :maxdepth: 1 + :glob: + + * +``` diff --git a/examples/plugin-example/README.md b/examples/plugin-example/README.md deleted file mode 100644 index a0d546185..000000000 --- a/examples/plugin-example/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Preparing the Environment - -Since this is a super simple example, no preparations are required - -# Using flowman - - flowexec -f examples/plugin-example project run diff --git a/examples/plugin-example/job/main.yml b/examples/plugin-example/job/main.yml deleted file mode 100644 index 8bd482f6b..000000000 --- a/examples/plugin-example/job/main.yml +++ /dev/null @@ -1,9 +0,0 @@ -targets: - hello: - kind: hello-world - -jobs: - main: - description: "Simply print 'Hello World'" - targets: - - hello diff --git a/examples/plugin-example/project.yml b/examples/plugin-example/project.yml deleted file mode 100644 index 9a60cfd98..000000000 --- a/examples/plugin-example/project.yml +++ /dev/null @@ -1,5 +0,0 @@ -name: "plugin-example" -version: "1.0" - -modules: - - job diff --git a/examples/sftp-upload/job/main.yml b/examples/sftp-upload/job/main.yml index 1845f3aa7..008f7e2ca 100644 --- a/examples/sftp-upload/job/main.yml +++ b/examples/sftp-upload/job/main.yml @@ -1,7 +1,6 @@ targets: upload: kind: sftpUpload - description: Upload file via SFTP connection: sftp source: "${project.basedir}/data/example.csv" target: "${sftp_target}/example.csv" diff --git a/examples/weather/job/main.yml b/examples/weather/job/main.yml index 968cd7604..5424d77e6 100644 --- a/examples/weather/job/main.yml +++ b/examples/weather/job/main.yml @@ -1,10 +1,15 @@ jobs: + # Define the 'main' job, which implicitly is used whenever you build the whole project main: + # Add a parameter for selecting the year to process. 
This will create an environment variable `$year` which + # can be accessed from within other entities like mappings, relations, etc parameters: - name: year type: Integer default: 2013 + # List all targets which should be built as part of the `main` job targets: - measurements - stations - aggregates + - validate_stations_raw diff --git a/examples/weather/mapping/aggregates.yml b/examples/weather/mapping/aggregates.yml index 99ec8c539..a6a63b88c 100644 --- a/examples/weather/mapping/aggregates.yml +++ b/examples/weather/mapping/aggregates.yml @@ -1,4 +1,5 @@ mappings: + # Create some aggregates containing min/max/avg metrics of wind speed and temperature aggregates: kind: aggregate input: facts diff --git a/examples/weather/mapping/facts.yml b/examples/weather/mapping/facts.yml index daefa134b..ac79c1c3c 100644 --- a/examples/weather/mapping/facts.yml +++ b/examples/weather/mapping/facts.yml @@ -1,19 +1,25 @@ mappings: - measurements-joined: + # The `measurements-joined` mapping will add station metadata to measurements + measurements_joined: # Join together measurements and stations kind: join mode: left + # Specify list of input mappings to be joined inputs: - measurements - stations + # Specify columns to use for joining. Both input mappings need to contain both columns, merging is performed + # whenever the values of both columns match in both input mappings columns: - usaf - wban + # Replace invalid values with NULLs facts: - # Replace invalid values with NULLs kind: extend - input: measurements-joined + input: measurements_joined + # Replace existing columns with new values, which will contain NULL values whenever the quality flags + # indicate so columns: wind_direction: "CASE WHEN wind_direction_qual=1 THEN wind_direction END" wind_speed: "CASE WHEN wind_speed_qual=1 THEN wind_speed END" diff --git a/examples/weather/mapping/measurements.yml b/examples/weather/mapping/measurements.yml index d579de3af..2d59b83a1 100644 --- a/examples/weather/mapping/measurements.yml +++ b/examples/weather/mapping/measurements.yml @@ -1,20 +1,21 @@ mappings: # This mapping refers to the "raw" relation and reads in data from the source in S3 - measurements-raw: + measurements_raw: kind: read - relation: measurements-raw + relation: measurements_raw partitions: year: $year columns: raw_data: String - measurements-extracted: + # Extract multiple columns from the raw measurements data using SQL SUBSTR functions + measurements_extracted: kind: select - input: measurements-raw + input: measurements_raw columns: usaf: "SUBSTR(raw_data,5,6)" wban: "SUBSTR(raw_data,11,5)" - date: "SUBSTR(raw_data,16,8)" + date: "TO_DATE(SUBSTR(raw_data,16,8), 'yyyyMMdd')" time: "SUBSTR(raw_data,24,4)" report_type: "SUBSTR(raw_data,42,5)" wind_direction: "SUBSTR(raw_data,61,3)" diff --git a/examples/weather/mapping/stations.yml b/examples/weather/mapping/stations.yml index 22e798fd6..fb72b34df 100644 --- a/examples/weather/mapping/stations.yml +++ b/examples/weather/mapping/stations.yml @@ -1,10 +1,10 @@ mappings: # This mapping refers to the "raw" relation and reads in data from the source in S3 - stations-raw: - kind: read - relation: stations-raw + stations_raw: + kind: readRelation + relation: stations_raw # This mapping refers to the Parquet relation and reads in data from the local file system stations: - kind: read + kind: readRelation relation: stations diff --git a/examples/weather/model/aggregates.yml b/examples/weather/model/aggregates.yml index 6ad8e8f5d..aba6ea933 100644 --- a/examples/weather/model/aggregates.yml 
+++ b/examples/weather/model/aggregates.yml @@ -1,13 +1,19 @@ relations: aggregates: kind: file + # Specify the file format to use format: parquet + # Specify the base directory where all data is stored. This location does not include the partition pattern location: "$basedir/aggregates/" + # Specify the pattern how to identify files and/or partitions. This pattern is relative to the `location` pattern: "${year}" + # Add partition column, which can be used in the `pattern` partitions: - name: year type: integer granularity: 1 + # Specify an optional schema here. It is always recommended to explicitly specify a schema for every relation + # and not just let data flow from a mapping into a target. schema: kind: embedded fields: diff --git a/examples/weather/model/measurements-raw.yml b/examples/weather/model/measurements-raw.yml index 1848c3356..d114cc6b4 100644 --- a/examples/weather/model/measurements-raw.yml +++ b/examples/weather/model/measurements-raw.yml @@ -1,5 +1,5 @@ relations: - measurements-raw: + measurements_raw: kind: file format: text location: "s3a://dimajix-training/data/weather/" diff --git a/examples/weather/model/measurements.yml b/examples/weather/model/measurements.yml index d3a5b210a..8e069aa1a 100644 --- a/examples/weather/model/measurements.yml +++ b/examples/weather/model/measurements.yml @@ -9,27 +9,5 @@ relations: type: integer granularity: 1 schema: - kind: embedded - fields: - - name: usaf - type: STRING - - name: wban - type: STRING - - name: date - type: STRING - - name: time - type: STRING - - name: wind_direction - type: STRING - - name: wind_direction_qual - type: STRING - - name: wind_observation - type: STRING - - name: wind_speed - type: FLOAT - - name: wind_speed_qual - type: STRING - - name: air_temperature - type: FLOAT - - name: air_temperature_qual - type: STRING + kind: avro + file: "${project.basedir}/schema/measurements.avsc" diff --git a/examples/weather/model/stations-raw.yml b/examples/weather/model/stations-raw.yml index 25f736165..58b9f55e8 100644 --- a/examples/weather/model/stations-raw.yml +++ b/examples/weather/model/stations-raw.yml @@ -1,5 +1,5 @@ relations: - stations-raw: + stations_raw: kind: file format: csv location: "s3a://dimajix-training/data/weather/isd-history/" @@ -8,28 +8,17 @@ relations: encoding: "UTF-8" quote: "\"" header: "true" + dateFormat: "yyyyMMdd" schema: - kind: embedded - fields: - - name: usaf - type: STRING - - name: wban - type: STRING - - name: name - type: STRING - - name: country - type: STRING - - name: state - type: STRING - - name: icao - type: STRING - - name: latitude - type: FLOAT - - name: longitude - type: FLOAT - - name: elevation - type: FLOAT - - name: date_begin - type: STRING - - name: date_end - type: STRING + kind: avro + file: "${project.basedir}/schema/stations.avsc" + + +targets: + validate_stations_raw: + kind: validate + assertions: + check_primary_key: + kind: sql + query: "SELECT usaf,wban,COUNT(*) FROM stations_raw GROUP BY usaf,wban HAVING COUNT(*) > 1" + expected: [ ] diff --git a/examples/weather/model/stations.yml b/examples/weather/model/stations.yml index d3087a614..c7f21f0d7 100644 --- a/examples/weather/model/stations.yml +++ b/examples/weather/model/stations.yml @@ -4,27 +4,5 @@ relations: format: parquet location: "$basedir/stations/" schema: - kind: embedded - fields: - - name: usaf - type: STRING - - name: wban - type: STRING - - name: name - type: STRING - - name: country - type: STRING - - name: state - type: STRING - - name: icao - type: STRING - - name: latitude - 
type: FLOAT - - name: longitude - type: FLOAT - - name: elevation - type: FLOAT - - name: date_begin - type: STRING - - name: date_end - type: STRING + kind: avro + file: "${project.basedir}/schema/stations.avsc" diff --git a/examples/weather/project.yml b/examples/weather/project.yml index a18f6cde5..45979cd4e 100644 --- a/examples/weather/project.yml +++ b/examples/weather/project.yml @@ -7,3 +7,4 @@ modules: - target - job - config + - test diff --git a/examples/weather/schema/measurements.avsc b/examples/weather/schema/measurements.avsc new file mode 100644 index 000000000..c81633a53 --- /dev/null +++ b/examples/weather/schema/measurements.avsc @@ -0,0 +1,51 @@ +{ + "type": "record", + "namespace": "", + "name": "stations", + "fields": [ + { + "name": "usaf", + "type": "int" + }, + { + "name": "wban", + "type": "int" + }, + { + "name": "date", + "type": { "type": "int", "logicalType": "date" } + }, + { + "name": "time", + "type": "string" + }, + { + "name": "wind_direction", + "type": [ "string", "null" ] + }, + { + "name": "wind_direction_qual", + "type": "string" + }, + { + "name": "wind_observation", + "type": [ "string", "null" ] + }, + { + "name": "wind_speed", + "type": [ "float", "null" ] + }, + { + "name": "wind_speed_qual", + "type": "string" + }, + { + "name": "air_temperature", + "type": [ "float", "null" ] + }, + { + "name": "air_temperature_qual", + "type": "string" + } + ] +} diff --git a/examples/weather/schema/stations.avsc b/examples/weather/schema/stations.avsc new file mode 100644 index 000000000..13f208e46 --- /dev/null +++ b/examples/weather/schema/stations.avsc @@ -0,0 +1,51 @@ +{ + "type": "record", + "namespace": "", + "name": "stations", + "fields": [ + { + "name": "usaf", + "type": "int" + }, + { + "name": "wban", + "type": "int" + }, + { + "name": "name", + "type": [ "string", "null" ] + }, + { + "name": "country", + "type": [ "string", "null" ] + }, + { + "name": "state", + "type": [ "string", "null" ] + }, + { + "name": "icao", + "type": [ "string", "null" ] + }, + { + "name": "latitude", + "type": [ "float", "null" ] + }, + { + "name": "longitude", + "type": [ "float", "null" ] + }, + { + "name": "elevation", + "type": [ "float", "null" ] + }, + { + "name": "date_begin", + "type": [ { "type": "int", "logicalType": "date" }, "null" ] + }, + { + "name": "date_end", + "type": [ { "type": "int", "logicalType": "date" }, "null" ] + } + ] +} diff --git a/examples/weather/target/measurements.yml b/examples/weather/target/measurements.yml index 11e8325dd..73e05bad0 100644 --- a/examples/weather/target/measurements.yml +++ b/examples/weather/target/measurements.yml @@ -1,7 +1,7 @@ targets: measurements: kind: relation - mapping: measurements-extracted + mapping: measurements_extracted relation: measurements partition: year: $year diff --git a/examples/weather/target/stations.yml b/examples/weather/target/stations.yml index 85927aaec..ad32188b5 100644 --- a/examples/weather/target/stations.yml +++ b/examples/weather/target/stations.yml @@ -1,5 +1,5 @@ targets: stations: kind: relation - mapping: stations-raw + mapping: stations_raw relation: stations diff --git a/examples/weather/test/test-facts.yml b/examples/weather/test/test-facts.yml new file mode 100644 index 000000000..66a29e63f --- /dev/null +++ b/examples/weather/test/test-facts.yml @@ -0,0 +1,60 @@ +tests: + test_facts: + environment: + - year=2013 + + overrideMappings: + measurements: + kind: mock + records: + - year: $year + date: $year-01-02 + time: 0100 + usaf: 999999 + wban: 63897 + 
wind_direction_qual: 9 + wind_speed_qual: 9 + air_temperature_qual: 9 + - year: $year + date: $year-01-02 + time: 0100 + usaf: 99999 + wban: 63897 + wind_direction_qual: 9 + wind_speed_qual: 9 + air_temperature_qual: 9 + - year: $year + date: $year-01-02 + time: 0100 + usaf: 999999 + wban: 3897 + wind_direction_qual: 9 + wind_speed_qual: 9 + air_temperature_qual: 9 + + stations: + kind: mock + records: + - usaf: 999999 + wban: 63897 + country: US + - usaf: 999999 + wban: 1 + country: DE + - usaf: 1 + wban: 63897 + country: DE + + targets: + - validate_stations_raw + + assertions: + measurements_joined: + kind: sql + description: "Measurements are joined correctly" + tests: + - query: "SELECT year,usaf,wban,country FROM measurements_joined" + expected: + - [$year,999999,63897,US] + - [$year,99999,63897,null] + - [$year,999999,3897,null] diff --git a/examples/weather/test/test-measurements.yml b/examples/weather/test/test-measurements.yml new file mode 100644 index 000000000..fd2d514e0 --- /dev/null +++ b/examples/weather/test/test-measurements.yml @@ -0,0 +1,23 @@ +tests: + test_measurements: + environment: + - year=2013 + + overrideMappings: + measurements_raw: + kind: mock + records: + - "042599999963897201301010000I+32335-086979CRN05+004899999V0201241H000919999999N999999999+01061+99999999999ADDAA101000091AO105000091CF1106610CF2000010CG1+0307310CG2+0320310CG3+0310810CN1012610012010999990CN2+999990+0127100010CN30149981004935110CN40100000104001014010CO199-06CR10510210CT1+010510CT2+010610CT3+010610CU1+999990000310CU2+999990000310CU3+999990000310CV1+010510999990+011410999990CV2+010610999990+011410999990CV3+010610999990+011410999990CW111370102965010KA1010M+01141KA2010N+01061KF1+01121OB10050011101311099999900098410" + - "013399999963897201301010005I+32335-086979CRN05+004899999V0201241H001519999999N999999999+01061+99999999999ADDAO105000091CG1+0307310CG2+0320410CG3+0310810CO199-06CT1+010610CT2+010610CT3+010610CW111370102965010OB10050022101251099999900058710" + + assertions: + measurements_extracted: + kind: sql + description: "Measurements are extracted correctly" + tests: + - query: "SELECT * FROM measurements_extracted" + expected: + - [1,63897,999999,10.6,2013-01-01,0.9,1,124,CRN05,1,0000,H] + - [1,63897,999999,10.6,2013-01-01,1.5,1,124,CRN05,1,0005,H] + - query: "SELECT COUNT(*) FROM measurements_extracted" + expected: 2 diff --git a/flowman-core/pom.xml b/flowman-core/pom.xml index ee4cea59b..a4c06d881 100644 --- a/flowman-core/pom.xml +++ b/flowman-core/pom.xml @@ -9,26 +9,30 @@ com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 .. 
- + - CDH-5.15 + default + + + true + - com.databricks + org.apache.spark spark-avro_${scala.api_version} - ${spark-avro.version} + ${spark.version} - + - spark-2.3 + CDH-5.15 com.databricks @@ -37,17 +41,15 @@ + + - spark-2.4 - - - true - + spark-2.3 - org.apache.spark + com.databricks spark-avro_${scala.api_version} - ${spark.version} + ${spark-avro.version} @@ -173,7 +175,7 @@ com.typesafe.slick slick_${scala.api_version} - 3.2.3 + 3.3.3 compile @@ -198,11 +200,6 @@ org.scalamock scalamock_${scala.api_version} - - - org.mockito - mockito-core - diff --git a/flowman-core/src/main/resources/META-INF/services/com.dimajix.flowman.spi.ClassAnnotationHandler b/flowman-core/src/main/resources/META-INF/services/com.dimajix.flowman.spi.ClassAnnotationHandler index a0725ef89..b708ff53e 100644 --- a/flowman-core/src/main/resources/META-INF/services/com.dimajix.flowman.spi.ClassAnnotationHandler +++ b/flowman-core/src/main/resources/META-INF/services/com.dimajix.flowman.spi.ClassAnnotationHandler @@ -1,2 +1,2 @@ -com.dimajix.flowman.templating.TemplateObjectHandler +com.dimajix.flowman.spi.TemplateObjectHandler diff --git a/flowman-core/src/main/scala/com/dimajix/common/MapIgnoreCase.scala b/flowman-core/src/main/scala/com/dimajix/common/MapIgnoreCase.scala index dc78d32c8..681a1709e 100644 --- a/flowman-core/src/main/scala/com/dimajix/common/MapIgnoreCase.scala +++ b/flowman-core/src/main/scala/com/dimajix/common/MapIgnoreCase.scala @@ -32,6 +32,12 @@ object MapIgnoreCase { def apply[T](seq:Seq[(String,T)]) : MapIgnoreCase[T] = { new MapIgnoreCase[T](seq.map(kv => kv._1.toLowerCase(Locale.ROOT) -> ((kv._1, kv._2))).toMap) } + def apply[T](head:(String,T)) : MapIgnoreCase[T] = { + apply(Seq(head)) + } + def apply[T](head:(String,T), tail:(String,T)*) : MapIgnoreCase[T] = { + apply(head +: tail.toSeq) + } } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/config/ConfigBuilder.scala b/flowman-core/src/main/scala/com/dimajix/flowman/config/ConfigBuilder.scala index f70fb9144..f29b8e150 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/config/ConfigBuilder.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/config/ConfigBuilder.scala @@ -4,6 +4,14 @@ import java.io.File private object ConfigHelpers { + private lazy val classLoader = { + val cl = Thread.currentThread.getContextClassLoader + if (cl == null) + classOf[Configuration].getClassLoader + else + cl + } + def toNumber[T](s: String, converter: String => T, key: String, configType: String): T = { try { @@ -30,6 +38,21 @@ private object ConfigHelpers { def seqToString[T](v: Seq[T], stringConverter: T => String): String = { v.map(stringConverter).mkString(",") } + + def stringToClass[T](s: String, key:String, xface:Class[T]) : Class[_ <: T] = { + try { + val clazz = Class.forName(s, true, classLoader) + clazz.asSubclass(xface) + } + catch { + case e: ClassNotFoundException => + throw new RuntimeException(e) + } + } + + def classToString[T](clazz:Class[T]) : String = { + clazz.getCanonicalName + } } @@ -127,4 +150,8 @@ case class ConfigBuilder(key: String) { def fileConf: TypedConfigBuilder[File] = { new TypedConfigBuilder(this, v => new File(v)) } + + def classConf[T](xface:Class[T]): TypedConfigBuilder[Class[_ <: T]] = { + new TypedConfigBuilder(this, stringToClass(_, key, xface), classToString(_)) + } } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/config/FlowmanConf.scala b/flowman-core/src/main/scala/com/dimajix/flowman/config/FlowmanConf.scala index 4536b16e8..a7281e140 100644 --- 
a/flowman-core/src/main/scala/com/dimajix/flowman/config/FlowmanConf.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/config/FlowmanConf.scala @@ -20,7 +20,9 @@ import java.io.File import java.nio.file.FileSystem import java.util.NoSuchElementException +import com.dimajix.flowman.execution.Executor import com.dimajix.flowman.execution.OutputMode +import com.dimajix.flowman.execution.SimpleExecutor import com.dimajix.spark.features @@ -61,11 +63,23 @@ object FlowmanConf { .doc("Consider all targets as being 'dirty' without checking") .booleanConf .createWithDefault(false) + val EXECUTION_EXECUTOR_CLASS = buildConf("flowman.execution.execution.class") + .doc("Class name for executing targets") + .classConf(classOf[Executor]) + .createWithDefault(classOf[SimpleExecutor]) val DEFAULT_TARGET_OUTPUT_MODE = buildConf("flowman.default.target.outputMode") .doc("Default output mode of targets") .stringConf .createWithDefault(OutputMode.OVERWRITE.toString) + val DEFAULT_TARGET_REBALANCE = buildConf("floman.default.target.rebalance") + .doc("Rebalances all outputs before writing") + .booleanConf + .createWithDefault(false) + val DEFAULT_TARGET_PARALLELISM = buildConf("floman.default.target.parallelism") + .doc("Uses the specified number of partitions for writing targets. -1 disables") + .intConf + .createWithDefault(16) } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/AbstractContext.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/AbstractContext.scala index 285a1951e..fa1ebd162 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/AbstractContext.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/AbstractContext.scala @@ -16,14 +16,10 @@ package com.dimajix.flowman.execution -import java.io.StringWriter - -import scala.collection.JavaConverters._ import scala.collection.mutable import org.apache.hadoop.conf.{Configuration => HadoopConf} import org.apache.spark.SparkConf -import org.apache.velocity.VelocityContext import org.slf4j.Logger import com.dimajix.flowman.config.Configuration @@ -32,12 +28,10 @@ import com.dimajix.flowman.hadoop.FileSystem import com.dimajix.flowman.model.Connection import com.dimajix.flowman.model.Profile import com.dimajix.flowman.model.Template -import com.dimajix.flowman.templating.RecursiveValue -import com.dimajix.flowman.templating.Velocity object AbstractContext { - abstract class Builder[B <: Builder[B,C], C <: Context](parent:Context, defaultSettingLevel:SettingLevel) /*extends Context.Builder[B,C]*/ { this:B => + abstract class Builder[B <: Builder[B,C], C <: Context](parent:Context, defaultSettingLevel:SettingLevel) { this:B => private var _environment = Seq[(String,Any,SettingLevel)]() private var _config = Seq[(String,String,SettingLevel)]() private var _connections = Seq[(String, Template[Connection], SettingLevel)]() @@ -53,6 +47,7 @@ object AbstractContext { val rawConfig = mutable.Map[String,(String, Int)]() val rawConnections = mutable.Map[String, (Template[Connection], Int)]() + // Fetch environment from parent if (parent != null) { parent.rawEnvironment.foreach(kv => rawEnvironment.update(kv._1, kv._2)) parent.rawConfig.foreach(kv => rawConfig.update(kv._1, kv._2)) diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/AnalyzingExecutor.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/AnalyzingExecution.scala similarity index 93% rename from flowman-core/src/main/scala/com/dimajix/flowman/execution/AnalyzingExecutor.scala rename to 
flowman-core/src/main/scala/com/dimajix/flowman/execution/AnalyzingExecution.scala index 700bdd66b..f00711ae9 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/AnalyzingExecutor.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/AnalyzingExecution.scala @@ -27,8 +27,8 @@ import com.dimajix.flowman.hadoop.FileSystem import com.dimajix.flowman.metric.MetricSystem -class AnalyzingExecutor(context: Context) extends CachingExecutor(null, true) { - override protected val logger = LoggerFactory.getLogger(classOf[AnalyzingExecutor]) +class AnalyzingExecution(context: Context) extends CachingExecution(None, true) { + override protected val logger = LoggerFactory.getLogger(classOf[AnalyzingExecution]) private lazy val _metricSystem = new MetricSystem @@ -39,7 +39,7 @@ class AnalyzingExecutor(context: Context) extends CachingExecutor(null, true) { def flowmanConf : FlowmanConf = context.flowmanConf /** - * Returns the MetricRegistry of this executor + * Returns the MetricRegistry of this execution * * @return */ diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/CachingExecutor.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/CachingExecution.scala similarity index 78% rename from flowman-core/src/main/scala/com/dimajix/flowman/execution/CachingExecutor.scala rename to flowman-core/src/main/scala/com/dimajix/flowman/execution/CachingExecution.scala index 5fe47a9a1..508659643 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/CachingExecutor.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/CachingExecution.scala @@ -30,24 +30,24 @@ import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.types.StructType -abstract class CachingExecutor(parent:Option[Executor], isolated:Boolean) extends Executor { +abstract class CachingExecution(parent:Option[Execution], isolated:Boolean) extends Execution { protected val logger:Logger private val frameCache:IdentityHashMap[Mapping,Map[String,DataFrame]] = { parent match { - case Some(ce:CachingExecutor) if !isolated => + case Some(ce:CachingExecution) if !isolated => ce.frameCache case _ => IdentityHashMap[Mapping,Map[String,DataFrame]]() } } - private val schemaCache:mutable.Map[MappingOutputIdentifier, StructType] = { + private val schemaCache:IdentityHashMap[Mapping, mutable.Map[String,StructType]] = { parent match { - case Some(ce:CachingExecutor) if !isolated => + case Some(ce:CachingExecution) if !isolated => ce.schemaCache case _ => - mutable.Map[MappingOutputIdentifier, StructType]() + IdentityHashMap[Mapping, mutable.Map[String,StructType]]() } } @@ -70,17 +70,17 @@ abstract class CachingExecutor(parent:Option[Executor], isolated:Boolean) extend * @return */ override def describe(mapping:Mapping, output:String) : StructType = { - val oid = MappingOutputIdentifier(mapping.identifier, output) - schemaCache.getOrElseUpdate(oid, { - if (!mapping.outputs.contains(output)) - throw new NoSuchMappingOutputException(oid) - val context = mapping.context - val deps = mapping.inputs - .map(id => id -> describe(context.getMapping(id.mapping), id.output)) - .toMap - - mapping.describe(this, deps, output) - }) + schemaCache.getOrElseUpdate(mapping, mutable.Map()) + .getOrElseUpdate(output, { + if (!mapping.outputs.contains(output)) + throw new NoSuchMappingOutputException(mapping.identifier, output) + val context = mapping.context + val deps = mapping.inputs + .map(id => id -> describe(context.getMapping(id.mapping), id.output)) + 
.toMap + + mapping.describe(this, deps, output) + }) } /** @@ -138,9 +138,14 @@ abstract class CachingExecutor(parent:Option[Executor], isolated:Boolean) extend else df1 - // Optionally cache the DataFrame - if (cacheLevel != null && cacheLevel != StorageLevel.NONE) - df2.values.foreach(_.persist(cacheLevel)) + // Optionally cache the DataFrames + if (cacheLevel != null && cacheLevel != StorageLevel.NONE) { + // If one of the DataFrame is called 'cache', then only cache that one, otherwise all will be cached + if (df2.keySet.contains("cache")) + df2("cache").persist(cacheLevel) + else + df2.values.foreach(_.persist(cacheLevel)) + } df2.foreach { case (name,df) => logger.debug(s"Instantiated mapping '${mapping.identifier}' output '$name' with schema\n ${df.schema.treeString}") diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/Context.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/Context.scala index 134ca967d..1debc4335 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/Context.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/Context.scala @@ -34,6 +34,8 @@ import com.dimajix.flowman.model.Relation import com.dimajix.flowman.model.RelationIdentifier import com.dimajix.flowman.model.Target import com.dimajix.flowman.model.TargetIdentifier +import com.dimajix.flowman.model.Test +import com.dimajix.flowman.model.TestIdentifier case class SettingLevel( level:Int @@ -58,7 +60,7 @@ abstract class Context { def namespace : Option[Namespace] /** - * Returns the project associated with this context. Can be null + * Returns the project associated with this context. Can be [[None]] * @return */ def project : Option[Project] @@ -129,25 +131,25 @@ abstract class Context { def getConnection(identifier: ConnectionIdentifier): Connection /** - * Returns a specific named Mapping. The Transform can either be inside this Contexts project or in a different + * Returns a specific named Mapping. The mapping can either be inside this Contexts project or in a different * project within the same namespace * * @param identifier * @return */ - def getMapping(identifier: MappingIdentifier) : Mapping + def getMapping(identifier: MappingIdentifier, allowOverrides:Boolean=true) : Mapping /** - * Returns a specific named Relation. The RelationType can either be inside this Contexts project or in a different + * Returns a specific named Relation. The relation can either be inside this Contexts project or in a different * project within the same namespace * * @param identifier * @return */ - def getRelation(identifier: RelationIdentifier): Relation + def getRelation(identifier: RelationIdentifier, allowOverrides:Boolean=true): Relation /** - * Returns a specific named Target. The TargetType can either be inside this Contexts project or in a different + * Returns a specific named Target. The target can either be inside this Contexts project or in a different * project within the same namespace * * @param identifier @@ -156,7 +158,7 @@ abstract class Context { def getTarget(identifier: TargetIdentifier): Target /** - * Returns a specific named Job. The JobType can either be inside this Contexts project or in a different + * Returns a specific named Job. The job can either be inside this Contexts project or in a different * project within the same namespace * * @param identifier @@ -164,6 +166,21 @@ abstract class Context { */ def getJob(identifier: JobIdentifier): Job + /** + * Returns a specific named Test. 
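The caching change above introduces a small convention in `CachingExecution`: when a mapping exposes an output named `cache`, only that output is persisted, otherwise every output of the mapping is persisted. A minimal sketch of that rule using plain Spark types (the helper name is illustrative, not part of Flowman):

```scala
import org.apache.spark.sql.DataFrame
import org.apache.spark.storage.StorageLevel

// Persist rule: if an output named "cache" exists, persist only that DataFrame;
// otherwise persist all outputs of the mapping.
def persistOutputs(outputs: Map[String, DataFrame], cacheLevel: StorageLevel): Unit = {
  if (cacheLevel != null && cacheLevel != StorageLevel.NONE) {
    if (outputs.contains("cache"))
      outputs("cache").persist(cacheLevel)
    else
      outputs.values.foreach(_.persist(cacheLevel))
  }
}
```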
The Test can either be inside this Contexts project or in a different + * project within the same namespace + * + * @param identifier + * @return + */ + def getTest(identifier: TestIdentifier): Test + + /** + * Returns the list of active profile names + * @return + */ + def profiles : Set[String] + /** * Returns all configuration options as a key-value map * diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/Execution.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/Execution.scala new file mode 100644 index 000000000..cf3196a60 --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/Execution.scala @@ -0,0 +1,146 @@ +/* + * Copyright 2018-2019 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.execution + +import org.apache.hadoop.conf.Configuration +import org.apache.spark.sql.DataFrame +import org.apache.spark.sql.RuntimeConfig +import org.apache.spark.sql.SparkSession + +import com.dimajix.flowman.catalog.Catalog +import com.dimajix.flowman.config.FlowmanConf +import com.dimajix.flowman.hadoop.FileSystem +import com.dimajix.flowman.metric.MetricSystem +import com.dimajix.flowman.model.Assertion +import com.dimajix.flowman.model.AssertionResult +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.types.StructType + + +/** + * An [[Execution]] is some sort of an execution context that is able to create DataFrames from Mappings. It also + * provides access to the Spark session, Hadoop filesystem and configurations. The [[Execution]] is only used during + * target execution and not created before. + */ +abstract class Execution { + /** + * Returns the MetricRegistry of this execution + * @return + */ + def metrics : MetricSystem + + /** + * Returns the FileSystem as configured in Hadoop + * @return + */ + def fs : FileSystem + + /** + * Returns (or lazily creates) a SparkSession of this Executor. The SparkSession will be derived from the global + * SparkSession, but a new derived session with a separate namespace will be created. + * + * @return + */ + def spark: SparkSession + + /** + * Returns the FlowmanConf object, which contains all Flowman settings. 
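The `allowOverrides` flag added to `getMapping` and `getRelation` above defaults to `true`; passing `false` bypasses any override templates (for example those registered by a test) and returns the entity exactly as defined in the project. A hedged usage sketch:

```scala
import com.dimajix.flowman.execution.Context
import com.dimajix.flowman.model.{Mapping, MappingIdentifier}

// Resolve a mapping twice: once with overrides applied (the default), and once
// as originally defined in the project, ignoring any registered overrides.
def resolveBoth(context: Context, id: MappingIdentifier): (Mapping, Mapping) = {
  val effective = context.getMapping(id)                         // overrides win, if any
  val original  = context.getMapping(id, allowOverrides = false) // project definition only
  (effective, original)
}
```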
+ * @return + */ + def flowmanConf : FlowmanConf + + /** + * Returns the Spark configuration + */ + def sparkConf : RuntimeConfig = spark.conf + + /** + * Returns the Hadoop configuration as used by Spark + * @return + */ + def hadoopConf : Configuration = spark.sparkContext.hadoopConfiguration + + /** + * Returns true if a SparkSession is already available + * @return + */ + def sparkRunning: Boolean + + /** + * Returns the table catalog used for managing table instances + * @return + */ + def catalog: Catalog + + /** + * Creates an instance of a mapping, or retrieves it from cache + * + * @param mapping + */ + def instantiate(mapping:Mapping) : Map[String,DataFrame] + + /** + * Creates an instance of a mapping, or retrieves it from cache + * + * @param mapping + */ + def instantiate(mapping:Mapping, output:String) : DataFrame = { + if (!mapping.outputs.contains(output)) + throw new NoSuchMappingOutputException(MappingOutputIdentifier(mapping.identifier.name, output, mapping.identifier.project)) + + val instances = instantiate(mapping) + instances(output) + } + + def assert(assertion:Assertion) : Seq[AssertionResult] = { + val context = assertion.context + val inputs = assertion.inputs + .map(id => id -> instantiate(context.getMapping(id.mapping), id.output)) + .toMap + + assertion.execute(this, inputs) + } + + /** + * Returns the schema for a specific output created by a specific mapping. Note that not all mappings support + * schema analysis beforehand. In such cases, None will be returned. + * @param mapping + * @param output + * @return + */ + def describe(mapping:Mapping, output:String) : StructType + /** + * Returns the schema for a specific output created by a specific mapping. Note that not all mappings support + * schema analysis beforehand. In such cases, None will be returned. + * @param mapping + * @return + */ + def describe(mapping:Mapping) : Map[String, StructType] = { + val context = mapping.context + val deps = mapping.inputs + .map(id => id -> describe(context.getMapping(id.mapping), id.output)) + .toMap + + mapping.describe(this, deps) + } + + /** + * Releases any temporary tables + */ + def cleanup() : Unit +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/Executor.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/Executor.scala index 6198247f1..b815bffef 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/Executor.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/Executor.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2019 Kaya Kupferschmidt + * Copyright 2018-2021 Kaya Kupferschmidt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
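With the former `Executor` interface split into the new `Execution` above, code that only needs to materialize mappings can be written against `Execution` alone. A short hedged sketch of typical usage (the helper itself is illustrative, not part of this change set):

```scala
import org.apache.spark.sql.DataFrame

import com.dimajix.flowman.execution.Execution
import com.dimajix.flowman.model.Mapping
import com.dimajix.flowman.types.StructType

// Describe the schema of one mapping output and instantiate it as a DataFrame.
// In the caching implementations, repeated calls for the same mapping hit a cache.
def preview(execution: Execution, mapping: Mapping, output: String): (StructType, DataFrame) = {
  val schema = execution.describe(mapping, output)     // schema analysis of the output
  val frame  = execution.instantiate(mapping, output)  // DataFrame for that output
  (schema, frame)
}
```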
@@ -16,115 +16,21 @@ package com.dimajix.flowman.execution -import org.apache.hadoop.conf.Configuration -import org.apache.spark.sql.DataFrame -import org.apache.spark.sql.RuntimeConfig -import org.apache.spark.sql.SparkSession - -import com.dimajix.flowman.catalog.Catalog -import com.dimajix.flowman.config.FlowmanConf -import com.dimajix.flowman.hadoop.FileSystem -import com.dimajix.flowman.metric.MetricSystem -import com.dimajix.flowman.model.Mapping -import com.dimajix.flowman.model.MappingOutputIdentifier -import com.dimajix.flowman.types.StructType +import com.dimajix.flowman.model.Target abstract class Executor { /** - * Returns the MetricRegistry of this executor - * @return - */ - def metrics : MetricSystem - - /** - * Returns the FileSystem as configured in Hadoop - * @return - */ - def fs : FileSystem - - /** - * Returns (or lazily creates) a SparkSession of this Executor. The SparkSession will be derived from the global - * SparkSession, but a new derived session with a separate namespace will be created. - * - * @return - */ - def spark: SparkSession - - /** - * Returns the FlowmanConf object, which contains all Flowman settings. - * @return - */ - def flowmanConf : FlowmanConf - - /** - * Returns the Spark configuration - */ - def sparkConf : RuntimeConfig = spark.conf - - /** - * Returns the Hadoop configuration as used by Spark - * @return - */ - def hadoopConf : Configuration = spark.sparkContext.hadoopConfiguration - - /** - * Returns true if a SparkSession is already available - * @return - */ - def sparkRunning: Boolean - - /** - * Returns the table catalog used for managing table instances - * @return - */ - def catalog: Catalog - - /** - * Creates an instance of a mapping, or retrieves it from cache - * - * @param mapping - */ - def instantiate(mapping:Mapping) : Map[String,DataFrame] - - /** - * Creates an instance of a mapping, or retrieves it from cache - * - * @param mapping - */ - def instantiate(mapping:Mapping, output:String) : DataFrame = { - if (!mapping.outputs.contains(output)) - throw new NoSuchMappingOutputException(MappingOutputIdentifier(mapping.identifier.name, output, mapping.identifier.project)) - - val instances = instantiate(mapping) - instances(output) - } - - /** - * Returns the schema for a specific output created by a specific mapping. Note that not all mappings support - * schema analysis beforehand. In such cases, None will be returned. - * @param mapping - * @param output - * @return - */ - def describe(mapping:Mapping, output:String) : StructType - /** - * Returns the schema for a specific output created by a specific mapping. Note that not all mappings support - * schema analysis beforehand. In such cases, None will be returned. - * @param mapping + * Executes a list of targets in an appropriate order. + * + * @param executor + * @param context + * @param phase - Phase to execute + * @param targets - List of all targets, even those which should not be executed + * @param filter - Filter predicate to find all targets to be execution + * @param keepGoing - True if errors in one target should not stop other targets from being executed + * @param fn - Function to call. Note that the function is expected not to throw a non-fatal exception. 
* @return */ - def describe(mapping:Mapping) : Map[String, StructType] = { - val context = mapping.context - val deps = mapping.inputs - .map(id => id -> describe(context.getMapping(id.mapping), id.output)) - .toMap - - mapping.describe(this, deps) - } - - /** - * Releases any temporary tables - */ - def cleanup() : Unit + def execute(execution: Execution, context:Context, phase: Phase, targets: Seq[Target], filter:Target => Boolean, keepGoing: Boolean)(fn:(Execution,Target,Phase) => Status) : Status } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/Phase.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/Phase.scala index 7d38d340a..170154357 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/Phase.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/Phase.scala @@ -26,6 +26,9 @@ sealed abstract class Phase { } object Phase { + case object VALIDATE extends Phase { + override val value = "validate" + } case object CREATE extends Phase { override val value = "create" } @@ -44,6 +47,7 @@ object Phase { def ofString(status:String) : Phase = { status.toLowerCase(Locale.ROOT) match { + case VALIDATE.value => VALIDATE case CREATE.value => CREATE case BUILD.value => BUILD case VERIFY.value => VERIFY @@ -61,6 +65,7 @@ object Phase { */ object Lifecycle { val BUILD:Seq[Phase] = Seq( + Phase.VALIDATE, Phase.CREATE, Phase.BUILD, Phase.VERIFY diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/ProjectContext.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/ProjectContext.scala index 87ecbc218..dd8a1e09d 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/ProjectContext.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/ProjectContext.scala @@ -23,6 +23,7 @@ import org.slf4j.LoggerFactory import com.dimajix.flowman.hadoop.File import com.dimajix.flowman.model.Connection import com.dimajix.flowman.model.ConnectionIdentifier +import com.dimajix.flowman.model.Identifier import com.dimajix.flowman.model.Job import com.dimajix.flowman.model.JobIdentifier import com.dimajix.flowman.model.Mapping @@ -36,6 +37,8 @@ import com.dimajix.flowman.model.RelationIdentifier import com.dimajix.flowman.model.Target import com.dimajix.flowman.model.TargetIdentifier import com.dimajix.flowman.model.Template +import com.dimajix.flowman.model.Test +import com.dimajix.flowman.model.TestIdentifier import com.dimajix.flowman.templating.FileWrapper @@ -45,14 +48,36 @@ object ProjectContext { require(project != null) override protected val logger = LoggerFactory.getLogger(classOf[ProjectContext]) + private var overrideMappings:Map[String, Template[Mapping]] = Map() + private var overrideRelations:Map[String, Template[Relation]] = Map() override def withProfile(profile:Profile) : Builder = { withProfile(profile, SettingLevel.PROJECT_PROFILE) this } + /** + * Add extra mappings, which potentially override existing project mappings + * @param mappings + * @return + */ + def overrideMappings(mappings:Map[String,Template[Mapping]]) : Builder = { + overrideMappings = overrideMappings ++ mappings + this + } + + /** + * Adds extra relations, which potentially override existing project relations + * @param relations + * @return + */ + def overrideRelations(relations:Map[String,Template[Relation]]) : Builder = { + overrideRelations = overrideRelations ++ relations + this + } + override protected def createContext(env:Map[String,(Any, Int)], config:Map[String,(String, Int)], 
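The reworked `Executor` above is now reduced to a single scheduling entry point, `execute(...)`, while all resource access lives in `Execution`. Below is a minimal sketch of a sequential implementation of that contract; it is only an illustration under the assumptions stated in the comments, not the default executor shipped with Flowman:

```scala
package com.dimajix.flowman.execution

import com.dimajix.flowman.model.Target

// Illustrative sequential executor: order the targets, apply the filter, then run
// them one after another through the provided callback.
class SimpleSequentialExecutor extends Executor {
  override def execute(
    execution: Execution,
    context: Context,
    phase: Phase,
    targets: Seq[Target],
    filter: Target => Boolean,
    keepGoing: Boolean
  )(fn: (Execution, Target, Phase) => Status): Status = {
    // Order before filtering, since transitive dependencies may involve inactive targets;
    // teardown phases run in reverse dependency order.
    val ordered = phase match {
      case Phase.DESTROY | Phase.TRUNCATE => TargetOrdering.sort(targets, phase).reverse
      case _ => TargetOrdering.sort(targets, phase)
    }
    val active = ordered.filter(filter)
    // keepGoing decides whether a failed target aborts the remaining ones
    Status.ofAll(active, keepGoing) { target => fn(execution, target, phase) }
  }
}
```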
connections:Map[String, Template[Connection]]) : ProjectContext = { - new ProjectContext(parent, project, env, config, connections) + new ProjectContext(parent, project, env, config, connections, overrideMappings, overrideRelations) } } @@ -66,21 +91,26 @@ object ProjectContext { * @param parent * @param _project */ -class ProjectContext private[execution]( +final class ProjectContext private[execution]( parent:Context, _project:Project, _env:Map[String,(Any, Int)], _config:Map[String,(String, Int)], - nonProjectConnections:Map[String, Template[Connection]] + extraConnections:Map[String, Template[Connection]], + overrideMappingTemplates:Map[String, Template[Mapping]], + overrideRelationTemplates:Map[String, Template[Relation]] ) extends AbstractContext( _env + ("project" -> ((ProjectWrapper(_project), SettingLevel.SCOPE_OVERRIDE.level))), _config) { private val mappings = mutable.Map[String,Mapping]() + private val overrideMappings = mutable.Map[String,Mapping]() private val relations = mutable.Map[String,Relation]() + private val overrideRelations = mutable.Map[String,Relation]() private val targets = mutable.Map[String,Target]() private val connections = mutable.Map[String,Connection]() private val jobs = mutable.Map[String,Job]() + private val tests = mutable.Map[String,Test]() /** * Returns the namespace associated with this context. Can be null @@ -88,6 +118,10 @@ class ProjectContext private[execution]( */ override def namespace : Option[Namespace] = parent.namespace + /** + * Returns the project associated with this context. Can be [[None]] + * @return + */ override def project : Option[Project] = Some(_project) /** @@ -96,6 +130,13 @@ class ProjectContext private[execution]( */ override def root : RootContext = parent.root + /** + * Returns the list of active profile names + * + * @return + */ + override def profiles: Set[String] = parent.profiles + /** * Returns a specific named Transform. 
The Transform can either be inside this Contexts project or in a different * project within the same namespace @@ -103,20 +144,26 @@ class ProjectContext private[execution]( * @param identifier * @return */ - override def getMapping(identifier: MappingIdentifier): Mapping = { + override def getMapping(identifier: MappingIdentifier, allowOverrides:Boolean=true): Mapping = { require(identifier != null && identifier.nonEmpty) + def findOverride() = { + if (allowOverrides) { + findOrInstantiate(identifier, overrideMappingTemplates, overrideMappings) + } + else { + None + } + } + def find() = { + findOrInstantiate(identifier, _project.mappings, mappings) + } + if (identifier.project.forall(_ == _project.name)) { - mappings.getOrElseUpdate(identifier.name, - _project.mappings - .getOrElse(identifier.name, - throw new NoSuchMappingException(identifier) - ) - .instantiate(this) - ) + findOverride().orElse(find()).getOrElse(throw new NoSuchMappingException(identifier)) } else { - parent.getMapping(identifier) + parent.getMapping(identifier, allowOverrides) } } @@ -127,20 +174,26 @@ class ProjectContext private[execution]( * @param identifier * @return */ - override def getRelation(identifier: RelationIdentifier): Relation = { + override def getRelation(identifier: RelationIdentifier, allowOverrides:Boolean=true): Relation = { require(identifier != null && identifier.nonEmpty) + def findOverride() = { + if (allowOverrides) { + findOrInstantiate(identifier, overrideRelationTemplates, overrideRelations) + } + else { + None + } + } + def find() = { + findOrInstantiate(identifier, _project.relations, relations) + } + if (identifier.project.forall(_ == _project.name)) { - relations.getOrElseUpdate(identifier.name, - _project.relations - .getOrElse(identifier.name, - throw new NoSuchRelationException(identifier) - ) - .instantiate(this) - ) + findOverride().orElse(find()).getOrElse(throw new NoSuchRelationException(identifier)) } else { - parent.getRelation(identifier) + parent.getRelation(identifier, allowOverrides) } } @@ -177,35 +230,38 @@ class ProjectContext private[execution]( override def getConnection(identifier:ConnectionIdentifier) : Connection = { require(identifier != null && identifier.nonEmpty) - if (identifier.project.forall(_ == _project.name)) { + if (identifier.project.contains(_project.name)) { + // Case 1: Project identifier explicitly set. Only look inside project connections.getOrElseUpdate(identifier.name, - if (identifier.project.nonEmpty) { - // Explicit project identifier specified, only look in project connections - _project.connections - .getOrElse(identifier.name, - throw new NoSuchConnectionException(identifier) - ) - .instantiate(this) - } - else { - // No project specifier given, first look into non-project connections, then try project connections - nonProjectConnections.getOrElse(identifier.name, - _project.connections - .getOrElse(identifier.name, - throw new NoSuchConnectionException(identifier) - ) + extraConnections.getOrElse(identifier.name, + _project.connections.getOrElse(identifier.name, + throw new NoSuchConnectionException(identifier) ) - .instantiate(this) - } + ) + .instantiate(this) + ) + } + else if (identifier.project.isEmpty) { + // Case 2: Project identifier not set. Look in project and in parent. 
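The lookup above resolves a mapping by first consulting the override templates (when `allowOverrides` is set), then the project's own templates, instantiating and caching each entity at most once. A simplified, generic sketch of that precedence (names are illustrative, not Flowman API):

```scala
import scala.collection.mutable

// Find an entity by name in a template map, instantiating it at most once per cache.
def findOrInstantiate[T](name: String,
                         templates: Map[String, () => T],
                         cache: mutable.Map[String, T]): Option[T] =
  cache.get(name).orElse {
    val instance = templates.get(name).map(_.apply())
    instance.foreach(cache.update(name, _))
    instance
  }

// Overrides (e.g. registered by a test) take precedence over the project's definitions;
// each kind keeps its own cache so the two never shadow each other accidentally.
def resolve[T](name: String, allowOverrides: Boolean,
               overrideTemplates: Map[String, () => T], overrideCache: mutable.Map[String, T],
               projectTemplates: Map[String, () => T], projectCache: mutable.Map[String, T]): Option[T] = {
  val fromOverride =
    if (allowOverrides) findOrInstantiate(name, overrideTemplates, overrideCache) else None
  fromOverride.orElse(findOrInstantiate(name, projectTemplates, projectCache))
}
```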
+ connections.getOrElse(identifier.name, + extraConnections.get(identifier.name) + .orElse(_project.connections.get(identifier.name)) + .map { t => + val instance = t.instantiate(this) + connections.update(identifier.name, instance) + instance + } + .getOrElse(parent.getConnection(identifier)) ) } else { + // Case 3: Project identifier set to different project parent.getConnection(identifier) } } /** - * Returns a specific named Job. The JobType can either be inside this Contexts project or in a different + * Returns a specific named Job. The job can either be inside this Contexts project or in a different * project within the same namespace * * @param identifier @@ -227,4 +283,41 @@ class ProjectContext private[execution]( parent.getJob(identifier) } } + + /** + * Returns a specific named Test. The test can either be inside this Contexts project or in a different + * project within the same namespace + * + * @param identifier + * @return + */ + override def getTest(identifier: TestIdentifier): Test = { + require(identifier != null && identifier.nonEmpty) + + if (identifier.project.forall(_ == _project.name)) { + tests.getOrElseUpdate(identifier.name, + _project.tests + .getOrElse(identifier.name, + throw new NoSuchTestException(identifier) + ) + .instantiate(this) + ) + } + else { + parent.getTest(identifier) + } + } + + private def findOrInstantiate[T](identifier:Identifier[T], templates:Map[String,Template[T]], cache:mutable.Map[String,T]) = { + val name = identifier.name + cache.get(name) + .orElse { + val m = templates + .get(name) + .map(_.instantiate(this)) + m.foreach(m => cache.update(name, m)) + m + } + } + } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/RootContext.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/RootContext.scala index 0d77052f5..5e694fc4b 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/RootContext.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/RootContext.scala @@ -24,6 +24,7 @@ import org.slf4j.LoggerFactory import com.dimajix.flowman.config.Configuration import com.dimajix.flowman.config.FlowmanConf +import com.dimajix.flowman.execution.ProjectContext.Builder import com.dimajix.flowman.hadoop.FileSystem import com.dimajix.flowman.model.Connection import com.dimajix.flowman.model.ConnectionIdentifier @@ -40,11 +41,15 @@ import com.dimajix.flowman.model.RelationIdentifier import com.dimajix.flowman.model.Target import com.dimajix.flowman.model.TargetIdentifier import com.dimajix.flowman.model.Template +import com.dimajix.flowman.model.Test +import com.dimajix.flowman.model.TestIdentifier object RootContext { - class Builder private[RootContext](namespace:Option[Namespace], profiles:Seq[String], parent:Context = null) extends AbstractContext.Builder[Builder,RootContext](parent, SettingLevel.NAMESPACE_SETTING) { + class Builder private[RootContext](namespace:Option[Namespace], profiles:Set[String], parent:Context = null) extends AbstractContext.Builder[Builder,RootContext](parent, SettingLevel.NAMESPACE_SETTING) { private var projectResolver:Option[String => Option[Project]] = None + private var overrideMappings:Map[MappingIdentifier, Template[Mapping]] = Map() + private var overrideRelations:Map[RelationIdentifier, Template[Relation]] = Map() override protected val logger = LoggerFactory.getLogger(classOf[RootContext]) @@ -58,24 +63,50 @@ object RootContext { this } + /** + * Add extra mappings, which potentially override existing project mappings + * @param mappings + * 
@return + */ + def overrideMappings(mappings:Map[MappingIdentifier,Template[Mapping]]) : Builder = { + if (mappings.keySet.exists(_.project.isEmpty)) + throw new IllegalArgumentException("MappingIdentifiers need to contain valid project for overriding") + overrideMappings = overrideMappings ++ mappings + this + } + + /** + * Adds extra relations, which potentially override existing project relations + * @param relations + * @return + */ + def overrideRelations(relations:Map[RelationIdentifier,Template[Relation]]) : Builder = { + if (relations.keySet.exists(_.project.isEmpty)) + throw new IllegalArgumentException("RelationIdentifiers need to contain valid project for overriding") + overrideRelations = overrideRelations ++ relations + this + } + override protected def createContext(env:Map[String,(Any, Int)], config:Map[String,(String, Int)], connections:Map[String, Template[Connection]]) : RootContext = { - new RootContext(namespace, projectResolver, profiles, env, config, connections) + new RootContext(namespace, projectResolver, profiles, env, config, connections, overrideMappings, overrideRelations) } } - def builder() = new Builder(None, Seq()) - def builder(namespace:Option[Namespace], profiles:Seq[String]) = new Builder(namespace, profiles) - def builder(parent:Context) = new Builder(parent.namespace, Seq(), parent) + def builder() = new Builder(None, Set()) + def builder(namespace:Option[Namespace], profiles:Set[String]) = new Builder(namespace, profiles) + def builder(parent:Context) = new Builder(parent.namespace, Set(), parent) } -class RootContext private[execution]( +final class RootContext private[execution]( _namespace:Option[Namespace], projectResolver:Option[String => Option[Project]], - profiles:Seq[String], + _profiles:Set[String], _env:Map[String,(Any, Int)], _config:Map[String,(String, Int)], - nonNamespaceConnections:Map[String, Template[Connection]] + extraConnections:Map[String, Template[Connection]], + overrideMappings:Map[MappingIdentifier, Template[Mapping]], + overrideRelations:Map[RelationIdentifier, Template[Relation]] ) extends AbstractContext( _env + ("namespace" -> (NamespaceWrapper(_namespace) -> SettingLevel.SCOPE_OVERRIDE.level)), _config @@ -98,42 +129,51 @@ class RootContext private[execution]( override def project: Option[Project] = None /** - * Returns the root context in a hierarchy of connected contexts + * Returns the root context in a hierarchy of connected contexts. In the case of a [[RootContext]], the + * context itself is returned. 
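Note that overrides registered on the `RootContext` builder above must carry an explicit project, otherwise an `IllegalArgumentException` is thrown; they are later dispatched to the matching project context. A hedged usage sketch (the `mockEvents` template and the project name are assumptions for illustration):

```scala
import com.dimajix.flowman.execution.RootContext
import com.dimajix.flowman.model.{Mapping, MappingIdentifier, Template}

// Build a root context for the "integration" profile and replace the mapping
// "raw_events" of the project "shop" with a mock template.
def buildRootContext(mockEvents: Template[Mapping]): RootContext =
  RootContext.builder(None, Set("integration"))
    .overrideMappings(Map(MappingIdentifier("raw_events", Some("shop")) -> mockEvents))
    .build()
```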
* @return */ override def root : RootContext = this /** - * Returns a fully qualified mapping from a project belonging to the namespace of this executor + * Returns the list of active profile names + * + * @return + */ + override def profiles: Set[String] = _profiles + + /** + * Returns a fully qualified mapping from a project belonging to the namespace of this execution * * @param identifier * @return */ - override def getMapping(identifier: MappingIdentifier): Mapping = { + override def getMapping(identifier: MappingIdentifier, allowOverrides:Boolean=true): Mapping = { require(identifier != null && identifier.nonEmpty) if (identifier.project.isEmpty) throw new NoSuchMappingException(identifier) val child = getProjectContext(identifier.project.get) - child.getMapping(MappingIdentifier(identifier.name, None)) + child.getMapping(identifier, allowOverrides) } + /** - * Returns a fully qualified relation from a project belonging to the namespace of this executor + * Returns a fully qualified relation from a project belonging to the namespace of this execution * * @param identifier * @return */ - override def getRelation(identifier: RelationIdentifier): Relation = { + override def getRelation(identifier: RelationIdentifier, allowOverrides:Boolean=true): Relation = { require(identifier != null && identifier.nonEmpty) if (identifier.project.isEmpty) throw new NoSuchRelationException(identifier) val child = getProjectContext(identifier.project.get) - child.getRelation(RelationIdentifier(identifier.name, None)) + child.getRelation(identifier, allowOverrides) } /** - * Returns a fully qualified target from a project belonging to the namespace of this executor + * Returns a fully qualified target from a project belonging to the namespace of this execution * * @param identifier * @return @@ -144,11 +184,11 @@ class RootContext private[execution]( if (identifier.project.isEmpty) throw new NoSuchTargetException(identifier) val child = getProjectContext(identifier.project.get) - child.getTarget(TargetIdentifier(identifier.name, None)) + child.getTarget(identifier) } /** - * Returns a fully qualified connection from a project belonging to the namespace of this executor + * Returns a fully qualified connection from a project belonging to the namespace of this execution * * @param identifier * @return @@ -158,7 +198,7 @@ class RootContext private[execution]( if (identifier.project.isEmpty) { connections.getOrElseUpdate(identifier.name, - nonNamespaceConnections.get(identifier.name) + extraConnections.get(identifier.name) .orElse( namespace .flatMap(_.connections.get(identifier.name)) @@ -169,12 +209,12 @@ class RootContext private[execution]( } else { val child = getProjectContext(identifier.project.get) - child.getConnection(ConnectionIdentifier(identifier.name, None)) + child.getConnection(identifier) } } /** - * Returns a fully qualified job from a project belonging to the namespace of this executor + * Returns a fully qualified job from a project belonging to the namespace of this execution * * @param identifier * @return @@ -185,7 +225,22 @@ class RootContext private[execution]( if (identifier.project.isEmpty) throw new NoSuchJobException(identifier) val child = getProjectContext(identifier.project.get) - child.getJob(JobIdentifier(identifier.name, None)) + child.getJob(identifier) + } + + /** + * Returns a fully qualified test from a project belonging to the namespace of this execution + * + * @param identifier + * @return + */ + override def getTest(identifier: TestIdentifier): Test = { + 
require(identifier != null && identifier.nonEmpty) + + if (identifier.project.isEmpty) + throw new NoSuchTestException(identifier) + val child = getProjectContext(identifier.project.get) + child.getTest(identifier) } /** @@ -211,8 +266,12 @@ class RootContext private[execution]( } } + // Apply overrides + builder.overrideMappings(overrideMappings.filter(_._1.project.contains(project.name)).map(kv => (kv._1.name, kv._2))) + builder.overrideRelations(overrideRelations.filter(_._1.project.contains(project.name)).map(kv => (kv._1.name, kv._2))) + val context = builder.withEnvironment(project.environment) - .withConfig(project.config.toMap) + .withConfig(project.config) .build() _children.update(project.name, context) diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/RootExecutor.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/RootExecution.scala similarity index 93% rename from flowman-core/src/main/scala/com/dimajix/flowman/execution/RootExecutor.scala rename to flowman-core/src/main/scala/com/dimajix/flowman/execution/RootExecution.scala index 620d3508c..9c501d0b9 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/RootExecutor.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/RootExecution.scala @@ -25,8 +25,8 @@ import com.dimajix.flowman.hadoop.FileSystem import com.dimajix.flowman.metric.MetricSystem -class RootExecutor(session:Session) extends CachingExecutor(None, true) { - override protected val logger = LoggerFactory.getLogger(classOf[RootExecutor]) +class RootExecution(session:Session) extends CachingExecution(None, true) { + override protected val logger = LoggerFactory.getLogger(classOf[RootExecution]) /** * Returns the FlowmanConf object, which contains all Flowman settings. 
@@ -35,7 +35,7 @@ class RootExecutor(session:Session) extends CachingExecutor(None, true) { def flowmanConf : FlowmanConf = session.flowmanConf /** - * Returns the MetricRegistry of this executor + * Returns the MetricRegistry of this execution * @return */ override def metrics : MetricSystem = session.metrics diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/Runner.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/Runner.scala index 0e8cc26c3..5e7d46d41 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/Runner.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/Runner.scala @@ -16,16 +16,22 @@ package com.dimajix.flowman.execution +import java.time.Duration +import java.time.Instant +import java.time.ZoneId +import java.util.Locale + import scala.util.Failure import scala.util.Success import scala.util.Try import scala.util.control.NonFatal +import scala.util.matching.Regex import org.slf4j.LoggerFactory import com.dimajix.common.No -import com.dimajix.flowman.config.FlowmanConf.EXECUTION_TARGET_FORCE_DIRTY -import com.dimajix.flowman.execution.Runner.RunnerJobToken +import com.dimajix.flowman.config.FlowmanConf +import com.dimajix.flowman.execution.JobRunnerImpl.RunnerJobToken import com.dimajix.flowman.history.StateStore import com.dimajix.flowman.history.TargetState import com.dimajix.flowman.metric.MetricBoard @@ -35,249 +41,222 @@ import com.dimajix.flowman.model.Hook import com.dimajix.flowman.model.Job import com.dimajix.flowman.model.JobInstance import com.dimajix.flowman.model.JobWrapper +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.RelationIdentifier import com.dimajix.flowman.model.Target import com.dimajix.flowman.model.TargetInstance import com.dimajix.flowman.model.Template +import com.dimajix.flowman.model.Test +import com.dimajix.flowman.util.ConsoleColors._ import com.dimajix.flowman.util.withShutdownHook +import com.dimajix.spark.sql.DataFrameUtils -object Runner { - private final case class RunnerJobToken(tokens:Seq[(JobListener, JobToken)]) extends JobToken - private final case class RunnerTargetToken(tokens:Seq[(JobListener, TargetToken)]) extends TargetToken -} - - -final class Runner( - parentExecutor:Executor, - stateStore: StateStore, - hooks: Seq[Template[Hook]]=Seq() -) { - require(parentExecutor != null) - require(stateStore != null) - require(hooks != null) - - private val logger = LoggerFactory.getLogger(classOf[Runner]) - - /** - * Executes a single job using the given executor and a map of parameters. The Runner may decide not to - * execute a specific job, because some information may indicate that the job has already been successfully - * run in the past. 
This behaviour can be overridden with the force flag - * @param phases - * @return - */ - def executeJob(job:Job, phases:Seq[Phase], args:Map[String,Any]=Map(), force:Boolean=false, keepGoing:Boolean=false) : Status = { - require(args != null) - require(phases != null) - require(args != null) - logger.info(s"Executing phases ${phases.map(p => "'" + p + "'").mkString(",")} for job '${job.identifier}'") +private[execution] sealed class RunnerImpl { + val logger = LoggerFactory.getLogger(classOf[Runner]) - withJobContext(job, args, force) { (jobContext, arguments) => - withExecutor(job) { executor => - Status.ofAll(phases) { phase => - executeJobPhase(executor, jobContext, job, phase, arguments, force, keepGoing) - } - } + def withStatus[T](target:Target, phase:Phase)(fn: => T) : Status = { + Try { + fn + } + match { + case Success(_) => + logger.info(green(s"Successfully finished phase '$phase' for target '${target.identifier}'")) + Status.SUCCESS + case Failure(NonFatal(e)) => + logger.error(s"Caught exception while executing phase '$phase' for target '${target.identifier}'", e) + Status.FAILED } } - /** - * Executes a single target using the given executor and a map of parameters. The Runner may decide not to - * execute a specific target, because some information may indicate that the job has already been successfully - * run in the past. This behaviour can be overriden with the force flag - * @param targets - * @param phases - * @return - */ - def executeTargets(targets:Seq[Target], phases:Seq[Phase], force:Boolean, keepGoing:Boolean=false) : Status = { - if (targets.nonEmpty) { - val context = targets.head.context - val job = Job.builder(context) - .setName("execute-target") - .setTargets(targets.map(_.identifier)) - .build() - - withJobContext(job, force) { context => - withExecutor(job) { executor => - Status.ofAll(phases) { phase => - executeJobPhase(executor, context, job, phase, Map(), force, keepGoing) - } - } - } + private val separator = boldWhite((0 to 79).map(_ => "-").mkString) + def logSubtitle(s:String) : Unit = { + val l = (77 - (s.length + 1)) / 2 + val t = if (l > 3) { + val sep = (0 to l).map(_ => '-').mkString + boldWhite(sep) + " " + boldCyan(s) + " " + boldWhite(sep) } else { - Status.SUCCESS + boldWhite("--- ") + boldCyan(s) + boldWhite(" ---") } - } - /** - * Provides a context for the given job - * @param job - * @param args - * @param force - * @param fn - * @tparam T - * @return - */ - def withJobContext[T](job:Job, args:Map[String,Any]=Map(), force:Boolean=false)(fn:(Context,Map[String,Any]) => T) : T = { - val arguments : Map[String,Any] = job.parameters.flatMap(p => p.default.map(d => p.name -> d)).toMap ++ args - arguments.toSeq.sortBy(_._1).foreach { case (k,v) => logger.info(s"Job argument $k=$v")} - - verifyArguments(job,arguments) + logger.info("") + logger.info(t) + } - val rootContext = RootContext.builder(job.context) - .withEnvironment("force", force) - .withEnvironment("job", JobWrapper(job)) - .withEnvironment(arguments, SettingLevel.SCOPE_OVERRIDE) - .withEnvironment(job.environment, SettingLevel.JOB_OVERRIDE) - .build() - val jobContext = if (job.context.project.nonEmpty) - rootContext.getProjectContext(job.context.project.get) - else - rootContext - fn(jobContext, arguments) + def logTitle(title:String) : Unit = { + logger.info("") + logger.info(separator) + logger.info(boldWhite(s" $title")) + logger.info(separator) } - def withJobContext[T](job:Job, force:Boolean)(fn:Context => T) : T = { - val context = ScopeContext.builder(job.context) - 
.withEnvironment("force", force) - .withEnvironment("job", JobWrapper(job)) - .build() - fn(context) + def logEnvironment(context:Context) : Unit = { + logger.info("Environment:") + context.environment.toSeq.sortBy(_._1).foreach { case (k, v) => logger.info(s" $k=$v") } + logger.info("") } - /** - * Creates an code environment containing a [[Context]] for the specified phase - * @param phase - * @param fn - * @tparam T - * @return - */ - def withPhaseContext[T](jobContext:Context, phase:Phase)(fn:Context => T) : T = { - val context = ScopeContext.builder(jobContext) - .withEnvironment("phase", phase.toString) - .build() - fn(context) + def logStatus(title:String, status:Status, duration: Duration, endTime:Instant) : Unit = { + val msg = status match { + case Status.SUCCESS|Status.SKIPPED => + boldGreen(s"${status.toString.toUpperCase(Locale.ROOT)} $title") + case Status.ABORTED|Status.FAILED => + boldRed(s"${status.toString.toUpperCase(Locale.ROOT)} $title") + case Status.RUNNING => + boldYellow(s"ALREADY RUNNING $title") + case status => + boldRed(s"UNKNOWN STATE '$status' in $title. Assuming failure") + } + + logger.info(separator) + logger.info(msg) + logger.info(separator) + logger.info(s"Total time: ${duration.toMillis / 1000.0} s") + logger.info(s"Finished at: ${endTime.atZone(ZoneId.systemDefault())}") + logger.info(separator) } +} + + +/** + * Private implementation of Job specific methods + */ +private[execution] object JobRunnerImpl { + private final case class RunnerJobToken(tokens:Seq[(JobListener, JobToken)]) extends JobToken + private final case class RunnerTargetToken(tokens:Seq[(JobListener, TargetToken)]) extends TargetToken +} +private[execution] final class JobRunnerImpl(runner:Runner) extends RunnerImpl { + private val stateStore = runner.stateStore + private val parentExecution = runner.parentExecution /** - * Creates an code environment containing a [[Environment]] for the specified phase - * @param phase - * @param fn - * @tparam T + * Executes a single job using the given execution and a map of parameters. The Runner may decide not to + * execute a specific job, because some information may indicate that the job has already been successfully + * run in the past. This behaviour can be overridden with the force flag + * @param phases * @return */ - def withEnvironment[T](job:Job, phase:Phase, args:Map[String,Any]=Map(), force:Boolean=false)(fn:Environment => T) : T = { - withJobContext(job, args, force) { (jobContext,_) => - withPhaseContext(jobContext, phase) { context => - fn(context.environment) - } - } - } + def executeJob(job:Job, phases:Seq[Phase], args:Map[String,Any]=Map(), targets:Seq[Regex]=Seq(".*".r), force:Boolean=false, keepGoing:Boolean=false, dryRun:Boolean=false) : Status = { + require(args != null) + require(phases != null) + require(args != null) - def withExecutor[T](job:Job)(fn:Executor => T) : T = { - val isolated = job.parameters.nonEmpty || job.environment.nonEmpty - val executor : Executor = if (isolated) new ScopedExecutor(parentExecutor) else parentExecutor - val result = fn(executor) - if (isolated) { - executor.cleanup() - } - result - } + runner.withJobContext(job, args, force, dryRun) { (jobContext, arguments) => + withExecution(job) { execution => + Status.ofAll(phases, keepGoing) { phase => + // Check if build phase really contains any active target. 
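Target selection in `executeJob` above is driven by the new `targets: Seq[Regex]` parameter: a target takes part in the run only when its name is fully matched by at least one pattern (the default `".*".r` selects everything). A small sketch of that check:

```scala
import scala.util.matching.Regex

// A target is selected when at least one pattern matches its whole name.
// Regex.unapplySeq returns Some(...) only for full matches, so partial matches do not count.
def isSelected(targetName: String, patterns: Seq[Regex]): Boolean =
  patterns.exists(_.unapplySeq(targetName).nonEmpty)

// isSelected("export_customers", Seq(".*".r))        == true
// isSelected("export_customers", Seq("export_.*".r)) == true
// isSelected("documentation",    Seq("export_.*".r)) == false
```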
Otherwise we skip this phase and mark it + // as SUCCESS (an empty list is always executed as SUCCESS) + val isActive = job.targets + .filter(target => targets.exists(_.unapplySeq(target.name).nonEmpty)) + .exists { target => + // This might throw exceptions for non-existing targets. The same + // exception will be thrown and handeled properly in executeJobPhase + try { + jobContext.getTarget(target).phases.contains(phase) + } catch { + case NonFatal(_) => true + } + } - private def verifyArguments(job:Job, arguments:Map[String,Any]) : Unit = { - // Verify job arguments. This is moved from the constructor into this place, such that only this method throws an exception - val argNames = arguments.keySet - val paramNames = job.parameters.map(_.name).toSet - argNames.diff(paramNames).foreach(p => throw new IllegalArgumentException(s"Unexpected argument '$p' not defined in job '${job.identifier}'")) - paramNames.diff(argNames).foreach(p => throw new IllegalArgumentException(s"Required parameter '$p' not specified for job '${job.identifier}'")) + if (isActive) { + executeJobPhase(execution, jobContext, job, phase, arguments, targets, force, keepGoing, dryRun) + } + else { + Status.SUCCESS + } + } + } + } } - private def executeJobPhase(executor: Executor, jobContext:Context, job:Job, phase:Phase, arguments:Map[String,Any], force:Boolean, keepGoing:Boolean) : Status = { - withPhaseContext(jobContext, phase) { context => - val desc = job.description.map("(" + _ + ")").getOrElse("") - val args = if (arguments.nonEmpty) s" with arguments ${arguments.map(kv => kv._1 + "=" + kv._2).mkString(", ")}" else "" - logger.info(s"Running phase '$phase' of job '${job.identifier}' $desc $args") - context.environment.toSeq.sortBy(_._1).foreach { case (k, v) => logger.info(s"Environment (phase=$phase) $k=$v") } + def executeJobPhase( + execution: Execution, + jobContext:Context, + job:Job, phase:Phase, + arguments:Map[String,Any], + targets:Seq[Regex], + force:Boolean, + keepGoing:Boolean, + dryRun:Boolean) : Status = { + runner.withPhaseContext(jobContext, phase) { context => + val title = s"${phase.toString.toUpperCase} job '${job.identifier}' ${arguments.map(kv => kv._1 + "=" + kv._2).mkString(", ")}" + logTitle(title) + logEnvironment(context) val instance = job.instance(arguments.map { case (k, v) => k -> v.toString }) - val allHooks = (hooks ++ job.hooks).map(_.instantiate(context)) + val allHooks = (runner.hooks ++ job.hooks).map(_.instantiate(context)) val allMetrics = job.metrics.map(_.instantiate(context)) - withMetrics(executor.metrics, allMetrics) { - recordJob(instance, phase, allHooks) { token => - Try { - withWallTime(executor.metrics, job.metadata, phase) { - executeJobTargets(executor, context, job, phase, token, force, keepGoing) + withMetrics(execution.metrics, allMetrics) { + val startTime = Instant.now() + val status = recordJob(instance, phase, allHooks, dryRun) { token => + try { + withWallTime(execution.metrics, job.metadata, phase) { + executeJobTargets(execution, context, job, phase, targets, token, force, keepGoing, dryRun) } } - match { - case Success(status@Status.SUCCESS) => - logger.info(s"Successfully finished phase '$phase' of job '${job.identifier}'$args") - status - case Success(status@Status.SKIPPED) => - logger.info(s"Execution of phase '$phase' of job '${job.identifier}'$args skipped") - status - case Success(status@Status.FAILED) => - logger.error(s"Execution of phase '$phase' of job '${job.identifier}'$args failed") - status - case Success(status@Status.ABORTED) => - 
logger.error(s"Execution of phase '$phase' of job '${job.identifier}'$args aborted") - status - case Success(status@Status.RUNNING) => - logger.error(s"Execution of phase '$phase' of job '${job.identifier}'$args already running") - status - case Success(status) => - logger.error(s"Execution of phase '$phase' of job '${job.identifier}'$args in unknown state. Assuming failure") - status - case Failure(NonFatal(e)) => - logger.error(s"Caught exception while executing phase '$phase' of job '${job.identifier}'$args", e) + catch { + case NonFatal(ex) => + logger.error(s"Caught exception during $title:", ex) Status.FAILED } } + val endTime = Instant.now() + val duration = Duration.between(startTime, endTime) + logStatus(title, status, duration, endTime) + status } } } + def withExecution[T](job:Job)(fn:Execution => T) : T = { + val isolated = job.parameters.nonEmpty || job.environment.nonEmpty + val execution : Execution = if (isolated) new ScopedExecution(parentExecution) else parentExecution + val result = fn(execution) + if (isolated) { + execution.cleanup() + } + result + } + /** - * Executes a single target using the given executor and a map of parameters. The Runner may decide not to - * execute a specific target, because some information may indicate that the job has already been successfully - * run in the past. This behaviour can be overriden with the force flag - * @param target - * @param phase - * @return - */ - private def executeTargetPhase(executor: Executor, target:Target, phase:Phase, jobToken:RunnerJobToken, force:Boolean) : Status = { + * Executes a single target using the given execution and a map of parameters. The Runner may decide not to + * execute a specific target, because some information may indicate that the job has already been successfully + * run in the past. 
This behaviour can be overriden with the force flag + * @param target + * @param phase + * @return + */ + private def executeTargetPhase(execution: Execution, target:Target, phase:Phase, jobToken:RunnerJobToken, force:Boolean, dryRun:Boolean) : Status = { // Create target instance for state server val instance = target.instance - val forceDirty = force || executor.flowmanConf.getConf(EXECUTION_TARGET_FORCE_DIRTY) + val forceDirty = force || execution.flowmanConf.getConf(FlowmanConf.EXECUTION_TARGET_FORCE_DIRTY) val canSkip = !force && checkTarget(instance, phase) - recordTarget(instance, phase, jobToken) { + recordTarget(instance, phase, jobToken, dryRun) { + logSubtitle(s"$phase target '${target.identifier}'") + // First checkJob if execution is really required if (canSkip) { - logger.info(s"Target '${target.identifier}' up to date for phase '$phase' according to state store, skipping execution") + logger.info(cyan("Target '${target.identifier}' up to date for phase '$phase' according to state store, skipping execution")) + logger.info("") Status.SKIPPED } - else if (!forceDirty && target.dirty(executor, phase) == No) { - logger.info(s"Target '${target.identifier}' not dirty in phase $phase, skipping execution") + else if (!forceDirty && target.dirty(execution, phase) == No) { + logger.info(cyan(s"Target '${target.identifier}' not dirty in phase $phase, skipping execution")) + logger.info("") Status.SKIPPED } else { - Try { - logger.info(s"Running phase '$phase' of target '${target.identifier}'") - withWallTime(executor.metrics, target.metadata, phase) { - target.execute(executor, phase) + withStatus(target, phase) { + if (!dryRun) { + withWallTime(execution.metrics, target.metadata, phase) { + target.execute(execution, phase) + } } } - match { - case Success(_) => - logger.info(s"Successfully finished phase '$phase' for target '${target.identifier}'") - Status.SUCCESS - case Failure(NonFatal(e)) => - logger.error(s"Caught exception while executing phase '$phase' for target '${target.identifier}'", e) - Status.FAILED - } } } } @@ -291,22 +270,20 @@ final class Runner( * @param token * @return */ - private def executeJobTargets(executor:Executor, context:Context, job:Job, phase:Phase, token:RunnerJobToken, force:Boolean, keepGoing:Boolean) : Status = { + private def executeJobTargets(execution:Execution, context:Context, job:Job, phase:Phase, targets:Seq[Regex], token:RunnerJobToken, force:Boolean, keepGoing:Boolean, dryRun:Boolean) : Status = { require(phase != null) - // First determine ordering before filtering active targets, since their might be some transitive dependencies - // in place. 
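Condensing the skip logic of `executeTargetPhase` above: without `force`, a target that the state store already reports as successfully processed is skipped, and a target that reports itself as not dirty is skipped unless the `EXECUTION_TARGET_FORCE_DIRTY` setting is enabled. A compact sketch of that decision (parameter names are illustrative):

```scala
// Returns true when the target should actually be executed for the given phase.
def shouldExecute(force: Boolean,
                  forceDirtyConf: Boolean,       // FlowmanConf.EXECUTION_TARGET_FORCE_DIRTY
                  upToDateInStateStore: Boolean,
                  targetIsDirty: Boolean): Boolean = {
  val canSkip    = !force && upToDateInStateStore
  val forceDirty = force || forceDirtyConf
  if (canSkip) false                             // state store says the work was already done
  else if (!forceDirty && !targetIsDirty) false  // target itself reports nothing to do
  else true
}
```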
For example accessing a VIEW which does not require a BUILD but accesses other resources - val targets = job.targets.map(t => context.getTarget(t)) - val orderedTargets = phase match { - case Phase.DESTROY | Phase.TRUNCATE => TargetOrdering.sort(targets, phase).reverse - case _ => TargetOrdering.sort(targets, phase) - } - val activeTargets = orderedTargets.filter(_.phases.contains(phase)) + val jobTargets = job.targets.map(t => context.getTarget(t)) - logger.info(s"Executing phase '$phase' with sequence: ${activeTargets.map(_.identifier).mkString(", ")}") + val clazz = execution.flowmanConf.getConf(FlowmanConf.EXECUTION_EXECUTOR_CLASS) + val ctor = clazz.getDeclaredConstructor() + val executor = ctor.newInstance() - Status.ofAll(activeTargets, keepGoing) { target => - executeTargetPhase(executor, target, phase, token, force) + def targetFilter(target:Target) : Boolean = + target.phases.contains(phase) && targets.exists(_.unapplySeq(target.name).nonEmpty) + + executor.execute(execution, context, phase, jobTargets, targetFilter, keepGoing) { (execution, target, phase) => + executeTargetPhase(execution, target, phase, token, force, dryRun) } } @@ -318,18 +295,18 @@ final class Runner( * @param fn * @return */ - private def recordJob(job:JobInstance, phase:Phase, hooks:Seq[Hook])(fn: RunnerJobToken => Status) : Status = { + private def recordJob(job:JobInstance, phase:Phase, hooks:Seq[Hook], dryRun:Boolean)(fn: RunnerJobToken => Status) : Status = { def startJob() : Seq[(JobListener, JobToken)] = { Seq((stateStore, stateStore.startJob(job, phase))) ++ - hooks.flatMap { hook => - try { - Some((hook, hook.startJob(job, phase))) - } catch { - case NonFatal(ex) => - logger.warn("Execution listener threw exception on startJob.", ex) - None + hooks.flatMap { hook => + try { + Some((hook, hook.startJob(job, phase))) + } catch { + case NonFatal(ex) => + logger.warn("Execution listener threw exception on startJob.", ex) + None + } } - } } def finishJob(tokens:Seq[(JobListener, JobToken)], status:Status) : Unit = { @@ -343,15 +320,20 @@ final class Runner( } } - val tokens = startJob() - withShutdownHook(finishJob(tokens, Status.FAILED)) { - val status = fn(RunnerJobToken(tokens)) - finishJob(tokens, status) - status + if (dryRun) { + fn(RunnerJobToken(Seq())) + } + else { + val tokens = startJob() + withShutdownHook(finishJob(tokens, Status.FAILED)) { + val status = fn(RunnerJobToken(tokens)) + finishJob(tokens, status) + status + } } } - private def recordTarget(target:TargetInstance, phase:Phase, job:RunnerJobToken)(fn: => Status) : Status = { + private def recordTarget(target:TargetInstance, phase:Phase, job:RunnerJobToken, dryRun:Boolean)(fn: => Status) : Status = { def startTarget() : Seq[(JobListener, TargetToken)] = { job.tokens.flatMap { case(listener,jobToken) => try { @@ -376,19 +358,24 @@ final class Runner( } } - val tokens = startTarget() - withShutdownHook(finishTarget(tokens, Status.FAILED)) { - val status = fn - finishTarget(tokens, status) - status + if (dryRun) { + fn + } + else { + val tokens = startTarget() + withShutdownHook(finishTarget(tokens, Status.FAILED)) { + val status = fn + finishTarget(tokens, status) + status + } } } /** - * Performs some checks, if the target is already up to date - * @param target - * @return - */ + * Performs some checks, if the target is already up to date + * @param target + * @return + */ private def checkTarget(target:TargetInstance, phase:Phase) : Boolean = { def checkState(state:TargetState) : Boolean = { val lifecycle = Lifecycle.ofPhase(phase) 
@@ -440,3 +427,346 @@ final class Runner( status } } + +/** + * Private Implementation for Test specific methods + * @param runner + */ +private[execution] final class TestRunnerImpl(runner:Runner) extends RunnerImpl { + private val parentExecution = runner.parentExecution + + def executeTest(test:Test, keepGoing:Boolean=false, dryRun:Boolean=false) : Status = { + runner.withTestContext(test, dryRun) { context => + val title = s"Running test '${test.identifier}'" + logTitle(title) + logEnvironment(context) + + val startTime = Instant.now() + val execution = new ScopedExecution(parentExecution) + + // Get all targets once here. Otherwise the fixtures would be instantiated over and over again for + // each phase. + val targets = test.targets.map(t => context.getTarget(t)) ++ test.fixtures.values.map(_.instantiate(context)) + + def runPhase(phase:Phase) : Status = { + // Only execute phase if there are targets. This will save some logging outputs + if (targets.exists(_.phases.contains(phase))) { + runner.withPhaseContext(context, phase) { context => + executeTestTargets(execution, context, targets, phase, keepGoing, dryRun) + } + } + else { + Status.SUCCESS + } + } + + // First create test environment via fixtures + val buildStatus = Status.ofAll(Lifecycle.BUILD, keepGoing) { phase => + runPhase(phase) + } + // Now run tests if fixtures where successful + val testStatus = + if (buildStatus == Status.SUCCESS || keepGoing) { + executeTestAssertions(execution, context, test, keepGoing, dryRun) + } + else { + Status.SKIPPED + } + // Finally clean up, even in case of possible failures. + val destroyStatus = Status.ofAll(Lifecycle.DESTROY, true) { phase => + runPhase(phase) + } + + // Compute complete status - which is only SUCCESS if all steps have been executed successfully + val status = + if (Seq(buildStatus, testStatus, destroyStatus).forall(_ == Status.SUCCESS)) + Status.SUCCESS + else + Status.FAILED + + execution.cleanup() + + val endTime = Instant.now() + val duration = Duration.between(startTime, endTime) + logStatus(title, status, duration, endTime) + status + } + } + + private def executeTestAssertions( + execution: Execution, + context:Context, + test:Test, + keepGoing:Boolean, + dryRun:Boolean + ) : Status = { + val title = s"assert test '${test.identifier}'" + logSubtitle(title) + + try { + val startTime = Instant.now() + var numExceptions = 0 + + // First instantiate all assertions + val instances = test.assertions.map { case (name, assertion) => + val instance = assertion.instantiate(context) + name -> instance + } + + // Collect all required DataFrames for caching. We assume that each DataFrame might be used in multiple + // assertions and that the DataFrames aren't very huge (we are talking about tests!) 
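The control flow of `executeTest` above can be summarized as: build the fixtures, run the assertions only if the fixtures came up cleanly (or `keepGoing` is set), always run the DESTROY lifecycle for cleanup, and report success only when all three steps succeeded. A compact sketch using the same `Status` values:

```scala
import com.dimajix.flowman.execution.Status

// Derive the overall test status from the three stages of a test run.
def overallStatus(buildStatus: Status,
                  keepGoing: Boolean,
                  runAssertions: () => Status,
                  runCleanup: () => Status): Status = {
  val testStatus =
    if (buildStatus == Status.SUCCESS || keepGoing) runAssertions()
    else Status.SKIPPED
  // cleanup is attempted even when building the fixtures or the assertions failed
  val destroyStatus = runCleanup()
  if (Seq(buildStatus, testStatus, destroyStatus).forall(_ == Status.SUCCESS)) Status.SUCCESS
  else Status.FAILED
}
```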
+ val inputDataFrames = instances + .flatMap { case(_,instance) => if(!dryRun) instance.inputs else Seq() } + .toSeq + .distinct + .map(id => execution.instantiate(context.getMapping(id.mapping), id.output)) + + val results = DataFrameUtils.withCaches(inputDataFrames) { + instances.map { case (name, instance) => + val description = instance.description.getOrElse(name) + + val status = if (!dryRun) { + try { + execution.assert(instance) + } + catch { + case NonFatal(ex) => + // Pass on exception when keepGoing is false, so next assertions won't be executed + numExceptions = numExceptions + 1 + if (!keepGoing) + throw ex + logger.error(s"Caught exception during $description:", ex) + Seq() + } + } + else { + Seq() + } + + if (status.forall(_.valid)) + logger.info(green(s" ✓ passed: $description")) + else + logger.error(red(s" ✘ failed: $description")) + + // Remember test name, description and status for potential report + (name, description, status) + } + } + + val endTime = Instant.now() + val duration = Duration.between(startTime, endTime) + val numSucceeded = results.map(_._3.count(_.valid)).sum + val numFailed = results.map(_._3.count(!_.valid)).sum + + logger.info(cyan(s"$numSucceeded assertions passed, $numFailed failed, $numExceptions exceptions")) + logger.info(cyan(s"Executed ${numSucceeded + numFailed} assertions in ${duration.toMillis / 1000.0} s")) + + if (numFailed + numExceptions > 0) Status.FAILED else Status.SUCCESS + } + catch { + // Catch all exceptions + case NonFatal(ex) => + logger.error(s"Caught exception during $title:", ex) + Status.FAILED + } + } + + private def executeTestTargets(execution:Execution, context:Context, targets:Seq[Target], phase:Phase, keepGoing:Boolean, dryRun:Boolean) : Status = { + require(phase != null) + + val clazz = execution.flowmanConf.getConf(FlowmanConf.EXECUTION_EXECUTOR_CLASS) + val ctor = clazz.getDeclaredConstructor() + val executor = ctor.newInstance() + + def targetFilter(target:Target) : Boolean = + target.phases.contains(phase) + + executor.execute(execution, context, phase, targets, targetFilter, keepGoing) { (execution, target, phase) => + executeTestTargetPhase(execution, target, phase, dryRun) + } + } + + private def executeTestTargetPhase(execution: Execution, target:Target, phase:Phase, dryRun:Boolean) : Status = { + logSubtitle(s"$phase target '${target.identifier}'") + + // First checkJob if execution is really required + withStatus(target, phase) { + if (!dryRun) { + target.execute(execution, phase) + } + } + } +} + + +/** + * The [[Runner]] class should be used for executing jobs, targets and tests. It will take care of applying additonal + * environment variables, measuring execution time, publishing metrics, error handling and more. + * + * @param parentExecution + * @param stateStore + * @param hooks + */ +final class Runner( + private[execution] val parentExecution:Execution, + private[execution] val stateStore: StateStore, + private[execution] val hooks: Seq[Template[Hook]]=Seq() +) { + require(parentExecution != null) + require(stateStore != null) + require(hooks != null) + + private val logger = LoggerFactory.getLogger(classOf[Runner]) + + /** + * Executes a single job using the given execution and a map of parameters. The Runner may decide not to + * execute a specific job, because some information may indicate that the job has already been successfully + * run in the past. 
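The assertion inputs above are wrapped in `DataFrameUtils.withCaches`, which presumably persists the shared DataFrames for the duration of the block and releases them afterwards. A minimal sketch of such a helper, under that assumption:

```scala
import org.apache.spark.sql.DataFrame
import org.apache.spark.storage.StorageLevel

// Persist the given DataFrames, run the block, and always unpersist again, even when
// the block throws, so all assertions can reuse the cached data instead of recomputing it.
def withCaches[T](inputs: Seq[DataFrame],
                  level: StorageLevel = StorageLevel.MEMORY_AND_DISK)(block: => T): T = {
  inputs.foreach(_.persist(level))
  try block
  finally inputs.foreach(_.unpersist())
}
```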
This behaviour can be overridden with the force flag + * @param phases + * @return + */ + def executeJob(job:Job, phases:Seq[Phase], args:Map[String,Any]=Map(), targets:Seq[Regex]=Seq(".*".r), force:Boolean=false, keepGoing:Boolean=false, dryRun:Boolean=false) : Status = { + require(args != null) + require(phases != null) + require(args != null) + + logger.info(s"Executing phases ${phases.map(p => "'" + p + "'").mkString(",")} for job '${job.identifier}'") + val runner = new JobRunnerImpl(this) + runner.executeJob(job, phases, args, targets, force, keepGoing, dryRun) + } + + /** + * Executes an individual test. + * @param test + * @param keepGoing - Continue running assertions even if unexpected exceptions are raised. + * @param dryRun + * @return + */ + def executeTest(test:Test, keepGoing:Boolean=false, dryRun:Boolean=false) : Status = { + val runner = new TestRunnerImpl(this) + runner.executeTest(test, keepGoing, dryRun) + } + + /** + * Executes a single target using the given execution and a map of parameters. The Runner may decide not to + * execute a specific target, because some information may indicate that the job has already been successfully + * run in the past. This behaviour can be overridden with the force flag + * @param targets + * @param phases + * @return + */ + def executeTargets(targets:Seq[Target], phases:Seq[Phase], force:Boolean, keepGoing:Boolean=false, dryRun:Boolean=false) : Status = { + if (targets.nonEmpty) { + val context = targets.head.context + val job = Job.builder(context) + .setName("execute-target") + .setTargets(targets.map(_.identifier)) + .build() + + val runner = new JobRunnerImpl(this) + runner.executeJob(job, phases, Map(), Seq(".*".r), force, keepGoing, dryRun) + } + else { + Status.SUCCESS + } + } + + /** + * Provides a context for the given job. This will apply all environment variables of the job and add + * additional variables like a `force` flag. + * @param job + * @param args + * @param force + * @param fn + * @tparam T + * @return + */ + def withJobContext[T](job:Job, args:Map[String,Any]=Map(), force:Boolean=false, dryRun:Boolean=false)(fn:(Context,Map[String,Any]) => T) : T = { + val arguments : Map[String,Any] = job.parameters.flatMap(p => p.default.map(d => p.name -> d)).toMap ++ args + arguments.toSeq.sortBy(_._1).foreach { case (k,v) => logger.info(s"Job argument $k=$v")} + + verifyArguments(job,arguments) + + val rootContext = RootContext.builder(job.context) + .withEnvironment("force", force) + .withEnvironment("dryRun", dryRun) + .withEnvironment("job", JobWrapper(job)) + .withEnvironment(arguments, SettingLevel.SCOPE_OVERRIDE) + .withEnvironment(job.environment, SettingLevel.JOB_OVERRIDE) + .build() + val jobContext = if (job.context.project.nonEmpty) + rootContext.getProjectContext(job.context.project.get) + else + rootContext + fn(jobContext, arguments) + } + + /** + * Provides a context for a given test. This will apply all environment variables of the test case and add + * additional variables like a `force` flag. 
+ * @param test + * @param dryRun + * @param fn + * @tparam T + * @return + */ + def withTestContext[T](test:Test, dryRun:Boolean=false)(fn:(Context) => T) : T = { + val project = test.project.map(_.name) + val rootContext = RootContext.builder(test.context) + .withEnvironment("force", false) + .withEnvironment("dryRun", dryRun) + //.withEnvironment("job", JobWrapper(job)) + .withEnvironment(test.environment, SettingLevel.JOB_OVERRIDE) + .overrideRelations(test.overrideRelations.map(kv => RelationIdentifier(kv._1, project) -> kv._2)) + .overrideMappings(test.overrideMappings.map(kv => MappingIdentifier(kv._1, project) -> kv._2)) + .build() + val projectContext = if (test.context.project.nonEmpty) + rootContext.getProjectContext(test.context.project.get) + else + rootContext + fn(projectContext) + } + + /** + * Creates a code environment containing a [[Context]] for the specified phase + * @param phase + * @param fn + * @tparam T + * @return + */ + def withPhaseContext[T](jobContext:Context, phase:Phase)(fn:Context => T) : T = { + val context = ScopeContext.builder(jobContext) + .withEnvironment("phase", phase.toString) + .build() + fn(context) + } + + /** + * Creates a code environment containing an [[Environment]] for the specified phase + * @param phase + * @param fn + * @tparam T + * @return + */ + def withEnvironment[T](job:Job, phase:Phase, args:Map[String,Any], force:Boolean, dryRun:Boolean)(fn:Environment => T) : T = { + withJobContext(job, args, force, dryRun) { (jobContext,_) => + withPhaseContext(jobContext, phase) { context => + fn(context.environment) + } + } + } + + def withEnvironment[T](test:Test, dryRun:Boolean)(fn:Environment => T) : T = { + withTestContext(test, dryRun) { context => + fn(context.environment) + } + } + + private def verifyArguments(job:Job, arguments:Map[String,Any]) : Unit = { + // Verify job arguments. 
This is moved from the constructor into this place, such that only this method throws an exception + val argNames = arguments.keySet + val paramNames = job.parameters.map(_.name).toSet + argNames.diff(paramNames).foreach(p => throw new IllegalArgumentException(s"Unexpected argument '$p' not defined in job '${job.identifier}'")) + paramNames.diff(argNames).foreach(p => throw new IllegalArgumentException(s"Required parameter '$p' not specified for job '${job.identifier}'")) + } +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/ScopeContext.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/ScopeContext.scala index 955e56118..23fe86ee2 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/ScopeContext.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/ScopeContext.scala @@ -33,6 +33,8 @@ import com.dimajix.flowman.model.RelationIdentifier import com.dimajix.flowman.model.Target import com.dimajix.flowman.model.TargetIdentifier import com.dimajix.flowman.model.Template +import com.dimajix.flowman.model.Test +import com.dimajix.flowman.model.TestIdentifier object ScopeContext { @@ -43,6 +45,7 @@ object ScopeContext { private var relations = Map[String, Template[Relation]]() private var targets = Map[String, Template[Target]]() private var jobs = Map[String, Template[Job]]() + private var tests = Map[String, Template[Test]]() def withMappings(mappings:Map[String,Template[Mapping]]) : Builder = { require(mappings != null) @@ -64,6 +67,11 @@ object ScopeContext { this.jobs = this.jobs ++ jobs this } + def withTests(tests:Map[String,Template[Test]]) : Builder = { + require(tests != null) + this.tests = this.tests ++ tests + this + } override protected val logger = LoggerFactory.getLogger(classOf[ScopeContext]) @@ -76,7 +84,8 @@ object ScopeContext { relations, targets, connections, - jobs + jobs, + tests ) } } @@ -85,7 +94,7 @@ object ScopeContext { } -class ScopeContext( +final class ScopeContext( parent:Context, fullEnv:Map[String,(Any, Int)], fullConfig:Map[String,(String, Int)], @@ -93,17 +102,41 @@ class ScopeContext( scopeRelations:Map[String,Template[Relation]] = Map(), scopeTargets:Map[String,Template[Target]] = Map(), scopeConnections:Map[String,Template[Connection]] = Map(), - scopeJobs:Map[String,Template[Job]] = Map() + scopeJobs:Map[String,Template[Job]] = Map(), + scopeTests:Map[String,Template[Test]] = Map() ) extends AbstractContext(fullEnv, fullConfig) { private val mappings = mutable.Map[String,Mapping]() private val relations = mutable.Map[String,Relation]() private val targets = mutable.Map[String,Target]() private val connections = mutable.Map[String,Connection]() private val jobs = mutable.Map[String,Job]() + private val tests = mutable.Map[String,Test]() + /** + * Returns the namespace associated with this context. Can be null + * @return + */ override def namespace: Option[Namespace] = parent.namespace + + /** + * Returns the project associated with this context. 
Can be [[None]] + * @return + */ override def project: Option[Project] = parent.project + + /** + * Returns the root context in a hierarchy of connected contexts + * @return + */ override def root: RootContext = parent.root + + /** + * Returns the list of active profile names + * + * @return + */ + override def profiles: Set[String] = parent.profiles + override def getConnection(identifier: ConnectionIdentifier): Connection = { if (identifier.project.isEmpty) { connections.get(identifier.name) match { @@ -121,7 +154,7 @@ class ScopeContext( parent.getConnection(identifier) } } - override def getMapping(identifier: MappingIdentifier): Mapping = { + override def getMapping(identifier: MappingIdentifier, allowOverrides:Boolean=true): Mapping = { if (identifier.project.isEmpty) { mappings.get(identifier.name) match { case Some(result) => result @@ -130,15 +163,15 @@ class ScopeContext( val result = spec.instantiate(this) mappings.put(identifier.name, result) result - case None => parent.getMapping(identifier) + case None => parent.getMapping(identifier, allowOverrides) } } } else { - parent.getMapping(identifier) + parent.getMapping(identifier, allowOverrides) } } - override def getRelation(identifier: RelationIdentifier): Relation = { + override def getRelation(identifier: RelationIdentifier, allowOverrides:Boolean=true): Relation = { if (identifier.project.isEmpty) { relations.get(identifier.name) match { case Some(result) => result @@ -147,12 +180,12 @@ class ScopeContext( val result = spec.instantiate(this) relations.put(identifier.name, result) result - case None => parent.getRelation(identifier) + case None => parent.getRelation(identifier, allowOverrides) } } } else { - parent.getRelation(identifier) + parent.getRelation(identifier, allowOverrides) } } override def getTarget(identifier: TargetIdentifier): Target = { @@ -189,4 +222,21 @@ class ScopeContext( parent.getJob(identifier) } } + override def getTest(identifier: TestIdentifier): Test = { + if (identifier.project.isEmpty) { + tests.get(identifier.name) match { + case Some(result) => result + case None => scopeTests.get(identifier.name) match { + case Some(spec) => + val result = spec.instantiate(this) + tests.put(identifier.name, result) + result + case None => parent.getTest(identifier) + } + } + } + else { + parent.getTest(identifier) + } + } } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/ScopedExecutor.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/ScopedExecution.scala similarity index 92% rename from flowman-core/src/main/scala/com/dimajix/flowman/execution/ScopedExecutor.scala rename to flowman-core/src/main/scala/com/dimajix/flowman/execution/ScopedExecution.scala index 72ab20a9f..daf018f48 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/ScopedExecutor.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/ScopedExecution.scala @@ -25,8 +25,8 @@ import com.dimajix.flowman.hadoop.FileSystem import com.dimajix.flowman.metric.MetricSystem -class ScopedExecutor(parent:Executor) extends CachingExecutor(Some(parent), true) { - override protected val logger = LoggerFactory.getLogger(classOf[ScopedExecutor]) +class ScopedExecution(parent:Execution) extends CachingExecution(Some(parent), true) { + override protected val logger = LoggerFactory.getLogger(classOf[ScopedExecution]) /** * Returns the FlowmanConf object, which contains all Flowman settings. 
@@ -35,7 +35,7 @@ class ScopedExecutor(parent:Executor) extends CachingExecutor(Some(parent), true def flowmanConf : FlowmanConf = parent.flowmanConf /** - * Returns the MetricRegistry of this executor + * Returns the MetricRegistry of this execution * @return */ override def metrics : MetricSystem = parent.metrics diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/Session.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/Session.scala index 6eef3983b..d9eea6027 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/Session.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/Session.scala @@ -23,7 +23,6 @@ import org.apache.spark.sql.SparkShim import org.slf4j.LoggerFactory import com.dimajix.flowman.catalog.Catalog -import com.dimajix.flowman.catalog.ExternalCatalog import com.dimajix.flowman.config.Configuration import com.dimajix.flowman.config.FlowmanConf import com.dimajix.flowman.hadoop.FileSystem @@ -31,10 +30,11 @@ import com.dimajix.flowman.history.NullStateStore import com.dimajix.flowman.history.StateStore import com.dimajix.flowman.metric.MetricSystem import com.dimajix.flowman.model.Hook -import com.dimajix.flowman.model.Job import com.dimajix.flowman.model.Namespace import com.dimajix.flowman.model.Project import com.dimajix.flowman.model.Template +import com.dimajix.flowman.spi.LogFilter +import com.dimajix.flowman.spi.SparkExtension import com.dimajix.flowman.spi.UdfProvider import com.dimajix.flowman.storage.NullStore import com.dimajix.flowman.storage.Store @@ -163,7 +163,7 @@ object Session { * @param profiles * @return */ - def withProfiles(profiles:Seq[String]) : Builder = { + def withProfiles(profiles:Iterable[String]) : Builder = { require(profiles != null) this.profiles = this.profiles ++ profiles this @@ -174,7 +174,7 @@ object Session { * @param jars * @return */ - def withJars(jars:Seq[String]) : Builder = { + def withJars(jars:Iterable[String]) : Builder = { require(jars != null) this.jars = this.jars ++ jars this @@ -237,21 +237,34 @@ class Session private[execution]( _jars.toSeq } private def sparkMaster : String = { - _sparkMaster + // How should priorities look like? + // 1. Spark master from Flowman config. + // 2. Spark master from application code / session builder + // 3. Spark master from spark-submit. This is to be found in SparkConf + // 4. Default master + config.toMap.get("spark.master") .filter(_.nonEmpty) - .orElse(Option(System.getProperty("spark.master"))) + .orElse(_sparkMaster) + .filter(_.nonEmpty) + .orElse(sparkConf.getOption("spark.master")) .filter(_.nonEmpty) .getOrElse("local[*]") } private def sparkName : String = { - if (sparkConf.contains("spark.app.name")) { - sparkConf.get("spark.app.name") - } - else { - _sparkName - .filter(_.nonEmpty) - .getOrElse("Flowman") - } + // How should priorities look like? + // 1. Spark app name from Flowman config. + // 2. Spark app name from application code + // 3. Spark app name from spark-submit / command-line + // 4. Spark app name from Flowman project + // 5. 
Default Spark app name + config.toMap.get("spark.app.name") + .filter(_.nonEmpty) + .orElse(_sparkName) + .filter(_.nonEmpty) + .orElse(_project.map(_.name).filter(_.nonEmpty).map("Flowman - " + _)) + .orElse(sparkConf.getOption("spark.app.name")) + .filter(_.nonEmpty) + .getOrElse("Flowman") } /** @@ -266,15 +279,15 @@ class Session private[execution]( Option(_sparkSession) .flatMap(builder => Option(builder(sparkConf))) - .map { injectedSession => + .map { spark => logger.info("Creating Spark session using provided builder") // Set all session properties that can be changed in an existing session sparkConf.getAll.foreach { case (key, value) => if (!SparkShim.isStaticConf(key)) { - injectedSession.conf.set(key, value) + spark.conf.set(key, value) } } - injectedSession + spark } .getOrElse { logger.info("Creating new Spark session") @@ -284,6 +297,9 @@ class Session private[execution]( logger.info("Enabling Spark Hive support") sessionBuilder.enableHiveSupport() } + // Apply all session extensions to builder + SparkExtension.extensions.foldLeft(sessionBuilder)((builder,ext) => ext.register(builder)) + // Create Spark session sessionBuilder.getOrCreate() } } @@ -298,18 +314,25 @@ class Session private[execution]( // Register additional planning strategies ExtraStrategies.register(spark) + // Apply all session extensions + SparkExtension.extensions.foreach(_.register(spark)) + + // Register special UDFs + UdfProvider.providers.foreach(_.register(spark.udf)) + // Distribute additional Plugin jar files sparkJars.foreach(spark.sparkContext.addJar) // Log all config properties - spark.conf.getAll.toSeq.sortBy(_._1).foreach { case (key, value)=> logger.info("Config: {} = {}", key: Any, value: Any) } + val logFilters = LogFilter.filters + spark.conf.getAll.toSeq.sortBy(_._1).foreach { keyValue => + logFilters.foldLeft(Option(keyValue))((kv, f) => kv.flatMap(kv => f.filterConfig(kv._1,kv._2))) + .foreach { case (key,value) => logger.info("Config: {} = {}", key: Any, value: Any) } + } // Copy all Spark configs over to SparkConf inside the Context sparkConf.setAll(spark.conf.getAll) - // Register special UDFs - UdfProvider.providers.foreach(_.register(spark.udf)) - spark } private var sparkSession:SparkSession = null @@ -319,7 +342,7 @@ class Session private[execution]( Some(store.loadProject(name)) } - val builder = RootContext.builder(_namespace, _profiles.toSeq) + val builder = RootContext.builder(_namespace, _profiles) .withEnvironment(_environment, SettingLevel.GLOBAL_OVERRIDE) .withConfig(_config, SettingLevel.GLOBAL_OVERRIDE) .withProjectResolver(loadProject) @@ -345,8 +368,8 @@ class Session private[execution]( } } - private lazy val rootExecutor : RootExecutor = { - new RootExecutor(this) + private lazy val rootExecution : RootExecution = { + new RootExecution(this) } private lazy val _catalog = { @@ -387,6 +410,13 @@ class Session private[execution]( */ def project : Option[Project] = _project + /** + * Returns the list of active profile names + * + * @return + */ + def profiles: Set[String] = _profiles + /** * Returns the storage used to manage projects * @return @@ -410,7 +440,7 @@ class Session private[execution]( * @return */ def runner : Runner = { - new Runner(executor, _history, _hooks) + new Runner(execution, _history, _hooks) } /** @@ -464,12 +494,11 @@ class Session private[execution]( def context : Context = rootContext /** - * Returns the root executor of this session. 
Every project has its own derived executor, which should - * be used instead if working with a project - * + * Returns the root execution of this session. You might want to wrap it up into a [[ScopedExecution]] to + * isolate resources. * @return */ - def executor : Executor = rootExecutor + def execution : Execution = rootExecution /** * Either returns an existing or creates a new project specific context diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/SimpleExecutor.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/SimpleExecutor.scala new file mode 100644 index 000000000..2a9afd6fa --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/SimpleExecutor.scala @@ -0,0 +1,59 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.execution + +import org.slf4j.LoggerFactory + +import com.dimajix.flowman.model.Target + + +class SimpleExecutor extends Executor { + private val logger = LoggerFactory.getLogger(classOf[SimpleExecutor]) + + /** + * Executes a list of targets in an appropriate order. + * + * @param execution + * @param context + * @param phase - Phase to execute + * @param targets - List of all targets, even those which should not be executed + * @param filter - Filter predicate to find all targets to be executed + * @param keepGoing - True if errors in one target should not stop other targets from being executed + * @param fn - Function to call. Note that the function is expected not to throw a non-fatal exception. + * @return + */ + def execute(execution: Execution, context:Context, phase: Phase, targets: Seq[Target], filter:Target => Boolean, keepGoing: Boolean)(fn:(Execution,Target,Phase) => Status) : Status = { + // First determine ordering before filtering active targets, since there might be some transitive dependencies + // in place. 
For example accessing a VIEW which does not require a BUILD but accesses other resources + val orderedTargets = phase match { + case Phase.DESTROY | Phase.TRUNCATE => TargetOrdering.sort(targets, phase).reverse + case _ => TargetOrdering.sort(targets, phase) + } + + // Filter targets by phase then by optional target name patterns + val activeTargets = orderedTargets + .filter(_.phases.contains(phase)) + .filter(filter) + + logger.info(s"Target order for $phase:") + activeTargets.foreach(t => logger.info(" - " + t.identifier)) + + Status.ofAll(activeTargets, keepGoing) { target => + fn(execution, target, phase) + } + } +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/TargetOrdering.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/TargetOrdering.scala index f17ed6fc5..d5109befe 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/TargetOrdering.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/TargetOrdering.scala @@ -76,7 +76,7 @@ object TargetOrdering { }) nodes.foreach { case(n,deps) => - logger.info(s"Dependencies of phase '$phase' of target '$n': ${deps.map(_.toString).mkString(",")}") + logger.debug(s"Dependencies of phase '$phase' of target '$n': ${deps.map(_.toString).mkString(",")}") } val order = mutable.ListBuffer[TargetIdentifier]() @@ -84,8 +84,7 @@ object TargetOrdering { val candidate = nodes.find(_._2.isEmpty).map(_._1) .getOrElse({ val deps = nodes.map { case(k,v) => s" $k <= ${v.toSeq.mkString(", ")}"}.mkString("\n") - logger.error(s"Cannot create target order due to cyclic dependencies:\n$deps") - throw new RuntimeException("Cannot create target order") + throw new RuntimeException(s"Cannot create target order due to cyclic dependencies:\n$deps") }) // Remove candidate diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/execution/exceptions.scala b/flowman-core/src/main/scala/com/dimajix/flowman/execution/exceptions.scala index 5a20709fc..8f6e8e77c 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/execution/exceptions.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/execution/exceptions.scala @@ -22,6 +22,7 @@ import com.dimajix.flowman.model.MappingIdentifier import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.RelationIdentifier import com.dimajix.flowman.model.TargetIdentifier +import com.dimajix.flowman.model.TestIdentifier class ExecutionException( @@ -48,7 +49,11 @@ class NoSuchConnectionException(val connection:ConnectionIdentifier) extends ExecutionException(s"Connection '$connection' not found") class NoSuchJobException(val job:JobIdentifier) extends ExecutionException(s"Job '$job' not found") +class NoSuchTestException(val test:TestIdentifier) + extends ExecutionException(s"Test '$test' not found") +class ValidationFailedException(val target:TargetIdentifier, cause:Throwable = None.orNull) + extends ExecutionException(s"Validation of target $target failed", cause) class VerificationFailedException(val target:TargetIdentifier, cause:Throwable = None.orNull) extends ExecutionException(s"Verification of target $target failed", cause) diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/graph/Graph.scala b/flowman-core/src/main/scala/com/dimajix/flowman/graph/Graph.scala new file mode 100644 index 000000000..319593cc6 --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/graph/Graph.scala @@ -0,0 +1,118 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 
2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.graph + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.NoSuchMappingException +import com.dimajix.flowman.execution.NoSuchRelationException +import com.dimajix.flowman.execution.NoSuchTargetException +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.Relation +import com.dimajix.flowman.model.RelationIdentifier +import com.dimajix.flowman.model.Target +import com.dimajix.flowman.model.TargetIdentifier + + +object Graph { + /** + * Creates a Graph from a given project. The [[Context]] required for lookups and instantiation is retrieved from + * the given [[Session]] + * @param session + * @param project + * @return + */ + def ofProject(session:Session, project:Project) : Graph = { + ofProject(session.getContext(project), project) + } + + /** + * Creates a Graph from a given project. The specified [[Context]] has to be created for the given project + * @param session + * @param project + * @return + */ + def ofProject(context:Context, project:Project) : Graph = { + if (context.project.exists(_ ne project)) + throw new IllegalArgumentException("Graph.ofProject requires Context to belong to the given Project") + + val builder = new GraphBuilder(context) + project.mappings.keys.foreach { name => + builder.addMapping(MappingIdentifier(name)) + } + project.relations.keys.foreach { name => + builder.addRelation(RelationIdentifier(name)) + } + project.targets.keys.foreach { name => + builder.addTarget(TargetIdentifier(name)) + } + + builder.build() + } +} + + +case class Graph( + context:Context, + mappings:Seq[MappingRef], + relations:Seq[RelationRef], + targets:Seq[TargetRef] +) { + def nodes : Seq[Node] = mappings ++ relations ++ targets + def edges : Seq[Edge] = nodes.flatMap(_.outgoing) + + /** + * Tries to retrieve a node representing the specified Mapping + * @param mapping + * @return + */ + def mapping(instance:Mapping) : MappingRef = { + mappings.find(_.mapping eq instance).getOrElse(throw new NoSuchMappingException(instance.identifier)) + } + def mapping(id:MappingIdentifier) : MappingRef = { + val instance = context.getMapping(id) + mapping(instance) + } + + /** + * Tries to retrieve a node representing the specified Relation + * @param relation + * @return + */ + def relation(instance:Relation) : RelationRef = { + relations.find(_.relation eq instance).getOrElse(throw new NoSuchRelationException(instance.identifier)) + } + def relation(id:RelationIdentifier) : RelationRef = { + val instance = context.getRelation(id) + relation(instance) + } + + /** + * Tries to retrieve a node representing the specified Target + * @param target + * @return + */ + def target(instance:Target) : TargetRef = { + targets.find(_.target eq instance).getOrElse(throw new NoSuchTargetException(instance.identifier)) + } + def target(id:TargetIdentifier) : TargetRef = { + val 
instance = context.getTarget(id) + target(instance) + } +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/graph/GraphBuilder.scala b/flowman-core/src/main/scala/com/dimajix/flowman/graph/GraphBuilder.scala new file mode 100644 index 000000000..d959d2a3c --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/graph/GraphBuilder.scala @@ -0,0 +1,174 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.graph + +import com.dimajix.common.IdentityHashMap +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.Relation +import com.dimajix.flowman.model.RelationIdentifier +import com.dimajix.flowman.model.Target +import com.dimajix.flowman.model.TargetIdentifier + + +class GraphBuilder(context:Context) { + private val mappings:IdentityHashMap[Mapping,MappingRef] = IdentityHashMap() + private val relations:IdentityHashMap[Relation,RelationRef] = IdentityHashMap() + private val targets:IdentityHashMap[Target,TargetRef] = IdentityHashMap() + private var currentId:Int = 1 + + /** + * Adds a single [[Mapping]] to the [[GraphBuilder]] and performs all required linking operations to connect the + * mapping to its inputs + * @param mapping + * @return + */ + def addMapping(mapping:MappingIdentifier) : GraphBuilder = { + val instance = context.getMapping(mapping) + refMapping(instance) + this + } + def addMapping(mapping:Mapping) : GraphBuilder = { + refMapping(mapping) + this + } + def addMappings(mappings:Iterable[MappingIdentifier]) : GraphBuilder = { + mappings.foreach(addMapping) + this + } + + /** + * Adds a single [[Target]] to the [[GraphBuilder]] and performs all required linking operations to connect the + * target to its inputs and outputs + * @param target + * @return + */ + def addTarget(target:TargetIdentifier) : GraphBuilder = { + val instance = context.getTarget(target) + refTarget(instance) + this + } + def addTarget(target:Target) : GraphBuilder = { + refTarget(target) + this + } + def addTargets(targets:Iterable[TargetIdentifier]) : GraphBuilder = { + targets.foreach(addTarget) + this + } + + /** + * Adds a single [[Relation]] to the [[GraphBuilder]] and performs all linking operations. + * @param relation + * @return + */ + def addRelation(relation:RelationIdentifier) : GraphBuilder = { + val instance = context.getRelation(relation) + refRelation(instance) + this + } + def addRelation(relation:Relation) : GraphBuilder = { + refRelation(relation) + this + } + def addRelations(relations:Iterable[RelationIdentifier]) : GraphBuilder = { + relations.foreach(addRelation) + this + } + + + /** + * Retrieves a reference node for a mapping. 
+ * @param mapping + * @return + */ + def refMapping(mapping: Mapping) : MappingRef = { + val result = mappings.get(mapping) + if (result.nonEmpty) { + result.get + } + else { + // Create new node and *first* put it into map of known mappings + val node = MappingRef(nextId(), mapping) + mappings.put(mapping, node) + // Now recursively run the linking process on the newly created node + val linker = Linker(this, mapping.context, node) + mapping.link(linker) + node + } + } + + /** + * Retrieves a reference node for a relation. + * @param relation + * @return + */ + def refRelation(relation: Relation) : RelationRef = { + val result = relations.get(relation) + if (result.nonEmpty) { + result.get + } + else { + // Create new node and *first* put it into map of known relations + val node = RelationRef(nextId(), relation) + relations.put(relation, node) + // Now recursively run the linking process on the newly created node + val linker = Linker(this, relation.context, node) + relation.link(linker) + node + } + } + + /** + * Retrieves a reference node for a target. + * @param target + * @return + */ + def refTarget(target: Target) : TargetRef = { + val result = targets.get(target) + if (result.nonEmpty) { + result.get + } + else { + // Create new node and *first* put it into map of known targets + val node = TargetRef(nextId(), target) + targets.put(target, node) + // Now recursively run the linking process on the newly created node + val linker = Linker(this, target.context, node) + target.link(linker) + node + } + } + + /** + * Builds the full graph + * @return + */ + def build() : Graph = Graph( + context, + mappings.values.toList, + relations.values.toList, + targets.values.toList + ) + + private def nextId() : Int = { + val result = currentId + currentId += 1 + result + } +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/graph/Linker.scala b/flowman-core/src/main/scala/com/dimajix/flowman/graph/Linker.scala new file mode 100644 index 000000000..823ceadfa --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/graph/Linker.scala @@ -0,0 +1,55 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.graph + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.RelationIdentifier +import com.dimajix.flowman.types.FieldValue +import com.dimajix.flowman.types.SingleValue + + +case class Linker private[graph](builder:GraphBuilder, context:Context, node:Node) { + def input(mapping: MappingIdentifier, output:String) : Linker = { + val instance = context.getMapping(mapping) + val in = builder.refMapping(instance) + val edge = InputMapping(in, node, output) + link(edge) + } + def read(relation: RelationIdentifier, partitions:Map[String,FieldValue]) : Linker = { + val instance = context.getRelation(relation) + val in = builder.refRelation(instance) + val edge = ReadRelation(in, node, partitions) + link(edge) + } + def write(relation: RelationIdentifier, partition:Map[String,SingleValue]) : Linker = { + val instance = context.getRelation(relation) + val out = builder.refRelation(instance) + val edge = WriteRelation(node, out, partition) + link(edge) + } + + /** + * Performs a linking operation by adding an edge + * @param edge + */ + def link(edge:Edge) : Linker = { + edge.input.outEdges.append(edge) + edge.output.inEdges.append(edge) + this + } +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/graph/edges.scala b/flowman-core/src/main/scala/com/dimajix/flowman/graph/edges.scala new file mode 100644 index 000000000..945b415f7 --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/graph/edges.scala @@ -0,0 +1,42 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.graph + +import com.dimajix.flowman.model.ResourceIdentifier +import com.dimajix.flowman.types.FieldValue +import com.dimajix.flowman.types.SingleValue + + +sealed abstract class Edge { + def input : Node + def output : Node + def action : String +} + +case class ReadRelation(override val input:RelationRef, override val output:Node, partitions:Map[String,FieldValue] = Map()) extends Edge { + override def action: String = s"READ from ${input.label} partitions=(${partitions.map(kv => kv._1 + "=" + kv._2).mkString(",")})" + def resources : Set[ResourceIdentifier] = input.relation.resources(partitions) +} + +case class InputMapping(override val input:MappingRef,override val output:Node,pin:String="main") extends Edge { + override def action: String = s"INPUT from ${input.label} output '$pin'" +} + +case class WriteRelation(override val input:Node, override val output:RelationRef, partition:Map[String,SingleValue] = Map()) extends Edge { + override def action: String = s"WRITE from ${input.label} partition=(${partition.map(kv => kv._1 + "=" + kv._2).mkString(",")})" + def resources : Set[ResourceIdentifier] = output.relation.resources(partition) +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/graph/nodes.scala b/flowman-core/src/main/scala/com/dimajix/flowman/graph/nodes.scala new file mode 100644 index 000000000..d1f46e569 --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/graph/nodes.scala @@ -0,0 +1,100 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.graph + +import scala.collection.mutable + +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.Relation +import com.dimajix.flowman.model.ResourceIdentifier +import com.dimajix.flowman.model.Target + + +sealed abstract class Node { + private[graph] val inEdges = mutable.Buffer[Edge]() + private[graph] val outEdges = mutable.Buffer[Edge]() + private[graph] val _parent : Option[Node] = None + private[graph] val _children = mutable.Seq[Node]() + + /** Unique node ID, generated by GraphBuilder */ + val id : Int + + def label : String = s"($id) $category/$kind: '$name'" + + def category : String + def kind : String + def name : String + + def incoming : Seq[Edge] = inEdges + def outgoing : Seq[Edge] = outEdges + def parent : Option[Node] = _parent + def children : Seq[Node] = _children + + def upstreamDependencyTree : String = { + label + "\n" + upstreamTreeRec + } + + private def upstreamTreeRec : String = { + def indentSubtree(lines:Iterator[String], margin:Boolean) : Iterator[String] = { + if (lines.nonEmpty) { + val prefix = if (margin) " | " else " " + val firstLine = " +- " + lines.next() + Iterator(firstLine) ++ lines.map(prefix + _) + } + else { + Iterator() + } + } + val trees = incoming.map { child => + child.action + "\n" + child.input.upstreamTreeRec + } + val headChildren = trees.dropRight(1) + val lastChild = trees.takeRight(1) + + val headTree = headChildren.flatMap(l => indentSubtree(l.linesIterator, true)) + val tailTree = lastChild.flatMap(l => indentSubtree(l.linesIterator, false)) + (headTree ++ tailTree).mkString("\n") + } +} + +case class MappingRef(id:Int, mapping:Mapping) extends Node { + override def category: String = "mapping" + override def kind: String = mapping.kind + override def name: String = mapping.name +} +case class TargetRef(id:Int, target:Target) extends Node { + override def category: String = "target" + override def kind: String = target.kind + override def name: String = target.name +} +case class RelationRef(id:Int, relation:Relation) extends Node { + override def category: String = "relation" + override def kind: String = relation.kind + override def name: String = relation.name + + def resources : Set[ResourceIdentifier] = relation.resources(Map()) ++ relation.provides +} +case class MappingColumn(id:Int, mapping: Mapping, output:String, column:String) extends Node { + override def category: String = "mapping_column" + override def kind: String = "mapping_column" + override def name: String = mapping.name + "." + output + "." + column +} +case class RelationColumn(id:Int, relation: Relation, column:String) extends Node { + override def category: String = "relation_column" + override def kind: String = "relation_column" + override def name: String = relation.name + "." + column +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/hadoop/GlobPattern.scala b/flowman-core/src/main/scala/com/dimajix/flowman/hadoop/GlobPattern.scala index 9b14dbde3..5db66faf4 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/hadoop/GlobPattern.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/hadoop/GlobPattern.scala @@ -48,55 +48,53 @@ case class GlobPattern(globPattern: String) { var setOpen = 0 var curlyOpen = 0 var isBackslash = false - globPattern.foreach { c => - c match { - case _ if isBackslash => + globPattern.foreach { + case c if isBackslash => + regex.append(c) + isBackslash = false + case BACKSLASH => + regex.append(BACKSLASH) + isBackslash = true + case c@('.' 
| '$' | '(' | ')' | '|' | '+') => + // escape regex special chars that are not glob special chars + regex.append(BACKSLASH) + regex.append(c) + case '*' => + regex.append("[^/]+") + _hasWildcard = true + case '?' => + regex.append("[^/\\*]") + _hasWildcard = true + case '{' => // start of a group + regex.append("(?:") // non-capturing + curlyOpen += 1 + _hasWildcard = true + case c@',' => + regex.append(if (curlyOpen > 0) '|' else c) + case c@'}' => + if (curlyOpen > 0) { // end of a group + curlyOpen -= 1 + regex.append(")") + } else { regex.append(c) - isBackslash = false - case BACKSLASH => - regex.append(BACKSLASH) - isBackslash = true - case '.'| '$' | '(' | ')' | '|' | '+' => - // escape regex special chars that are not glob special chars - regex.append(BACKSLASH) - regex.append(c) - case '*' => - regex.append("[^/]+") - _hasWildcard = true - case '?' => - regex.append("[^/\\*]") - _hasWildcard = true - case '{' => // start of a group - regex.append("(?:") // non-capturing - curlyOpen += 1 - _hasWildcard = true - case ',' => - regex.append(if (curlyOpen > 0) '|' else c) - case '}' => - if (curlyOpen > 0) { // end of a group - curlyOpen -= 1 - regex.append(")") - } else { - regex.append(c) - } - case '[' => - //if (setOpen > 0) GlobPattern.error("Unclosed character class", glob, i) - setOpen += 1 - _hasWildcard = true - regex.append(c) - case '^' => // ^ inside [...] can be unescaped - if (setOpen == 0) regex.append(BACKSLASH) - regex.append(c) - case '!' => // TODO: [! needs to be translated to [^ - regex.append('!') - case ']' => - // Many set errors like [][] could not be easily detected here, - // as []], []-] and [-] are all valid POSIX glob and java regex. - // We'll just let the regex compiler do the real work. - setOpen -= 1 - regex.append(c) - case _ => regex.append(c) - } + } + case c@'[' => + //if (setOpen > 0) GlobPattern.error("Unclosed character class", glob, i) + setOpen += 1 + _hasWildcard = true + regex.append(c) + case c@'^' => // ^ inside [...] can be unescaped + if (setOpen == 0) regex.append(BACKSLASH) + regex.append(c) + case '!' => // TODO: [! needs to be translated to [^ + regex.append('!') + case c@']' => + // Many set errors like [][] could not be easily detected here, + // as []], []-] and [-] are all valid POSIX glob and java regex. + // We'll just let the regex compiler do the real work. 
+ setOpen -= 1 + regex.append(c) + case c => regex.append(c) } //if (setOpen > 0) GlobPattern.error("Unclosed character class", glob, len) //if (curlyOpen > 0) GlobPattern.error("Unclosed group", glob, len) diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/history/JdbcStateRepository.scala b/flowman-core/src/main/scala/com/dimajix/flowman/history/JdbcStateRepository.scala index 135400832..67583b7e6 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/history/JdbcStateRepository.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/history/JdbcStateRepository.scala @@ -89,7 +89,13 @@ private[history] class JdbcStateRepository(connection: JdbcStateStore.Connection val props = new Properties() connection.properties.foreach(kv => props.setProperty(kv._1, kv._2)) logger.debug(s"Connecting via JDBC to $url with driver $driver") - Database.forURL(url, driver=driver, user=user.orNull, password=password.orNull, prop=props) + val executor = slick.util.AsyncExecutor( + name="Flowman.default", + minThreads = 20, + maxThreads = 20, + queueSize = 1000, + maxConnections = 20) + Database.forURL(url, driver=driver, user=user.orNull, password=password.orNull, prop=props, executor=executor) } val jobRuns = TableQuery[JobRuns] @@ -235,12 +241,12 @@ private[history] class JdbcStateRepository(connection: JdbcStateStore.Connection def findJob(query:JobQuery, order:Seq[JobOrder], limit:Int, offset:Int) : Seq[JobState] = { def mapOrderColumn(order:JobOrder) : JobRuns => Rep[_] = { - order match { - case JobOrder.BY_DATETIME => t => t.start_ts - case JobOrder.BY_ID => t => t.id - case JobOrder.BY_NAME => t => t.job - case JobOrder.BY_PHASE => t => t.phase - case JobOrder.BY_STATUS => t => t.status + order.column match { + case JobOrderColumn.BY_DATETIME => t => t.start_ts + case JobOrderColumn.BY_ID => t => t.id + case JobOrderColumn.BY_NAME => t => t.job + case JobOrderColumn.BY_PHASE => t => t.phase + case JobOrderColumn.BY_STATUS => t => t.status } } def mapOrderDirection(order:JobOrder) : slick.ast.Ordering = { @@ -335,12 +341,12 @@ private[history] class JdbcStateRepository(connection: JdbcStateStore.Connection def findTarget(query:TargetQuery, order:Seq[TargetOrder], limit:Int, offset:Int) : Seq[TargetState] = { def mapOrderColumn(order:TargetOrder) : TargetRuns => Rep[_] = { - order match { - case TargetOrder.BY_DATETIME => t => t.start_ts - case TargetOrder.BY_ID => t => t.id - case TargetOrder.BY_NAME => t => t.target - case TargetOrder.BY_PHASE => t => t.phase - case TargetOrder.BY_STATUS => t => t.status + order.column match { + case TargetOrderColumn.BY_DATETIME => t => t.start_ts + case TargetOrderColumn.BY_ID => t => t.id + case TargetOrderColumn.BY_NAME => t => t.target + case TargetOrderColumn.BY_PHASE => t => t.phase + case TargetOrderColumn.BY_STATUS => t => t.status } } def mapOrderDirection(order:TargetOrder) : slick.ast.Ordering = { diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/history/JdbcStateStore.scala b/flowman-core/src/main/scala/com/dimajix/flowman/history/JdbcStateStore.scala index 4fac860c0..700337003 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/history/JdbcStateStore.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/history/JdbcStateStore.scala @@ -71,7 +71,7 @@ case class JdbcStateStore(connection:JdbcStateStore.Connection, retries:Int=3, t null, null ) - logger.info(s"Checking last state of phase '${run.phase}' of job '${run.namespace}/${run.project}/${run.job}' in state database") + logger.debug(s"Checking last state 
of phase '${run.phase}' of job '${run.namespace}/${run.project}/${run.job}' in state database") withSession { repository => repository.getJobState(run) } @@ -96,7 +96,7 @@ case class JdbcStateStore(connection:JdbcStateStore.Connection, retries:Int=3, t Status.RUNNING.value ) - logger.info(s"Writing startJob marker for phase '${phase}' of job '${run.namespace}/${run.project}/${run.job}' into state database") + logger.debug(s"Start '${phase}' job '${run.namespace}/${run.project}/${run.job}' in state database") withSession { repository => repository.insertJobRun(run, job.args) } @@ -109,7 +109,7 @@ case class JdbcStateStore(connection:JdbcStateStore.Connection, retries:Int=3, t */ override def finishJob(token:JobToken, status: Status) : Unit = { val run = token.asInstanceOf[JobRun] - logger.info(s"Mark last run of phase '${run.phase}' of job '${run.namespace}/${run.project}/${run.job}' as $status in state database") + logger.info(s"Mark '${run.phase}' job '${run.namespace}/${run.project}/${run.job}' as $status in state database") val now = new Timestamp(Clock.systemDefaultZone().instant().toEpochMilli) withSession{ repository => @@ -136,7 +136,7 @@ case class JdbcStateStore(connection:JdbcStateStore.Connection, retries:Int=3, t null, null ) - logger.info(s"Checking last state of target ${run.namespace}/${run.project}/${run.target} in state database") + logger.debug(s"Checking state of target ${run.namespace}/${run.project}/${run.target} in state database") withSession { repository => repository.getTargetState(run, target.partitions) } @@ -162,7 +162,7 @@ case class JdbcStateStore(connection:JdbcStateStore.Connection, retries:Int=3, t Status.RUNNING.value ) - logger.info(s"Writing start marker for phase '$phase' of target '${run.namespace}/${run.project}/${run.target}' into state database") + logger.debug(s"Start '$phase' target '${run.namespace}/${run.project}/${run.target}' in state database") withSession { repository => repository.insertTargetRun(run, target.partitions) } @@ -175,7 +175,7 @@ case class JdbcStateStore(connection:JdbcStateStore.Connection, retries:Int=3, t */ override def finishTarget(token:TargetToken, status: Status) : Unit = { val run = token.asInstanceOf[TargetRun] - logger.info(s"Mark last run of phase '${run.phase}' of target '${run.namespace}/${run.project}/${run.target}' as $status in state database") + logger.info(s"Mark '${run.phase}' target '${run.namespace}/${run.project}/${run.target}' as $status in state database") val now = new Timestamp(Clock.systemDefaultZone().instant().toEpochMilli) withSession{ repository => diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/history/job.scala b/flowman-core/src/main/scala/com/dimajix/flowman/history/job.scala index 1c319792d..8481258ab 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/history/job.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/history/job.scala @@ -32,7 +32,7 @@ import com.dimajix.flowman.execution.Status * @param to * @param args */ -case class JobQuery( +final case class JobQuery( namespace:Option[String] = None, project:Option[String] = None, name:Option[String] = None, @@ -44,7 +44,7 @@ case class JobQuery( ) -case class JobState( +final case class JobState( id:String, namespace:String, project:String, @@ -56,16 +56,23 @@ case class JobState( endDateTime:Option[ZonedDateTime] = None ) - -sealed case class JobOrder(isAscending:Boolean=true) { - def asc() : JobOrder = copy(true) - def desc() : JobOrder = copy(false) +sealed case class JobOrderColumn() +object 
JobOrderColumn { + object BY_DATETIME extends JobOrderColumn + object BY_NAME extends JobOrderColumn + object BY_ID extends JobOrderColumn + object BY_STATUS extends JobOrderColumn + object BY_PHASE extends JobOrderColumn } object JobOrder { - object BY_DATETIME extends JobOrder - object BY_NAME extends JobOrder - object BY_ID extends JobOrder - object BY_STATUS extends JobOrder - object BY_PHASE extends JobOrder + final val BY_DATETIME = JobOrder(JobOrderColumn.BY_DATETIME) + final val BY_NAME = JobOrder(JobOrderColumn.BY_NAME) + final val BY_ID = JobOrder(JobOrderColumn.BY_ID) + final val BY_STATUS = JobOrder(JobOrderColumn.BY_STATUS) + final val BY_PHASE = JobOrder(JobOrderColumn.BY_PHASE) +} +final case class JobOrder(column:JobOrderColumn, isAscending:Boolean=true) { + def asc() : JobOrder = copy(isAscending=true) + def desc() : JobOrder = copy(isAscending=false) } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/history/target.scala b/flowman-core/src/main/scala/com/dimajix/flowman/history/target.scala index 03c603eb1..5f85b9cc2 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/history/target.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/history/target.scala @@ -34,7 +34,7 @@ import com.dimajix.flowman.execution.Status * @param to * @param partitions */ -case class TargetQuery( +final case class TargetQuery( namespace:Option[String] = None, project:Option[String] = None, name:Option[String] = None, @@ -48,7 +48,7 @@ case class TargetQuery( ) -case class TargetState( +final case class TargetState( id:String, jobId:Option[String], namespace:String, @@ -62,17 +62,27 @@ case class TargetState( ) -sealed case class TargetOrder (isAscending:Boolean=true) { - def asc() : TargetOrder = copy(true) - def desc() : TargetOrder = copy(false) +sealed case class TargetOrderColumn() +object TargetOrderColumn { + object BY_DATETIME extends TargetOrderColumn + object BY_NAME extends TargetOrderColumn + object BY_ID extends TargetOrderColumn + object BY_STATUS extends TargetOrderColumn + object BY_PHASE extends TargetOrderColumn + object BY_PARENT_NAME extends TargetOrderColumn + object BY_PARENT_ID extends TargetOrderColumn } object TargetOrder { - object BY_DATETIME extends TargetOrder - object BY_NAME extends TargetOrder - object BY_ID extends TargetOrder - object BY_STATUS extends TargetOrder - object BY_PHASE extends TargetOrder - object BY_PARENT_NAME extends TargetOrder - object BY_PARENT_ID extends TargetOrder + final val BY_DATETIME = TargetOrder(TargetOrderColumn.BY_DATETIME) + final val BY_NAME = TargetOrder(TargetOrderColumn.BY_NAME) + final val BY_ID = TargetOrder(TargetOrderColumn.BY_ID) + final val BY_STATUS = TargetOrder(TargetOrderColumn.BY_STATUS) + final val BY_PHASE = TargetOrder(TargetOrderColumn.BY_PHASE) + final val BY_PARENT_NAME = TargetOrder(TargetOrderColumn.BY_PARENT_NAME) + final val BY_PARENT_ID = TargetOrder(TargetOrderColumn.BY_PARENT_ID) +} +final case class TargetOrder(column:TargetOrderColumn, isAscending:Boolean=true) { + def asc() : TargetOrder = copy(isAscending=true) + def desc() : TargetOrder = copy(isAscending=false) } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/model/Assertion.scala b/flowman-core/src/main/scala/com/dimajix/flowman/model/Assertion.scala new file mode 100644 index 000000000..6b54bf4b1 --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/model/Assertion.scala @@ -0,0 +1,80 @@ +package com.dimajix.flowman.model + +import org.apache.spark.sql.DataFrame + +import 
com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Execution + + +case class AssertionResult( + name:String, + valid:Boolean +) + +object Assertion { + object Properties { + def apply(context: Context, name:String = "", kind:String = "") : Properties = { + Properties( + context, + context.namespace, + context.project, + name, + kind, + Map(), + None + ) + } + } + + final case class Properties( + context:Context, + namespace:Option[Namespace], + project:Option[Project], + name:String, + kind:String, + labels:Map[String,String], + description:Option[String] + ) extends Instance.Properties[Properties] { + override def withName(name: String): Properties = copy(name=name) + } +} + + +trait Assertion extends Instance { + override def category: String = "assertion" + + /** + * Returns a description of the assertion + * @return + */ + def description : Option[String] + + /** + * Returns a list of physical resources required by this assertion. This list will only be non-empty for assertions + * which actually read from physical data. + * @return + */ + def requires : Set[ResourceIdentifier] + + /** + * Returns the dependencies (i.e. names of tables in the Dataflow model) + * @return + */ + def inputs : Seq[MappingOutputIdentifier] + + /** + * Executes this [[Assertion]] and returns a corresponding DataFrame + * + * @param execution + * @param input + * @return + */ + def execute(execution:Execution, input:Map[MappingOutputIdentifier,DataFrame]) : Seq[AssertionResult] +} + + +abstract class BaseAssertion extends AbstractInstance with Assertion { + protected override def instanceProperties : Assertion.Properties + + override def description: Option[String] = instanceProperties.description +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/model/Dataset.scala b/flowman-core/src/main/scala/com/dimajix/flowman/model/Dataset.scala index 8c12fdf9f..c2064e5c4 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/model/Dataset.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/model/Dataset.scala @@ -20,7 +20,7 @@ import org.apache.spark.sql.DataFrame import com.dimajix.common.Trilean import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.model.Connection.Properties import com.dimajix.flowman.types.StructType @@ -75,36 +75,37 @@ trait Dataset extends Instance { /** * Returns true if the data represented by this Dataset actually exists - * @param executor + * @param execution * @return */ - def exists(executor: Executor) : Trilean + def exists(execution: Execution) : Trilean /** * Removes the data represented by this dataset, but leaves the underlying relation present - * @param executor + * @param execution */ - def clean(executor: Executor) : Unit + def clean(execution: Execution) : Unit /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * @param schema - the schema to read. 
If none is specified, all available columns will be read * @return */ - def read(executor:Executor, schema:Option[org.apache.spark.sql.types.StructType]) : DataFrame + def read(execution:Execution, schema:Option[org.apache.spark.sql.types.StructType]) : DataFrame /** * Writes data into the relation, possibly into a specific partition - * @param executor + * @param execution * @param df - dataframe to write */ - def write(executor:Executor, df:DataFrame, mode:OutputMode = OutputMode.OVERWRITE) : Unit + def write(execution:Execution, df:DataFrame, mode:OutputMode = OutputMode.OVERWRITE) : Unit /** - * Returns the schema as produced by this dataset, relative to the given input schema + * Returns the schema of this dataset that is either returned by [[read]] operations or that is expected + * by [[write]] operations. If the schema is dynamic or cannot be inferred, [[None]] is returned. * @return */ - def describe(executor:Executor) : Option[StructType] + def describe(execution:Execution) : Option[StructType] } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/model/Job.scala b/flowman-core/src/main/scala/com/dimajix/flowman/model/Job.scala index 60a4c49c2..7ece4af99 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/model/Job.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/model/Job.scala @@ -17,11 +17,12 @@ package com.dimajix.flowman.model import scala.util.control.NonFatal +import scala.util.matching.Regex import org.slf4j.LoggerFactory import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.Runner import com.dimajix.flowman.execution.Status @@ -109,7 +110,7 @@ object Job { labels: Map[String, String], description:Option[String] ) extends Instance.Properties[Properties] { - override val kind : String = "batch" + override val kind : String = "job" override def withName(name: String): Properties = copy(name=name) } @@ -279,6 +280,10 @@ final case class Job( * @return */ def instance(args:Map[String,String]) : JobInstance = { + val pargs = parameters.map(_.name).toSet + if (args.keySet != pargs) + throw new IllegalArgumentException(s"Argument mismatch for job '$identifier', expected: ${pargs.mkString(",")} received: ${args.keySet.mkString(",")}") + JobInstance( namespace.map(_.name).getOrElse(""), project.map(_.name).getOrElse(""), @@ -310,14 +315,14 @@ final case class Job( /** * Performs interpolation of given arguments as FieldValues. This will return an Iterable of argument maps each - * of them to be used by a job executor. + * of them to be used by a job execution. 
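 * A hypothetical illustration (the parameter name and the use of `RangeValue` are assumptions of this
 * sketch, not part of this change): a job declaring a single date parameter could be interpolated over a
 * range, yielding one argument map per value:
 * {{{
 * job.interpolate(Map("processing_date" -> RangeValue("2021-01-01", "2021-01-03")))
 * //   => Iterable(Map("processing_date" -> <2021-01-01>), Map("processing_date" -> <2021-01-02>))
 * }}}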
* @param args * @return */ def interpolate(args:Map[String,FieldValue]) : Iterable[Map[String,Any]] = { def interpolate(args:Iterable[Map[String,Any]], param:Parameter, values:FieldValue) : Iterable[Map[String,Any]] = { val vals = try { - param.ftype.interpolate(values, param.granularity) + param.interpolate(values) } catch { case NonFatal(ex) => throw new IllegalArgumentException(s"Cannot interpolate parameter '${param.name}' of job '$name' with values '$values'", ex) @@ -382,13 +387,13 @@ final case class Job( * @param force * @return */ - def execute(executor:Executor, phase:Phase, args:Map[String,String], force:Boolean=false) : Status = { + def execute(executor:Execution, phase:Phase, args:Map[String,String], targets:Seq[Regex]=Seq(".*".r), force:Boolean=false, dryRun:Boolean=false) : Status = { require(args != null) require(phase != null) require(args != null) val jobArgs = arguments(args) val jobRunner = new Runner(executor, new NullStateStore) - jobRunner.executeJob(this, Seq(phase), jobArgs, force) + jobRunner.executeJob(this, Seq(phase), jobArgs, targets, force, dryRun) } } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/model/Mapping.scala b/flowman-core/src/main/scala/com/dimajix/flowman/model/Mapping.scala index a0964ac7f..291130e5a 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/model/Mapping.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/model/Mapping.scala @@ -20,8 +20,10 @@ import org.apache.spark.sql.DataFrame import org.apache.spark.storage.StorageLevel import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.NoSuchMappingOutputException +import com.dimajix.flowman.graph.Linker +import com.dimajix.flowman.graph.MappingRef import com.dimajix.flowman.types.StructType import com.dimajix.spark.sql.DataFrameUtils @@ -54,6 +56,7 @@ object Mapping { cache:StorageLevel ) extends Instance.Properties[Properties] { override def withName(name: String): Properties = copy(name=name) + def identifier : MappingIdentifier = MappingIdentifier(name, project.map(_.name)) } } @@ -122,27 +125,33 @@ trait Mapping extends Instance { def output(name:String = "main") : MappingOutputIdentifier /** - * Executes this MappingType and returns a corresponding DataFrame + * Executes this Mapping and returns a corresponding map of DataFrames per output * - * @param executor + * @param execution * @param input * @return */ - def execute(executor:Executor, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] + def execute(execution:Execution, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] /** * Returns the schema as produced by this mapping, relative to the given input schema * @param input * @return */ - def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] + def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] /** * Returns the schema as produced by this mapping, relative to the given input schema * @param input * @return */ - def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType], output:String) : StructType + def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType], output:String) : StructType + + /** + * Creates all known links for building a descriptive graph of the whole data flow + * Params: linker - The linker object to use for creating new edges + */ + def 
link(linker:Linker) : Unit } @@ -156,7 +165,7 @@ abstract class BaseMapping extends AbstractInstance with Mapping { * Returns an identifier for this mapping * @return */ - override def identifier : MappingIdentifier = MappingIdentifier(name, project.map(_.name)) + override def identifier : MappingIdentifier = instanceProperties.identifier /** * This method should return true, if the resulting dataframe should be broadcast for map-side joins @@ -214,17 +223,17 @@ abstract class BaseMapping extends AbstractInstance with Mapping { * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) // Create dummy data frames val replacements = input.map { case (name,schema) => - name -> DataFrameUtils.singleRow(executor.spark, schema.sparkType) + name -> DataFrameUtils.singleRow(execution.spark, schema.sparkType) } // Execute mapping - val results = execute(executor, replacements) + val results = execute(execution, replacements) // Extract schemas results.map { case (name,df) => name -> StructType.of(df.schema)} @@ -236,11 +245,21 @@ abstract class BaseMapping extends AbstractInstance with Mapping { * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType], output:String) : StructType = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType], output:String) : StructType = { + require(execution != null) require(input != null) require(output != null && output.nonEmpty) - describe(executor, input)(output) + describe(execution, input)(output) + } + + /** + * Creates all known links for building a descriptive graph of the whole data flow + * Params: linker - The linker object to use for creating new edges + */ + override def link(linker:Linker) : Unit = { + inputs.foreach( in => + linker.input(in.mapping, in.output) + ) } } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/model/Module.scala b/flowman-core/src/main/scala/com/dimajix/flowman/model/Module.scala index d625d4895..3f4d39a50 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/model/Module.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/model/Module.scala @@ -16,7 +16,6 @@ package com.dimajix.flowman.model -import java.io.InputStream import java.net.URL import java.util.ServiceLoader @@ -25,6 +24,7 @@ import scala.collection.JavaConverters._ import org.slf4j.LoggerFactory import com.dimajix.flowman.hadoop.File +import com.dimajix.flowman.spi.ModuleReader object Module { @@ -106,7 +106,8 @@ final case class Module( connections : Map[String,Template[Connection]] = Map(), mappings : Map[String,Template[Mapping]] = Map(), targets : Map[String,Template[Target]] = Map(), - jobs : Map[String,Template[Job]] = Map() + jobs : Map[String,Template[Job]] = Map(), + tests : Map[String,Template[Test]] = Map() ) { /** * Creates a new dataflow by merging this one with another one. 
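 * A rough usage sketch (the module contents and the method name `merge` are assumed for illustration; only
 * the merge semantics are taken from the hunk below): entity maps are concatenated, and entries of the
 * right-hand module win on name clashes:
 * {{{
 * val base    = Module(mappings = Map("facts" -> factsMapping))      // factsMapping: hypothetical Template[Mapping]
 * val overlay = Module(mappings = Map("facts" -> factsOverride),
 *                      tests    = Map("smoke" -> smokeTest))         // smokeTest: hypothetical Template[Test]
 * val merged  = base.merge(overlay)   // "facts" comes from overlay, "smoke" is carried over
 * }}}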
@@ -123,7 +124,8 @@ final case class Module( connections = connections ++ other.connections, mappings = mappings ++ other.mappings, targets = targets ++ other.targets, - jobs = jobs ++ other.jobs + jobs = jobs ++ other.jobs, + tests = tests ++ other.tests ) } @@ -143,19 +145,11 @@ final case class Module( relations = relations, mappings = mappings, targets = targets, - jobs = jobs + jobs = jobs, + tests = tests ) } } -abstract class ModuleReader { - def name : String - def format : String - def supports(format:String) : Boolean = this.format == format - - def file(file: File) : Module - def stream(stream: InputStream) : Module - def string(text: String): Module -} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/model/Namespace.scala b/flowman-core/src/main/scala/com/dimajix/flowman/model/Namespace.scala index 6310e5684..9237dc620 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/model/Namespace.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/model/Namespace.scala @@ -29,6 +29,7 @@ import com.dimajix.flowman.catalog.ExternalCatalog import com.dimajix.flowman.history.StateStore import com.dimajix.flowman.metric.ConsoleMetricSink import com.dimajix.flowman.metric.MetricSink +import com.dimajix.flowman.spi.NamespaceReader import com.dimajix.flowman.storage.Store @@ -99,13 +100,4 @@ final case class Namespace( } -abstract class NamespaceReader { - def name : String - def format : String - def supports(format:String) : Boolean = this.format == format - - def file(file: File) : Namespace - def stream(stream: InputStream) : Namespace - def string(text: String): Namespace -} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/model/Profile.scala b/flowman-core/src/main/scala/com/dimajix/flowman/model/Profile.scala index 2d6ecd129..a37dee365 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/model/Profile.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/model/Profile.scala @@ -19,9 +19,9 @@ package com.dimajix.flowman.model final case class Profile( name:String, - enabled:Boolean, - connections:Map[String,Template[Connection]], - config:Map[String,String], - environment:Map[String,String] + enabled:Boolean = false, + connections:Map[String,Template[Connection]] = Map(), + config:Map[String,String] = Map(), + environment:Map[String,String] = Map() ) { } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/model/Project.scala b/flowman-core/src/main/scala/com/dimajix/flowman/model/Project.scala index 1bbb2f9e6..7b673efd4 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/model/Project.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/model/Project.scala @@ -23,6 +23,7 @@ import scala.collection.JavaConverters._ import org.slf4j.LoggerFactory import com.dimajix.flowman.hadoop.File +import com.dimajix.flowman.spi.ProjectReader object Project { @@ -109,7 +110,8 @@ object Project { relations = module.relations, mappings = module.mappings, targets = module.targets, - jobs = module.jobs + jobs = module.jobs, + tests = module.tests ) } @@ -140,16 +142,6 @@ final case class Project( connections : Map[String,Template[Connection]] = Map(), mappings : Map[String,Template[Mapping]] = Map(), targets : Map[String,Template[Target]] = Map(), - jobs : Map[String,Template[Job]] = Map() + jobs : Map[String,Template[Job]] = Map(), + tests : Map[String,Template[Test]] = Map() ) - - -abstract class ProjectReader { - def name : String - def format : String - - def supports(format:String) : Boolean = this.format == format - - def 
file(file: File) : Project - def string(text: String): Project -} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/model/Relation.scala b/flowman-core/src/main/scala/com/dimajix/flowman/model/Relation.scala index 31370716e..f27d0d398 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/model/Relation.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/model/Relation.scala @@ -18,6 +18,14 @@ package com.dimajix.flowman.model import java.util.Locale +import com.dimajix.common.Trilean +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.execution.OutputMode +import com.dimajix.flowman.types.Field +import com.dimajix.flowman.types.FieldValue +import com.dimajix.flowman.types.SingleValue +import com.dimajix.flowman.util.SchemaUtils import org.apache.hadoop.fs.Path import org.apache.spark.sql.DataFrame import org.apache.spark.sql.DataFrameReader @@ -29,17 +37,9 @@ import org.apache.spark.sql.streaming.DataStreamReader import org.apache.spark.sql.streaming.DataStreamWriter import org.apache.spark.sql.streaming.StreamingQuery import org.apache.spark.sql.streaming.{OutputMode => StreamOutputMode} -import org.apache.spark.sql.types.StructType -import com.dimajix.common.Trilean -import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor -import com.dimajix.flowman.execution.OutputMode -import com.dimajix.flowman.model.Dataset.Properties -import com.dimajix.flowman.types.Field -import com.dimajix.flowman.types.FieldValue -import com.dimajix.flowman.types.SingleValue -import com.dimajix.flowman.util.SchemaUtils +import com.dimajix.flowman.graph.Linker +import com.dimajix.flowman.types.StructType object Relation { @@ -52,8 +52,7 @@ object Relation { name, "", Map(), - None, - Map() + None ) } } @@ -64,11 +63,11 @@ object Relation { name:String, kind:String, labels:Map[String,String], - description:Option[String], - options:Map[String,String] + description:Option[String] ) extends Instance.Properties[Properties] { override def withName(name: String): Properties = copy(name=name) + def identifier : RelationIdentifier = RelationIdentifier(name, project.map(_.name)) } } @@ -137,85 +136,99 @@ trait Relation extends Instance { def fields : Seq[Field] = schema.toSeq.flatMap(_.fields) ++ partitions.map(_.field) /** + * Returns the schema of the relation, either from an explicitly specified schema or by schema inference from + * the physical source + * @param execution + * @return + */ + def describe(execution:Execution) : StructType + + /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * @param schema - the schema to read. If none is specified, all available columns will be read * @param partitions - List of partitions. 
If none are specified, all the data will be read * @return */ - def read(executor:Executor, schema:Option[StructType], partitions:Map[String,FieldValue] = Map()) : DataFrame + def read(execution:Execution, schema:Option[org.apache.spark.sql.types.StructType], partitions:Map[String,FieldValue] = Map()) : DataFrame /** * Writes data into the relation, possibly into a specific partition - * @param executor + * @param execution * @param df - dataframe to write * @param partition - destination partition */ - def write(executor:Executor, df:DataFrame, partition:Map[String,SingleValue] = Map(), mode:OutputMode = OutputMode.OVERWRITE) : Unit + def write(execution:Execution, df:DataFrame, partition:Map[String,SingleValue] = Map(), mode:OutputMode = OutputMode.OVERWRITE) : Unit /** * Removes one or more partitions. - * @param executor + * @param execution * @param partitions */ - def truncate(executor:Executor, partitions:Map[String,FieldValue] = Map()) : Unit + def truncate(execution:Execution, partitions:Map[String,FieldValue] = Map()) : Unit /** * Reads data from a streaming source - * @param executor + * @param execution * @param schema * @return */ - def readStream(executor:Executor, schema:Option[StructType]) : DataFrame = ??? + def readStream(execution:Execution, schema:Option[org.apache.spark.sql.types.StructType]) : DataFrame = ??? /** * Writes data to a streaming sink - * @param executor + * @param execution * @param df * @return */ - def writeStream(executor:Executor, df:DataFrame, mode:OutputMode, checkpointLocation:Path) : StreamingQuery = ??? + def writeStream(execution:Execution, df:DataFrame, mode:OutputMode, checkpointLocation:Path) : StreamingQuery = ??? /** * Returns true if the relation already exists, otherwise it needs to be created prior usage. This refers to * the relation itself, not to the data or a specific partition. [[loaded]] should return [[Yes]] after * [[[create]] has been called, and it should return [[No]] after [[destroy]] has been called. * - * @param executor + * @param execution * @return */ - def exists(executor:Executor) : Trilean + def exists(execution:Execution) : Trilean /** * Returns true if the target partition exists and contains valid data. Absence of a partition indicates that a * [[write]] is required for getting up-to-date contents. A [[write]] with output mode * [[OutputMode.ERROR_IF_EXISTS]] then should not throw an error but create the corresponding partition - * @param executor + * @param execution * @param partition * @return */ - def loaded(executor:Executor, partition:Map[String,SingleValue] = Map()) : Trilean + def loaded(execution:Execution, partition:Map[String,SingleValue] = Map()) : Trilean /** * This method will physically create the corresponding relation. This might be a Hive table or a directory. The * relation will not contain any data, but all metadata will be processed - * @param executor + * @param execution */ - def create(executor:Executor, ifNotExists:Boolean=false) : Unit + def create(execution:Execution, ifNotExists:Boolean=false) : Unit /** * This will delete any physical representation of the relation. Depending on the type only some meta data like * a Hive table might be dropped or also the physical files might be deleted - * @param executor + * @param execution */ - def destroy(executor:Executor, ifExists:Boolean=false) : Unit + def destroy(execution:Execution, ifExists:Boolean=false) : Unit /** * This will update any existing relation to the specified metadata. 
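 * For orientation, a hypothetical call site of the reworked Execution-based API of this trait (the relation,
 * the partition value and the DataFrame are illustrative only, not part of this change):
 * {{{
 * if (relation.exists(execution) != Yes)
 *   relation.create(execution, ifNotExists = true)
 * if (relation.loaded(execution, Map("day" -> SingleValue("2021-01-01"))) != Yes)
 *   relation.write(execution, df, Map("day" -> SingleValue("2021-01-01")), OutputMode.OVERWRITE)
 * }}}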
- * @param executor + * @param execution */ - def migrate(executor:Executor) : Unit + def migrate(execution:Execution) : Unit + + /** + * Creates all known links for building a descriptive graph of the whole data flow + * Params: linker - The linker object to use for creating new edges + */ + def link(linker:Linker) : Unit } @@ -230,7 +243,7 @@ abstract class BaseRelation extends AbstractInstance with Relation { * Returns an identifier for this relation * @return */ - override def identifier : RelationIdentifier = RelationIdentifier(name, project.map(_.name)) + override def identifier : RelationIdentifier = instanceProperties.identifier /** * Returns a description for the relation @@ -251,20 +264,38 @@ abstract class BaseRelation extends AbstractInstance with Relation { override def partitions : Seq[PartitionField] = Seq() /** - * Returns a map of all options. There is no specific usage for options, that depends on the - * specific implementation + * Returns the schema of the relation, either from an explicitly specified schema or by schema inference from + * the physical source + * @param execution * @return */ - def options : Map[String,String] = instanceProperties.options + override def describe(execution:Execution) : StructType = { + if (fields.nonEmpty) { + // Use given fields if relation contains valid list of fields + StructType(fields) + } + else { + // Otherwise let Spark infer the schema + val df = read(execution, None) + StructType.of(df.schema) + } + } + + /** + * Creates all known links for building a descriptive graph of the whole data flow + * Params: linker - The linker object to use for creating new edges + */ + def link(linker:Linker) : Unit = {} /** - * Creates a DataFrameReader which is already configured with options and the schema is also - * already included - * @param executor + * Creates a DataFrameReader which is already configured with the schema + * @param execution * @return */ - protected def reader(executor:Executor) : DataFrameReader = { - val reader = executor.spark.read.options(options) + protected def reader(execution:Execution, format:String, options:Map[String,String]) : DataFrameReader = { + val reader = execution.spark.read + .format(format) + .options(options) inputSchema.foreach(s => reader.schema(s)) @@ -272,13 +303,14 @@ abstract class BaseRelation extends AbstractInstance with Relation { } /** - * Creates a DataStreamReader which is already configured with options and the schema is also - * already included - * @param executor + * Creates a DataStreamReader which is already configured + * @param execution * @return */ - protected def streamReader(executor: Executor) : DataStreamReader = { - val reader = executor.spark.readStream.options(options) + protected def streamReader(execution: Execution, format:String, options:Map[String,String]) : DataStreamReader = { + val reader = execution.spark.readStream + .format(format) + .options(options) inputSchema.foreach(s => reader.schema(s)) @@ -288,13 +320,14 @@ abstract class BaseRelation extends AbstractInstance with Relation { /** * Ceates a DataFrameWriter which is already configured with any options. 
Moreover * the desired schema of the relation is also applied to the DataFrame - * @param executor + * @param execution * @param df * @return */ - protected def writer(executor: Executor, df:DataFrame, saveMode:SaveMode) : DataFrameWriter[Row] = { - applyOutputSchema(executor, df) + protected def writer(execution: Execution, df:DataFrame, format:String, options:Map[String,String], saveMode:SaveMode) : DataFrameWriter[Row] = { + applyOutputSchema(execution, df) .write + .format(format) .options(options) .mode(saveMode) } @@ -302,41 +335,57 @@ abstract class BaseRelation extends AbstractInstance with Relation { /** * Ceates a DataStreamWriter which is already configured with any options. Moreover * the desired schema of the relation is also applied to the DataFrame - * @param executor + * @param execution * @param df * @return */ - protected def streamWriter(executor: Executor, df:DataFrame, outputMode:StreamOutputMode, checkpointLocation:Path) : DataStreamWriter[Row]= { - val outputDf = applyOutputSchema(executor, df) + protected def streamWriter(execution: Execution, df:DataFrame, format:String, options:Map[String,String], outputMode:StreamOutputMode, checkpointLocation:Path) : DataStreamWriter[Row]= { + val outputDf = applyOutputSchema(execution, df) outputDf.writeStream + .format(format) .options(options) .option("checkpointLocation", checkpointLocation.toString) .outputMode(outputMode) } /** - * Creates a Spark schema from the list of fields. + * Returns the schema that will be used internally when reading from this data source. This schema should match the + * user specified schema and will be applied in read operations. This should include the partition column whenever + * the source returns it. + * @return + */ + protected def inputSchema : Option[org.apache.spark.sql.types.StructType] = { + schema.map(_.sparkSchema) + } + + /** + * Applies the input schema (or maybe even transforms it). This should include partitions only if they are + * required in read operations. + * @param df * @return */ - protected def inputSchema : Option[StructType] = { - schema.map(s => StructType(s.fields.map(_.sparkField))) + protected def applyInputSchema(df:DataFrame) : DataFrame = { + SchemaUtils.applySchema(df, inputSchema, insertNulls=false) } /** - * Creates a Spark schema from the list of fields. The list is used for output operations, i.e. for writing + * Returns the Spark schema as it is expected from the physical relation for write operations. The list is used + * for output operations, i.e. for writing. This should include partitions only if they are required for write + * operations. * @return */ - protected def outputSchema : Option[StructType] = { - schema.map(s => s.sparkSchema) + protected def outputSchema(execution:Execution) : Option[org.apache.spark.sql.types.StructType] = { + schema.map(_.sparkSchema) } /** - * Applies the specified schema (or maybe even transforms it) + * Applies the output schema (or maybe even transforms it). This should include partitions only if they are + * required for write operations. 
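 * As a sketch only (no concrete relation is part of this hunk; the format, the empty options and the
 * `location` field are assumptions), a file-based implementation could chain these helpers roughly like this:
 * {{{
 * override def write(execution: Execution, df: DataFrame,
 *                    partition: Map[String, SingleValue], mode: OutputMode): Unit = {
 *   // writer() already applies the output schema via applyOutputSchema();
 *   // the mapping of OutputMode to a Spark SaveMode is omitted in this sketch
 *   writer(execution, df, "parquet", Map.empty, SaveMode.Append).save(location.toString)
 * }
 * }}}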
* @param df * @return */ - protected def applyOutputSchema(executor:Executor, df:DataFrame) : DataFrame = { - SchemaUtils.applySchema(df, outputSchema) + protected def applyOutputSchema(execution:Execution, df:DataFrame) : DataFrame = { + SchemaUtils.applySchema(df, outputSchema(execution)) } } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/model/ResourceIdentifier.scala b/flowman-core/src/main/scala/com/dimajix/flowman/model/ResourceIdentifier.scala index 0753c2422..d715e9679 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/model/ResourceIdentifier.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/model/ResourceIdentifier.scala @@ -29,15 +29,15 @@ import com.dimajix.flowman.hadoop.GlobPattern object ResourceIdentifier { def ofFile(file:Path) = GlobbingResourceIdentifier("file", file.toString) - def ofLocal(file:Path) = GlobbingResourceIdentifier("local", file.toString) - def ofLocal(file:File) = GlobbingResourceIdentifier("local", file.toURI.toString) + def ofLocal(file:Path) = GlobbingResourceIdentifier("local", file.toUri.getPath) + def ofLocal(file:File) = GlobbingResourceIdentifier("local", file.toURI.getPath) def ofHiveDatabase(database:String) = RegexResourceIdentifier("hiveDatabase", database) def ofHiveTable(table:String) = RegexResourceIdentifier("hiveTable", table) def ofHiveTable(table:String, database:Option[String]) = RegexResourceIdentifier("hiveTable", fqTable(table, database)) def ofHivePartition(table:String, database:Option[String], partition:Map[String,Any]) = RegexResourceIdentifier("hiveTablePartition", fqTable(table, database), partition.map { case(k,v) => k -> v.toString }) def ofJdbcDatabase(database:String) = RegexResourceIdentifier("jdbcDatabase", database) def ofJdbcTable(table:String, database:Option[String]) = RegexResourceIdentifier("jdbcTable", fqTable(table, database)) - def ofJdbcTablePartition(table:String, database:Option[String], partition:Map[String,Any]) = RegexResourceIdentifier("jdbcTable", fqTable(table, database), partition.map { case(k,v) => k -> v.toString }) + def ofJdbcTablePartition(table:String, database:Option[String], partition:Map[String,Any]) = RegexResourceIdentifier("jdbcTablePartition", fqTable(table, database), partition.map { case(k,v) => k -> v.toString }) def ofURL(url:URL) = RegexResourceIdentifier("url", url.toString) private def fqTable(table:String, database:Option[String]) : String = database.map(_ + ".").getOrElse("") + table diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/model/Target.scala b/flowman-core/src/main/scala/com/dimajix/flowman/model/Target.scala index b91827b87..637636ac4 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/model/Target.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/model/Target.scala @@ -19,8 +19,9 @@ package com.dimajix.flowman.model import com.dimajix.common.Trilean import com.dimajix.common.Unknown import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase +import com.dimajix.flowman.graph.Linker /** * @@ -76,6 +77,7 @@ object Target { after: Seq[TargetIdentifier] ) extends Instance.Properties[Properties] { override def withName(name: String): Properties = copy(name=name) + def identifier : TargetIdentifier = TargetIdentifier(name, project.map(_.name)) } } @@ -135,18 +137,24 @@ trait Target extends Instance { /** * Returns the state of the target, specifically of any artifacts produces. 
If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. - * @param executor + * @param execution * @param phase * @return */ - def dirty(executor: Executor, phase: Phase) : Trilean + def dirty(execution: Execution, phase: Phase) : Trilean /** * Executes a specific phase of this target - * @param executor + * @param execution * @param phase */ - def execute(executor: Executor, phase: Phase) : Unit + def execute(execution: Execution, phase: Phase) : Unit + + /** + * Creates all known links for building a descriptive graph of the whole data flow + * Params: linker - The linker object to use for creating new edges + */ + def link(linker:Linker) : Unit } @@ -159,7 +167,7 @@ abstract class BaseTarget extends AbstractInstance with Target { * Returns an identifier for this target * @return */ - override def identifier : TargetIdentifier = TargetIdentifier(name, project.map(_.name)) + override def identifier : TargetIdentifier = instanceProperties.identifier /** * Returns an instance representing this target with the context @@ -192,7 +200,7 @@ abstract class BaseTarget extends AbstractInstance with Target { * Returns all phases which are implemented by this target in the execute method * @return */ - override def phases : Set[Phase] = Set(Phase.CREATE, Phase.BUILD, Phase.VERIFY, Phase.TRUNCATE, Phase.DESTROY) + override def phases : Set[Phase] = Set(Phase.VALIDATE, Phase.CREATE, Phase.BUILD, Phase.VERIFY, Phase.TRUNCATE, Phase.DESTROY) /** * Returns a list of physical resources produced by this target @@ -213,34 +221,47 @@ abstract class BaseTarget extends AbstractInstance with Target { * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. * - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase): Trilean = Unknown + override def dirty(execution: Execution, phase: Phase): Trilean = Unknown /** * Executes a specific phase of this target * - * @param executor + * @param execution * @param phase */ - override def execute(executor: Executor, phase: Phase) : Unit = { + override def execute(execution: Execution, phase: Phase) : Unit = { phase match { - case Phase.CREATE => create(executor) - case Phase.BUILD => build(executor) - case Phase.VERIFY => verify(executor) - case Phase.TRUNCATE => truncate(executor) - case Phase.DESTROY => destroy(executor) + case Phase.VALIDATE => validate(execution) + case Phase.CREATE => create(execution) + case Phase.BUILD => build(execution) + case Phase.VERIFY => verify(execution) + case Phase.TRUNCATE => truncate(execution) + case Phase.DESTROY => destroy(execution) } } + /** + * Creates all known links for building a descriptive graph of the whole data flow + * Params: linker - The linker object to use for creating new edges + */ + override def link(linker:Linker) : Unit = {} + + /** + * Performs validation before execution. This might be a good point in time to validate any + * assumption on data sources + */ + protected def validate(executor:Execution) : Unit = {} + /** * Creates the resource associated with this target. This may be a Hive table or a JDBC table. 
This method * will not provide the data itself, it will only create the container * @param executor */ - protected def create(executor:Executor) : Unit = {} + protected def create(executor:Execution) : Unit = {} /** * Abstract method which will perform the output operation. All required tables need to be @@ -248,26 +269,26 @@ abstract class BaseTarget extends AbstractInstance with Target { * * @param executor */ - protected def build(executor:Executor) : Unit = {} + protected def build(executor:Execution) : Unit = {} /** * Performs a verification of the build step or possibly other checks. * * @param executor */ - protected def verify(executor: Executor) : Unit = {} + protected def verify(executor: Execution) : Unit = {} /** * Deletes data of a specific target * * @param executor */ - protected def truncate(executor:Executor) : Unit = {} + protected def truncate(executor:Execution) : Unit = {} /** * Completely destroys the resource associated with this target. This will delete both the phyiscal data and * the table definition * @param executor */ - protected def destroy(executor:Executor) : Unit = {} + protected def destroy(executor:Execution) : Unit = {} } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/model/Test.scala b/flowman-core/src/main/scala/com/dimajix/flowman/model/Test.scala new file mode 100644 index 000000000..92a2701e9 --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/model/Test.scala @@ -0,0 +1,100 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.model + +import com.dimajix.flowman.execution.Context + + +object Test { + object Properties { + def apply(context: Context, name: String = ""): Properties = { + Properties( + context, + context.namespace, + context.project, + name, + Map(), + None + ) + } + } + final case class Properties( + context: Context, + namespace:Option[Namespace], + project:Option[Project], + name: String, + labels: Map[String, String], + description:Option[String] + ) extends Instance.Properties[Properties] { + override val kind : String = "test" + override def withName(name: String): Properties = copy(name=name) + } + + /** + * Creates a new [[Test]] from an existing test and a list of parent tests + * @param test + * @param parents + * @return + */ + def merge(test:Test, parents:Seq[Test]) : Test = { + val parentEnvironment = parents + .map(test => test.environment) + .reduceOption((envs, elems) => envs ++ elems) + .getOrElse(Map()) + val parentTargets = parents + .map(test => test.targets.toSet) + .reduceOption((targets, elems) => targets ++ elems) + .getOrElse(Set()) + + val allEnvironment = parentEnvironment ++ test.environment + + val allTargets = parentTargets ++ test.targets + val allRelationMocks = parents.foldLeft(Map[String,Template[Relation]]())((f,t) => f ++ t.overrideRelations) ++ test.overrideRelations + val allMappingMocks = parents.foldLeft(Map[String,Template[Mapping]]())((f,t) => f ++ t.overrideMappings) ++ test.overrideMappings + val allFixtures = parents.foldLeft(Map[String,Template[Target]]())((f,t) => f ++ t.fixtures) ++ test.fixtures + val allAssertions = parents.foldLeft(Map[String,Template[Assertion]]())((f,t) => f ++ t.assertions) ++ test.assertions + + Test( + test.instanceProperties, + allEnvironment, + allTargets.toSeq, + allRelationMocks, + allMappingMocks, + allFixtures, + allAssertions + ) + } +} + + +final case class Test( + instanceProperties:Test.Properties, + environment:Map[String,String] = Map(), + targets:Seq[TargetIdentifier] = Seq(), + + overrideRelations:Map[String,Template[Relation]] = Map(), + overrideMappings:Map[String,Template[Mapping]] = Map(), + fixtures:Map[String,Template[Target]] = Map(), + assertions:Map[String,Template[Assertion]] = Map() +) extends AbstractInstance { + override def category: String = "test" + override def kind : String = "test" + + def identifier : TestIdentifier = TestIdentifier(name, project.map(_.name)) + + def description : Option[String] = instanceProperties.description +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/model/package.scala b/flowman-core/src/main/scala/com/dimajix/flowman/model/package.scala index 159cc4aa3..8449d0b8f 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/model/package.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/model/package.scala @@ -23,10 +23,12 @@ package object model { type RelationIdentifier = Identifier[Relation] type TargetIdentifier = Identifier[Target] type JobIdentifier = Identifier[Job] + type TestIdentifier = Identifier[Test] object MappingIdentifier extends IdentifierFactory[Mapping] object ConnectionIdentifier extends IdentifierFactory[Connection] object RelationIdentifier extends IdentifierFactory[Relation] object TargetIdentifier extends IdentifierFactory[Target] object JobIdentifier extends IdentifierFactory[Job] + object TestIdentifier extends IdentifierFactory[Test] } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/spi/ClassAnnotationHandler.scala 
b/flowman-core/src/main/scala/com/dimajix/flowman/spi/ClassAnnotationHandler.scala new file mode 100644 index 000000000..2853e9d81 --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/spi/ClassAnnotationHandler.scala @@ -0,0 +1,24 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spi + + +trait ClassAnnotationHandler { + def annotation: Class[_] + + def register(clazz: Class[_]): Unit +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/spi/ClassAnnotationScanner.scala b/flowman-core/src/main/scala/com/dimajix/flowman/spi/ClassAnnotationScanner.scala index 2939267a5..347c04a7a 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/spi/ClassAnnotationScanner.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/spi/ClassAnnotationScanner.scala @@ -29,11 +29,7 @@ import com.dimajix.flowman.plugin.Plugin import com.dimajix.flowman.plugin.PluginListener -trait ClassAnnotationHandler { - def annotation : Class[_] - def register(clazz:Class[_]) : Unit -} class ClassAnnotationScanner diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/spi/LogFilter.scala b/flowman-core/src/main/scala/com/dimajix/flowman/spi/LogFilter.scala new file mode 100644 index 000000000..ac3970e68 --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/spi/LogFilter.scala @@ -0,0 +1,40 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spi + +import java.util.ServiceLoader +import scala.collection.JavaConverters._ + + +object LogFilter { + def filters : Seq[LogFilter] = { + val loader = ServiceLoader.load(classOf[LogFilter]) + loader.iterator().asScala.toSeq + } +} + +abstract class LogFilter { + /** + * This method gets called for every config key/value. The method can either return a redacted key/value, which + * then get logged instead of the original key/value. Or the method may return None, which means that no log + * is to be produced at all. 
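 * A minimal sketch of a possible implementation (the keyword-based redaction is purely illustrative):
 * {{{
 * class SecretRedactingLogFilter extends LogFilter {
 *   override def filterConfig(key: String, value: String): Option[(String, String)] = {
 *     if (key.toLowerCase.contains("password")) Some(key -> "***redacted***")
 *     else Some(key -> value)
 *   }
 * }
 * }}}
 * Implementations are discovered via Java's ServiceLoader (see [[LogFilter.filters]]), so a concrete filter
 * also needs a corresponding META-INF/services registration.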
+ * @param key + * @param value + * @return + */ + def filterConfig(key:String, value:String) : Option[(String,String)] +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/spi/ModuleReader.scala b/flowman-core/src/main/scala/com/dimajix/flowman/spi/ModuleReader.scala new file mode 100644 index 000000000..dd85a1e6d --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/spi/ModuleReader.scala @@ -0,0 +1,65 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spi + +import java.io.InputStream + +import com.dimajix.flowman.hadoop.File +import com.dimajix.flowman.model.Module + + +abstract class ModuleReader { + /** + * Returns the human readable name of the module file format + * @return + */ + def name: String + + /** + * Returns the internally used short name of the module file format + * @return + */ + def format: String + + /** + * Returns true if a given format is supported by this reader + * @param format + * @return + */ + def supports(format: String): Boolean = this.format == format + + /** + * Loads a Module from the given file + * @param file + * @return + */ + def file(file: File): Module + + /** + * Loads a Module from the given InputStream + * @param file + * @return + */ + def stream(stream: InputStream): Module + + /** + * Loads a Module from the given String + * @param file + * @return + */ + def string(text: String): Module +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/spi/NamespaceReader.scala b/flowman-core/src/main/scala/com/dimajix/flowman/spi/NamespaceReader.scala new file mode 100644 index 000000000..f21d68548 --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/spi/NamespaceReader.scala @@ -0,0 +1,65 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spi + +import java.io.File +import java.io.InputStream + +import com.dimajix.flowman.model.Namespace + + +abstract class NamespaceReader { + /** + * Returns the human readable name of the namespace file format + * @return + */ + def name: String + + /** + * Returns the internally used short name of the namespace file format + * @return + */ + def format: String + + /** + * Returns true if a given format is supported by this reader + * @param format + * @return + */ + def supports(format: String): Boolean = this.format == format + + /** + * Loads a Namespace from the given file + * @param file + * @return + */ + def file(file: File): Namespace + + /** + * Loads a Namespace from the given InputStream + * @param file + * @return + */ + def stream(stream: InputStream): Namespace + + /** + * Loads a Namespace from the given String + * @param file + * @return + */ + def string(text: String): Namespace +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/spi/ProjectReader.scala b/flowman-core/src/main/scala/com/dimajix/flowman/spi/ProjectReader.scala new file mode 100644 index 000000000..8b1179494 --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/spi/ProjectReader.scala @@ -0,0 +1,56 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spi + +import com.dimajix.flowman.hadoop.File +import com.dimajix.flowman.model.Project + + +abstract class ProjectReader { + /** + * Returns the human readable name of the project file format + * @return + */ + def name: String + + /** + * Returns the internally used short name of the project file format + * @return + */ + def format: String + + /** + * Returns true if a given format is supported by this reader + * @param format + * @return + */ + def supports(format: String): Boolean = this.format == format + + /** + * Loads a Project from the given file + * @param file + * @return + */ + def file(file: File): Project + + /** + * Loads a Project from the given String + * @param file + * @return + */ + def string(text: String): Project +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/spi/SparkExtension.scala b/flowman-core/src/main/scala/com/dimajix/flowman/spi/SparkExtension.scala new file mode 100644 index 000000000..f8a9b8a6d --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/spi/SparkExtension.scala @@ -0,0 +1,47 @@ +/* + * Copyright 2018 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spi + +import java.util.ServiceLoader + +import scala.collection.JavaConverters._ + +import org.apache.spark.sql.SparkSession + + +object SparkExtension { + def extensions : Seq[SparkExtension] = { + val loader = ServiceLoader.load(classOf[SparkExtension]) + loader.iterator().asScala.toSeq + } +} + +abstract class SparkExtension { + /** + * Hook for extending a Spark session before it is built + * @param builder + * @return + */ + def register(builder:SparkSession.Builder) : SparkSession.Builder + + /** + * Hook for extending an existing Spark session + * @param session + * @return + */ + def register(session:SparkSession) : SparkSession +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/spi/TemplateObjectHandler.scala b/flowman-core/src/main/scala/com/dimajix/flowman/spi/TemplateObjectHandler.scala new file mode 100644 index 000000000..531a6abfc --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/spi/TemplateObjectHandler.scala @@ -0,0 +1,27 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spi + +import com.dimajix.flowman.annotation.TemplateObject +import com.dimajix.flowman.templating.Velocity + + +class TemplateObjectHandler extends ClassAnnotationHandler { + override def annotation: Class[_] = classOf[TemplateObject] + + override def register(clazz: Class[_]): Unit = Velocity.addClass(clazz.getAnnotation(classOf[TemplateObject]).name(), clazz) +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/templating/Velocity.scala b/flowman-core/src/main/scala/com/dimajix/flowman/templating/Velocity.scala index 0ac01e783..af4dbd4e5 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/templating/Velocity.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/templating/Velocity.scala @@ -106,8 +106,4 @@ object Velocity { } -class TemplateObjectHandler extends ClassAnnotationHandler { - override def annotation: Class[_] = classOf[TemplateObject] - override def register(clazz: Class[_]): Unit = Velocity.addClass(clazz.getAnnotation(classOf[TemplateObject]).name(), clazz) -} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/transforms/SchemaEnforcer.scala b/flowman-core/src/main/scala/com/dimajix/flowman/transforms/SchemaEnforcer.scala index 37b57c9c5..f782edbed 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/transforms/SchemaEnforcer.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/transforms/SchemaEnforcer.scala @@ -28,16 +28,8 @@ import org.apache.spark.sql.types.DataType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType +import com.dimajix.flowman.types.FieldType import com.dimajix.spark.sql.functions.nullable_struct -import com.dimajix.flowman.util.SchemaUtils - - -object SchemaEnforcer { - def apply(columns:Seq[(String,String)]) : 
SchemaEnforcer = { - val schema = StructType(columns.map(nt => StructField(nt._1, SchemaUtils.mapType(nt._2)))) - SchemaEnforcer(schema) - } -} case class SchemaEnforcer(schema:StructType) { diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/transforms/exceptions.scala b/flowman-core/src/main/scala/com/dimajix/flowman/transforms/exceptions.scala new file mode 100644 index 000000000..cf259f34b --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/transforms/exceptions.scala @@ -0,0 +1,26 @@ +/* + * Copyright 2018 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.transforms + + +class AnalysisException(val message: String,val cause: Option[Throwable] = None) + extends Exception(message, cause.orNull) { + +} + +class NoSuchColumnException(column:String) + extends AnalysisException(s"Column '$column' not found") diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/transforms/schema/Tree.scala b/flowman-core/src/main/scala/com/dimajix/flowman/transforms/schema/Tree.scala index ab6f311fe..7b4580d19 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/transforms/schema/Tree.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/transforms/schema/Tree.scala @@ -18,6 +18,8 @@ package com.dimajix.flowman.transforms.schema import java.util.Locale +import com.dimajix.flowman.transforms.NoSuchColumnException + object Path { val empty = Path(Seq()) @@ -313,9 +315,18 @@ case class LeafNode[T](name:String, value:T, nullable:Boolean=true, metadata:Map */ override def transform(fn:Node[T] => Node[T]) : Node[T] = fn(this) - override def drop(path:Path) : LeafNode[T] = this + override def drop(path:Path) : LeafNode[T] = { + require(path.segments.nonEmpty) + throw new NoSuchColumnException(path.toString) + } - override def keep(paths:Seq[Path]) : LeafNode[T] = this + override def keep(paths:Seq[Path]) : LeafNode[T] = { + paths.foreach { path => + if (path.nonEmpty) + throw new NoSuchColumnException(path.toString) + } + this + } } @@ -462,7 +473,7 @@ case class StructNode[T](name:String, value:Option[T], children:Seq[Node[T]], nu * @return */ override def drop(path:Path) : StructNode[T] = { - require(path.segments.nonEmpty) + require(path.nonEmpty) val segments = path.segments val head = segments.head.toLowerCase(Locale.ROOT) val tail = segments.tail @@ -489,24 +500,29 @@ case class StructNode[T](name:String, value:Option[T], children:Seq[Node[T]], nu * @return */ override def keep(paths:Seq[Path]) : StructNode[T] = { - if (paths.exists(p => p.segments.isEmpty || p.segments.head == "*")) { - // Special case: One path was empty, which implies we keep everything + require(paths.forall(_.nonEmpty)) + if (paths.exists(_.segments.head == "*")) { + // Special case: One path includes everything, which implies we keep everything this } else { val ht = paths.foldLeft(Map[String,Seq[Path]]()) { (map, path) => val head = path.segments.head if (contains(head)) { - val tail = Path(path.segments.tail) + val tail = 
path.tail val paths = map.get(head).map(_ :+ tail).getOrElse(Seq(tail)) map.updated(head, paths) } else { - map + throw new NoSuchColumnException(head) } } val newChildren = ht.map { case (head, tails) => - get(head).get.keep(tails) + val child = get(head).get + if (tails.exists(p => p.isEmpty || p.segments.head == "*")) + child + else + child.keep(tails) } replaceChildren(newChildren.filter(_.nonEmpty).toSeq) } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/types/AvroSchemaUtils.scala b/flowman-core/src/main/scala/com/dimajix/flowman/types/AvroSchemaUtils.scala index 46c81feda..c3c8ba6f5 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/types/AvroSchemaUtils.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/types/AvroSchemaUtils.scala @@ -41,9 +41,13 @@ import org.codehaus.jackson.node.IntNode import org.codehaus.jackson.node.LongNode import org.codehaus.jackson.node.NullNode import org.codehaus.jackson.node.TextNode +import org.slf4j.LoggerFactory +class AvroSchemaUtils object AvroSchemaUtils { + private val logger = LoggerFactory.getLogger(classOf[AvroSchemaUtils]) + /** * Convert a list of Flowman fields to an Avro (record) schema. Note that this logic should be compatible * to the Spark-Avro implementation! @@ -92,7 +96,7 @@ object AvroSchemaUtils { //case DurationType => case TimestampType => ASchema.create(LONG) - case DateType => ASchema.create(LONG) + case DateType => ASchema.create(INT) case DecimalType(p,s) => ASchema.create(STRING) case _ => throw new IllegalArgumentException(s"Type $ftype not supported in Avro schema") } @@ -129,60 +133,74 @@ object AvroSchemaUtils { * @param schema * @return */ - def fromAvro(schema: ASchema) : Seq[Field] = { + def fromAvro(schema: ASchema, forceNullable:Boolean=false) : Seq[Field] = { if (schema.getType != RECORD) throw new UnsupportedOperationException("Unexpected Avro top level type") - schema.getFields.asScala.map(AvroSchemaUtils.fromAvro) + schema.getFields.asScala.map(f => AvroSchemaUtils.fromAvro(f, forceNullable)) } - def fromAvro(field: AField) : Field = { - val (ftype,nullable) = fromAvroType(field.schema()) + def fromAvro(field: AField, forceNullable:Boolean) : Field = { + val (ftype,nullable) = fromAvroType(field.schema(), forceNullable) Field(field.name(), ftype, nullable, Option(field.doc())) } - private def fromAvroType(schema: ASchema): (FieldType,Boolean) = { + private def fromAvroType(schema: ASchema, forceNullable:Boolean): (FieldType,Boolean) = { schema.getType match { - case INT => (IntegerType, false) - case STRING => (StringType, false) - case BOOLEAN => (BooleanType, false) - case BYTES => (BinaryType, false) - case DOUBLE => (DoubleType, false) - case FLOAT => (FloatType, false) - case LONG => (LongType, false) - case FIXED => (BinaryType, false) - case ENUM => (StringType, false) + case INT => + Option(schema.getProp("logicalType")) match { + case Some("date") => (DateType, forceNullable) + case None => (IntegerType, forceNullable) + case Some(lt) => + logger.warn(s"Avro logical type '$lt' of type 'INT' not supported - simply using INT") + (IntegerType, forceNullable) + } + case STRING => (StringType, forceNullable) + case BOOLEAN => (BooleanType, forceNullable) + case BYTES => (BinaryType, forceNullable) + case DOUBLE => (DoubleType, forceNullable) + case FLOAT => (FloatType, forceNullable) + case LONG => + Option(schema.getProp("logicalType")) match { + case Some("timestamp-millis") => (TimestampType, forceNullable) + case None => (LongType, forceNullable) + case Some(lt) => + 
logger.warn(s"Avro logical type '$lt' of type 'LONG' not supported - simply using LONG") + (LongType, forceNullable) + } + case FIXED => (BinaryType, forceNullable) + case ENUM => (StringType, forceNullable) case RECORD => val fields = schema.getFields.asScala.map { f => - val (schemaType,nullable) = fromAvroType(f.schema()) + val (schemaType,nullable) = fromAvroType(f.schema(), forceNullable) Field(f.name, schemaType, nullable, Option(f.doc())) } - (StructType(fields), false) + (StructType(fields), forceNullable) case ARRAY => - val (schemaType, nullable) = fromAvroType(schema.getElementType) - (ArrayType(schemaType, nullable), false) + val (schemaType, nullable) = fromAvroType(schema.getElementType, forceNullable) + (ArrayType(schemaType, nullable), forceNullable) case MAP => - val (schemaType, nullable) = fromAvroType(schema.getValueType) - (MapType(StringType, schemaType, nullable), false) + val (schemaType, nullable) = fromAvroType(schema.getValueType, forceNullable) + (MapType(StringType, schemaType, nullable), forceNullable) case UNION => if (schema.getTypes.asScala.exists(_.getType == NULL)) { // In case of a union with null, eliminate it and make a recursive call val remainingUnionTypes = schema.getTypes.asScala.filterNot(_.getType == NULL) if (remainingUnionTypes.size == 1) { - (fromAvroType(remainingUnionTypes.head)._1, true) + (fromAvroType(remainingUnionTypes.head, forceNullable)._1, true) } else { - (fromAvroType(ASchema.createUnion(remainingUnionTypes.asJava))._1, true) + (fromAvroType(ASchema.createUnion(remainingUnionTypes.asJava), forceNullable)._1, true) } } else schema.getTypes.asScala.map(_.getType) match { case Seq(t1) => - fromAvroType(schema.getTypes.get(0)) + fromAvroType(schema.getTypes.get(0), forceNullable) case Seq(t1, t2) if Set(t1, t2) == Set(INT, LONG) => - (LongType, false) + (LongType, forceNullable) case Seq(t1, t2) if Set(t1, t2) == Set(FLOAT, DOUBLE) => - (DoubleType, false) + (DoubleType, forceNullable) case other => throw new UnsupportedOperationException( s"This mix of union types is not supported: $other") } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/types/ByteType.scala b/flowman-core/src/main/scala/com/dimajix/flowman/types/ByteType.scala index 08d74bd9c..7a017b3ed 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/types/ByteType.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/types/ByteType.scala @@ -22,5 +22,6 @@ import org.apache.spark.sql.types.DataType case object ByteType extends IntegralType[Byte] { protected def parseRaw(value:String) : Byte = value.toByte + override def sqlType: String = "tinyint" override def sparkType : DataType = org.apache.spark.sql.types.ByteType } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/types/FieldType.scala b/flowman-core/src/main/scala/com/dimajix/flowman/types/FieldType.scala index 6de2ebe46..684323702 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/types/FieldType.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/types/FieldType.scala @@ -39,7 +39,7 @@ object FieldType { Seq(NullType, DateType, TimestampType, BinaryType, IntegerType, BooleanType, LongType, DoubleType, FloatType, ShortType, ByteType, StringType, CalendarIntervalType, DurationType) .map(t => t.sqlType -> t).toMap ++ - Map("int" -> IntegerType, "text" -> StringType) + Map("byte" -> ByteType, "short" -> ShortType, "long" -> LongType, "int" -> IntegerType, "text" -> StringType) } private val FIXED_DECIMAL = """decimal\(\s*(\d+)\s*,\s*(\-?\d+)\s*\)""".r diff --git 
a/flowman-core/src/main/scala/com/dimajix/flowman/types/LongType.scala b/flowman-core/src/main/scala/com/dimajix/flowman/types/LongType.scala index 455035650..b5ce15e0b 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/types/LongType.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/types/LongType.scala @@ -22,5 +22,6 @@ import org.apache.spark.sql.types.DataType case object LongType extends IntegralType[Long] { protected def parseRaw(value:String) : Long = value.toLong + override def sqlType: String = "bigint" override def sparkType : DataType = org.apache.spark.sql.types.LongType } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/types/Record.scala b/flowman-core/src/main/scala/com/dimajix/flowman/types/Record.scala new file mode 100644 index 000000000..fd3609318 --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/types/Record.scala @@ -0,0 +1,124 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.types + +import scala.collection.JavaConverters._ + +import com.fasterxml.jackson.core.JsonParser +import com.fasterxml.jackson.core.JsonProcessingException +import com.fasterxml.jackson.databind.DeserializationContext +import com.fasterxml.jackson.databind.JsonMappingException +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.annotation.JsonDeserialize +import com.fasterxml.jackson.databind.deser.std.StdDeserializer +import com.fasterxml.jackson.databind.node.JsonNodeType + + +private class RecordDeserializer(vc:Class[_]) extends StdDeserializer[Record](vc) { + import java.io.IOException + + def this() = this(null) + + @throws[IOException] + @throws[JsonProcessingException] + def deserialize(jp: JsonParser, ctxt: DeserializationContext): Record = { + val node = jp.getCodec.readTree[JsonNode](jp) + node.getNodeType match { + case JsonNodeType.NULL => { + ValueRecord(null) + } + case JsonNodeType.BOOLEAN|JsonNodeType.NUMBER|JsonNodeType.STRING => { + ValueRecord(node.asText) + } + case JsonNodeType.ARRAY => { + val values = node.iterator().asScala.map { node => + node.getNodeType match { + case JsonNodeType.NULL => null + case JsonNodeType.BOOLEAN|JsonNodeType.NUMBER|JsonNodeType.STRING => node.asText + case _ => throw JsonMappingException.from(jp, "Wrong type for record") + } + }.toList + ArrayRecord(values) + } + case JsonNodeType.OBJECT => { + val values = node.fields().asScala.map { kv => + kv.getValue.getNodeType match { + case JsonNodeType.NULL => kv.getKey -> null + case JsonNodeType.BOOLEAN|JsonNodeType.NUMBER|JsonNodeType.STRING => kv.getKey -> kv.getValue.asText + case _ => throw JsonMappingException.from(jp, "Wrong type for record") + } + }.toMap + MapRecord(values) + } + case _ => throw JsonMappingException.from(jp, "Wrong type for record") + } + } +} + + +@JsonDeserialize(using=classOf[RecordDeserializer]) +sealed abstract class Record { + def toArray(schema:StructType) : Array[String] + def map(fn:String => 
String) : Record +} + +final case class ValueRecord(value:String) extends Record { + override def toArray(schema: StructType): Array[String] = { + // Append default values + val tail = schema.fields.tail.map(_.default.orNull).toArray + Array(value) ++ tail + } + override def map(fn:String => String) : ValueRecord = { + ValueRecord(fn(value)) + } +} + +object ArrayRecord { + def apply(field:String, fields:String*) : ArrayRecord = { + ArrayRecord(field +: fields) + } +} +final case class ArrayRecord(fields:Seq[String]) extends Record { + override def toArray(schema: StructType): Array[String] = { + if (fields.length >= schema.fields.length) { + // Either chop off trailing fields + fields.take(schema.fields.length).toArray + } + else { + // Or append default values + val tail = schema.fields.drop(fields.length).map(_.default.orNull).toArray + fields.toArray ++ tail + } + } + override def map(fn:String => String) : ArrayRecord = { + ArrayRecord(fields.map(fn)) + } +} + +object MapRecord { + def apply(value:(String,String), values:(String,String)*) : MapRecord = { + MapRecord((value +: values).toMap) + } +} +final case class MapRecord(values:Map[String,String]) extends Record { + override def toArray(schema: StructType): Array[String] = { + schema.fields.map(field => values.getOrElse(field.name, field.default.orNull)).toArray + } + override def map(fn:String => String) : MapRecord = { + MapRecord(values.map(kv => kv._1 -> fn(kv._2))) + } +} diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/types/ShortType.scala b/flowman-core/src/main/scala/com/dimajix/flowman/types/ShortType.scala index bec062b8d..23fecace4 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/types/ShortType.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/types/ShortType.scala @@ -22,5 +22,6 @@ import org.apache.spark.sql.types.DataType case object ShortType extends IntegralType[Short] { protected def parseRaw(value:String) : Short = value.toShort + override def sqlType: String = "smallint" override def sparkType : DataType = org.apache.spark.sql.types.ShortType } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/util/ConsoleColors.scala b/flowman-core/src/main/scala/com/dimajix/flowman/util/ConsoleColors.scala new file mode 100644 index 000000000..939315590 --- /dev/null +++ b/flowman-core/src/main/scala/com/dimajix/flowman/util/ConsoleColors.scala @@ -0,0 +1,44 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.util + +object ConsoleColors { + var disabled = false + + val NORMAL = Console.RESET + val CYAN = Console.CYAN + val CYAN_BOLD = Console.CYAN + Console.BOLD + val YELLOW = Console.YELLOW + val YELLOW_BOLD = Console.YELLOW + Console.BOLD + val RED = Console.RED + val RED_BOLD = Console.RED + Console.BOLD + val GREEN = Console.GREEN + val GREEN_BOLD = Console.GREEN + Console.BOLD + val WHITE = Console.WHITE + val WHITE_BOLD = Console.WHITE + Console.BOLD + + def white(str:String) : String = if (disabled) str else WHITE + str + NORMAL + def boldWhite(str:String) : String = if (disabled) str else WHITE_BOLD + str + NORMAL + def green(str:String) : String = if (disabled) str else GREEN + str + NORMAL + def boldGreen(str:String) : String = if (disabled) str else GREEN_BOLD + str + NORMAL + def red(str:String) : String = if (disabled) str else RED + str + NORMAL + def boldRed(str:String) : String = if (disabled) str else RED_BOLD + str + NORMAL + def yellow(str:String) : String = if (disabled) str else YELLOW + str + NORMAL + def boldYellow(str:String) : String = if (disabled) str else YELLOW_BOLD + str + NORMAL + def cyan(str:String) : String = if (disabled) str else CYAN + str + NORMAL + def boldCyan(str:String) : String = if (disabled) str else CYAN_BOLD + str + NORMAL +} diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/ConsoleUtils.scala b/flowman-core/src/main/scala/com/dimajix/flowman/util/ConsoleUtils.scala similarity index 68% rename from flowman-tools/src/main/scala/com/dimajix/flowman/tools/ConsoleUtils.scala rename to flowman-core/src/main/scala/com/dimajix/flowman/util/ConsoleUtils.scala index b266d3345..63dec951a 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/ConsoleUtils.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/util/ConsoleUtils.scala @@ -14,12 +14,41 @@ * limitations under the License. 
*/ -package com.dimajix.flowman.tools +package com.dimajix.flowman.util +import java.io.OutputStreamWriter + +import com.univocity.parsers.csv.CsvWriter +import com.univocity.parsers.csv.CsvWriterSettings import org.apache.commons.lang3.StringUtils +import org.apache.spark.sql.DataFrame object ConsoleUtils { + def showDataFrame(df:DataFrame, limit: Int = 100, csv:Boolean=false) : Unit = { + if (csv) { + val result = df.limit(limit).collect() + val writer = new OutputStreamWriter(Console.out) + try { + val csvWriter = new CsvWriter(writer, new CsvWriterSettings()) + csvWriter.writeHeaders(df.columns: _*) + result.foreach { record => + val fields = record.toSeq.map { + case null => null + case f => f.toString + } + csvWriter.writeRow(fields: _*) + } + } + finally { + writer.flush() + } + } + else { + df.show(limit) + } + } + def showTable(records:Seq[Product], columns:Seq[String]) : Unit = { println(showTableString(records, columns)) } diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/util/SchemaUtils.scala b/flowman-core/src/main/scala/com/dimajix/flowman/util/SchemaUtils.scala index 9a77c4b47..2abbcb5d1 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/util/SchemaUtils.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/util/SchemaUtils.scala @@ -73,21 +73,28 @@ object SchemaUtils { * @param schema * @return */ - def applySchema(df:DataFrame, schema:Option[StructType]) : DataFrame = { + def applySchema(df:DataFrame, schema:Option[StructType], insertNulls:Boolean=true) : DataFrame = { require(df != null) require(schema != null) - val dfFieldsByName = df.schema.map(f => f.name.toLowerCase(Locale.ROOT) -> f).toMap - schema - .map { schema => - val columns = schema.map { field => - dfFieldsByName.get(field.name.toLowerCase(Locale.ROOT)) - .map(_ => df(field.name).cast(field.dataType).as(field.name, field.metadata)) - .getOrElse(lit(null).cast(field.dataType).as(field.name, field.metadata)) - } - df.select(columns: _*) + def applySchema(df:DataFrame, schema:StructType, insertNulls:Boolean) : DataFrame = { + val dfFieldsByName = df.schema.map(f => f.name.toLowerCase(Locale.ROOT) -> f).toMap + val columns = schema.map { field => + dfFieldsByName.get(field.name.toLowerCase(Locale.ROOT)) + .map(_ => df(field.name).cast(field.dataType).as(field.name, field.metadata)) + .getOrElse { + if (!insertNulls) + throw new IllegalArgumentException(s"Missing column '${field.name}' in input DataFrame") + lit(null).cast(field.dataType).as(field.name, field.metadata) + } } - .getOrElse(df) + df.select(columns: _*) + } + + schema match { + case Some(s) => applySchema(df, s, insertNulls) + case None => df + } } /** diff --git a/flowman-core/src/main/scala/com/dimajix/flowman/util/UtcTimestamp.scala b/flowman-core/src/main/scala/com/dimajix/flowman/util/UtcTimestamp.scala index be065de8f..25488484e 100644 --- a/flowman-core/src/main/scala/com/dimajix/flowman/util/UtcTimestamp.scala +++ b/flowman-core/src/main/scala/com/dimajix/flowman/util/UtcTimestamp.scala @@ -98,7 +98,7 @@ object UtcTimestamp { * which uses local date time * @param dt */ -class UtcTimestamp(dt:LocalDateTime) { +case class UtcTimestamp(dt:LocalDateTime) { import UtcTimestamp.formatter def this(msecs:Long) = { diff --git a/flowman-core/src/test/scala/com/dimajix/common/IdentityHashMapTest.scala b/flowman-core/src/test/scala/com/dimajix/common/IdentityHashMapTest.scala new file mode 100644 index 000000000..3fef6473f --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/common/IdentityHashMapTest.scala @@ -0,0 
+1,45 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.common + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.common.IdentityHashMapTest.SomeClass + +object IdentityHashMapTest { + case class SomeClass(value:Int) +} + +class IdentityHashMapTest extends AnyFlatSpec with Matchers { + "The IdentityHashMap" should "work" in { + val map = IdentityHashMap[SomeClass,String]() + + val key = SomeClass(3) + key should be (SomeClass(3)) + + map.put(key, "three") + map.contains(key) should be (true) + map.contains(SomeClass(3)) should be (false) + } + + it should "provide empty maps" in { + val map = IdentityHashMap[SomeClass,String]() + + map.empty should be (IdentityHashMap.empty) + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/common/IdentityHashSetTest.scala b/flowman-core/src/test/scala/com/dimajix/common/IdentityHashSetTest.scala new file mode 100644 index 000000000..2f153ad11 --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/common/IdentityHashSetTest.scala @@ -0,0 +1,46 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.common + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.common.IdentityHashSetTest.SomeClass + + +object IdentityHashSetTest { + case class SomeClass(value:Int) +} + +class IdentityHashSetTest extends AnyFlatSpec with Matchers { + "The IdentityHashSet" should "work" in { + val set = IdentityHashSet[SomeClass]() + + val key = SomeClass(3) + key should be (SomeClass(3)) + + set.add(key) + set.contains(key) should be (true) + set.contains(SomeClass(3)) should be (false) + } + + it should "provide empty sets" in { + val map = IdentityHashSet[SomeClass]() + + map.empty should be (IdentityHashSet.empty) + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/common/MapIgnoreCaseTest.scala b/flowman-core/src/test/scala/com/dimajix/common/MapIgnoreCaseTest.scala new file mode 100644 index 000000000..2980378cc --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/common/MapIgnoreCaseTest.scala @@ -0,0 +1,106 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.dimajix.common + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + + +class MapIgnoreCaseTest extends AnyFlatSpec with Matchers { + "The MapIgnoreCase" should "work" in { + val map = MapIgnoreCase( + "a" -> 1, + "B" -> 2, + "X" -> 3 + ) + + map.contains("a") should be (true) + map.contains("A") should be (true) + map.contains("b") should be (true) + map.contains("B") should be (true) + map.contains("c") should be (false) + map.contains("C") should be (false) + + map.keys should be (Set("a", "b", "x")) + } + + it should "be constructable from a single element" in { + val map = MapIgnoreCase( + "a" -> 1 + ) + + map should be (MapIgnoreCase("a" -> 1)) + } + + it should "be constructable from a traditional Map" in { + val map = MapIgnoreCase(Map( + "a" -> 1, + "B" -> 2 + )) + + map should be (MapIgnoreCase("a" -> 1,"B" -> 2)) + } + + it should "be constructable from a traditional Seq" in { + val map = MapIgnoreCase(Seq( + "a" -> 1, + "B" -> 2 + )) + + map should be (MapIgnoreCase("a" -> 1,"B" -> 2)) + map should be (MapIgnoreCase("A" -> 1,"b" -> 2)) + } + + it should "support adding elements" in { + MapIgnoreCase("a" -> 1) + ("b" -> 2) should be (MapIgnoreCase("a" -> 1,"B" -> 2)) + MapIgnoreCase("a" -> 1) + ("a" -> 2) should be (MapIgnoreCase("a" -> 2)) + MapIgnoreCase("a" -> 1) + ("A" -> 2) should be (MapIgnoreCase("a" -> 2)) + } + + it should "support removing elements" in { + MapIgnoreCase("a" -> 1) - "b" should be (MapIgnoreCase[Int]("a" -> 1)) + MapIgnoreCase("a" -> 1) - "a" should be (MapIgnoreCase[Int]()) + MapIgnoreCase("a" -> 1) - "A" should be (MapIgnoreCase[Int]()) + } + + it should "support converting to Seq" in { + val map = MapIgnoreCase[Int](Seq( + "a" -> 1, + "B" -> 2 + )) + + map.toSeq should be (Seq("a" -> 1, "B" -> 2)) + } + + it should "support converting to Map" in { + val map = MapIgnoreCase[Int](Map( + "a" -> 1, + "B" -> 2 + )) + + map.toMap should be (Map("a" -> 1, "B" -> 2)) + } + + it should "support mapValues" in { + val map = MapIgnoreCase[Int](Seq( + "a" -> 1, + "B" -> 2 + )) + + map.mapValues(v => 2*v) should be (MapIgnoreCase("a" -> 2, "B" -> 4)) + map.mapValues(v => 2*v).toSeq should be (Seq("a" -> 2, "B" -> 4)) + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/common/TrileanTest.scala b/flowman-core/src/test/scala/com/dimajix/common/TrileanTest.scala index 2af906278..1bf695083 100644 --- a/flowman-core/src/test/scala/com/dimajix/common/TrileanTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/common/TrileanTest.scala @@ -16,11 +16,11 @@ package com.dimajix.common -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class TrileanTest extends FlatSpec with Matchers { +class TrileanTest extends AnyFlatSpec with Matchers { "Trileans" should "provide toString" in { No.toString should be ("no") Yes.toString should be ("yes") diff --git a/flowman-core/src/test/scala/com/dimajix/common/text/CaseUtilsTest.scala 
b/flowman-core/src/test/scala/com/dimajix/common/text/CaseUtilsTest.scala index c9b55aa7c..c1255fba0 100644 --- a/flowman-core/src/test/scala/com/dimajix/common/text/CaseUtilsTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/common/text/CaseUtilsTest.scala @@ -16,11 +16,11 @@ package com.dimajix.common.text -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class CaseUtilsTest extends FlatSpec with Matchers { +class CaseUtilsTest extends AnyFlatSpec with Matchers { "CaseUtils" should "support generic splitting" in { CaseUtils.splitGeneric("ThisIsATest") should be (Seq("This", "Is", "ATest")) CaseUtils.splitGeneric("thisIsATest") should be (Seq("this", "Is", "ATest")) diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/DateTimeFormatTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/DateTimeFormatTest.scala index 8a8675c75..d6e941e69 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/DateTimeFormatTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/DateTimeFormatTest.scala @@ -20,13 +20,13 @@ import java.time.Instant import java.time.ZoneId import java.time.format.DateTimeFormatter -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers /** * Created by kaya on 11.10.16. */ -class DateTimeFormatTest extends FlatSpec with Matchers { +class DateTimeFormatTest extends AnyFlatSpec with Matchers { "The instant" should "be formattable in UTC" in { val instant = Instant.ofEpochSecond(123456789l).atZone(ZoneId.of("UTC")) val formatter = DateTimeFormatter.ofPattern("yyyy/MM/dd/HH") diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/SparkSessionTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/SparkSessionTest.scala index e320b3e66..7d9a8d117 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/SparkSessionTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/SparkSessionTest.scala @@ -16,12 +16,13 @@ package com.dimajix.flowman -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.testing.LocalSparkSession -class SparkSessionTest extends FlatSpec with Matchers with LocalSparkSession { + +class SparkSessionTest extends AnyFlatSpec with Matchers with LocalSparkSession { "A SparkSession" should "have configurable properties" in { spark.conf.set("spark.sql.prop1", "p1") spark.conf.get("spark.sql.prop1") should be ("p1") diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/SqlExpressionTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/SqlExpressionTest.scala index c4400b151..72e128771 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/SqlExpressionTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/SqlExpressionTest.scala @@ -17,11 +17,11 @@ package com.dimajix.flowman import org.apache.spark.sql.catalyst.parser.CatalystSqlParser -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class SqlExpressionTest extends FlatSpec with Matchers { +class SqlExpressionTest extends AnyFlatSpec with Matchers { "The SqlParser" should "parse simple columns" in { val parser = CatalystSqlParser val result = parser.parseExpression("some_column") diff --git 
a/flowman-core/src/test/scala/com/dimajix/flowman/catalog/PartitionSpecTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/catalog/PartitionSpecTest.scala index aaa921d8c..4a0892c44 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/catalog/PartitionSpecTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/catalog/PartitionSpecTest.scala @@ -17,11 +17,11 @@ package com.dimajix.flowman.catalog import org.apache.hadoop.fs.Path -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class PartitionSpecTest extends FlatSpec with Matchers { +class PartitionSpecTest extends AnyFlatSpec with Matchers { "The PartitionSpec" should "provide a Hive compatible path" in { val partitionSpec = PartitionSpec(Map( "p1" -> "lala", diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/config/FlowmanConfTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/config/FlowmanConfTest.scala new file mode 100644 index 000000000..052df2c14 --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/config/FlowmanConfTest.scala @@ -0,0 +1,41 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.config + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.SimpleExecutor + + +class FlowmanConfTest extends AnyFlatSpec with Matchers { + "The FlowmanConf" should "work with ints" in { + val conf = new FlowmanConf(Map("some_int" -> "32")) + conf.getConf(FlowmanConf.DEFAULT_TARGET_PARALLELISM) should be (16) + conf.get("some_int") should be ("32") + } + + it should "work with classes" in { + val conf = new FlowmanConf(Map()) + val clazz = conf.getConf(FlowmanConf.EXECUTION_EXECUTOR_CLASS) + + clazz should be (classOf[SimpleExecutor]) + val ctor = clazz.getDeclaredConstructor() + val executor = ctor.newInstance() + executor shouldBe a[SimpleExecutor] + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/execution/ContextTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/execution/ContextTest.scala index 8adeb0d1a..a10380a46 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/execution/ContextTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/execution/ContextTest.scala @@ -17,11 +17,11 @@ package com.dimajix.flowman.execution import org.apache.velocity.exception.MethodInvocationException -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class ContextTest extends FlatSpec with Matchers { +class ContextTest extends AnyFlatSpec with Matchers { "Evaluation" should "work (1)" in { val context = RootContext.builder() .withEnvironment(Map("env_1" -> "value_1"), SettingLevel.NONE) diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/execution/EnvironmentTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/execution/EnvironmentTest.scala new file mode 100644 index 000000000..d9196c08a --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/execution/EnvironmentTest.scala @@ -0,0 +1,103 @@ +/* + * Copyright 2018 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.execution + +import java.util.NoSuchElementException + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + + +class EnvironmentTest extends AnyFlatSpec with Matchers { + "The Environment" should "provide access to some system variables" in { + val environment = new Environment(Map()) + + environment.evaluate("${System.getenv('PATH')}") should be (System.getenv("PATH")) + environment.evaluate("${System.getenv('NO_SUCH_ENV')}") should be ("") + environment.evaluate("${System.getenv('NO_SUCH_ENV', 'default')}") should be ("default") + + environment.evaluate("$System.getenv('PATH')") should be (System.getenv("PATH")) + environment.evaluate("$System.getenv('NO_SUCH_ENV')") should be ("") + environment.evaluate("$System.getenv('NO_SUCH_ENV', 'default')") should be ("default") + } + + it should "provide access to Java Integer class" in { + val environment = new Environment(Map()) + + environment.evaluate("${Integer.parse('2')}") should be ("2") + environment.evaluate("${Integer.valueOf('2')}") should be ("2") + } + + it should "provide access to Java Float class" in { + val environment = new Environment(Map()) + + environment.evaluate("${Float.parse('2')}") should be ("2.0") + environment.evaluate("${Float.valueOf('2')}") should be ("2.0") + } + + it should "provide access to Java Duration class" in { + val environment = new Environment(Map()) + + environment.evaluate("${Duration.ofDays(2)}") should be ("PT48H") + environment.evaluate("${Duration.parse('P2D').getSeconds()}") should be ("172800") + } + + it should "support evaluation of variables" in { + val environment = new Environment(Map("var1" -> "val1")) + + environment.evaluate(null:String) should be (null) + environment.evaluate("$var1") should be ("val1") + environment.evaluate("$var1 + $var2", Map("var2" -> "val2")) should be ("val1 + val2") + environment.evaluate("$var1", Map("var1" -> "val2")) should be ("val2") + } + + it should "directly work with Options" in { + val environment = new Environment(Map("var1" -> "val1")) + + environment.evaluate(Some("$var1")) should be (Some("val1")) + environment.evaluate(None) should be (None) + environment.evaluate(Some("$var1 + $var2"), Map("var2" -> "val2")) should be (Some("val1 + val2")) + environment.evaluate(None, Map("var2" -> "val2")) should be (None) + } + + it should "directly work with Maps" in { + val environment = new Environment(Map("var1" -> "val1")) + + environment.evaluate(Map("a" -> "$var1", "b" -> "b")) should be(Map("a" -> "val1", "b" -> "b")) + environment.evaluate(Map("a" -> "$var1", "b" -> "$var2"), Map("var2" -> "val2")) should be(Map("a" -> "val1", "b" -> "val2")) + } + + it should "provide access to its key-value map" in { + val environment = new Environment(Map("var1" -> "val1", "var2" -> "_${var1}_")) + + environment.toMap should be (Map("var1" -> "val1", "var2" -> "_val1_")) + environment.toSeq.sortBy(_._1) should be (Seq("var1" -> "val1", "var2" -> "_val1_").sortBy(_._1)) + environment.keys should be (Set("var1", "var2")) + } + + it should "provide map-like access" in { + val environment = new Environment(Map("var1" -> "val1", "var2" -> "_${var1}_")) + + environment("var1") should be ("val1") + environment("var2") should be ("_val1_") + a[NoSuchElementException] should be thrownBy(environment("no_such_variable")) + + environment.get("var1") should be (Some("val1")) + environment.get("var2") should be (Some("_val1_")) + environment.get("no_such_variable") should be (None) + } +} diff --git 
a/flowman-core/src/test/scala/com/dimajix/flowman/execution/MappingUtilsTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/execution/MappingUtilsTest.scala new file mode 100644 index 000000000..67f1bdb85 --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/execution/MappingUtilsTest.scala @@ -0,0 +1,96 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.execution + +import org.apache.hadoop.fs.Path +import org.apache.spark.sql.DataFrame +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.MappingUtilsTest.DummyMappingSpec +import com.dimajix.flowman.model.BaseMapping +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.ResourceIdentifier +import com.dimajix.flowman.model.Template + + +object MappingUtilsTest { + case class DummyMapping( + override val context: Context, + override val name: String, + override val inputs: Seq[MappingOutputIdentifier], + override val requires: Set[ResourceIdentifier] + ) extends BaseMapping { + protected override def instanceProperties: Mapping.Properties = Mapping.Properties(context, name) + override def execute(execution: Execution, input: Map[MappingOutputIdentifier, DataFrame]): Map[String, DataFrame] = ??? 
+ } + + case class DummyMappingSpec( + name: String, + inputs: Seq[MappingOutputIdentifier], + requires: Set[ResourceIdentifier] + ) extends Template[Mapping] { + override def instantiate(context: Context): Mapping = DummyMapping(context, name, inputs, requires) + } +} + +class MappingUtilsTest extends AnyFlatSpec with Matchers { + "The MappingUtils" should "collect all requirements of a mapping" in { + val project = Project( + "test", + mappings = Map( + "m1" -> DummyMappingSpec( + "m1", + Seq(), + Set(ResourceIdentifier.ofFile(new Path("file1"))) + ), + "m2" -> DummyMappingSpec( + "m2", + Seq(), + Set(ResourceIdentifier.ofFile(new Path("file1")), ResourceIdentifier.ofFile(new Path("file2"))) + ), + "m3" -> DummyMappingSpec( + "m3", + Seq(MappingOutputIdentifier("m1")), + Set(ResourceIdentifier.ofFile(new Path("file3"))) + ), + "m4" -> DummyMappingSpec( + "m4", + Seq(MappingOutputIdentifier("m1"), MappingOutputIdentifier("m2"), MappingOutputIdentifier("m3")), + Set() + ) + ) + ) + + val session = Session.builder() + .build() + + val context = session.getContext(project) + val mapping = context.getMapping(MappingIdentifier("m4")) + + val expected = Set( + ResourceIdentifier.ofFile(new Path("file1")), + ResourceIdentifier.ofFile(new Path("file2")), + ResourceIdentifier.ofFile(new Path("file3")) + ) + MappingUtils.requires(mapping) should be (expected) + MappingUtils.requires(context, MappingIdentifier("m4")) should be (expected) + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/execution/OutputModeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/execution/OutputModeTest.scala index 95300585d..6542d05d4 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/execution/OutputModeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/execution/OutputModeTest.scala @@ -16,11 +16,11 @@ package com.dimajix.flowman.execution -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class OutputModeTest extends FlatSpec with Matchers { +class OutputModeTest extends AnyFlatSpec with Matchers { "The OutputMode" should "parse correctly" in { OutputMode.ofString("OVERWRITE") should be (OutputMode.OVERWRITE) OutputMode.ofString("overwrite") should be (OutputMode.OVERWRITE) diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/execution/ProjectContextTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/execution/ProjectContextTest.scala index bff2cdddd..ceab99d92 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/execution/ProjectContextTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/execution/ProjectContextTest.scala @@ -17,14 +17,14 @@ package com.dimajix.flowman.execution import org.apache.hadoop.conf.Configuration -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.hadoop.FileSystem import com.dimajix.flowman.model.Project -class ProjectContextTest extends FlatSpec with Matchers { +class ProjectContextTest extends AnyFlatSpec with Matchers { "The ProjectContext" should "provide Project related vars" in { val project = Project( name = "my_project", diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/execution/RootContextTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/execution/RootContextTest.scala index 7f56363ec..17c74a3d0 100644 --- 
a/flowman-core/src/test/scala/com/dimajix/flowman/execution/RootContextTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/execution/RootContextTest.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018 Kaya Kupferschmidt + * Copyright 2021 Kaya Kupferschmidt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,49 +16,222 @@ package com.dimajix.flowman.execution -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import com.dimajix.flowman.model.Connection +import com.dimajix.flowman.model.ConnectionIdentifier +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.Namespace +import com.dimajix.flowman.model.Profile +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.Relation +import com.dimajix.flowman.model.RelationIdentifier +import com.dimajix.flowman.model.Template + + +class RootContextTest extends AnyFlatSpec with Matchers with MockFactory { + "The RootContext" should "apply profiles" in { + val namespace = Namespace( + name = "default", + profiles = Map( + "profile" -> Profile(name="profile") + ) + ) + val project = Project( + name = "my_project", + profiles = Map( + "profile" -> Profile(name="profile") + ) + ) -class RootContextTest extends FlatSpec with Matchers { - "The RootContext" should "provide access to some system variables" in { val session = Session.builder() + .withNamespace(namespace) + .withProfile("profile") + .withProfile("profile2") .build() - val context = session.context - context.evaluate("${System.getenv('PATH')}") should be (System.getenv("PATH")) - context.evaluate("${System.getenv('NO_SUCH_ENV')}") should be ("") - context.evaluate("${System.getenv('NO_SUCH_ENV', 'default')}") should be ("default") + val rootContext = session.context + rootContext.profiles should be (Set("profile", "profile2")) - context.evaluate("$System.getenv('PATH')") should be (System.getenv("PATH")) - context.evaluate("$System.getenv('NO_SUCH_ENV')") should be ("") - context.evaluate("$System.getenv('NO_SUCH_ENV', 'default')") should be ("default") + val projectContext = session.getContext(project) + projectContext.profiles should be (Set("profile", "profile2")) } - it should "provide access to Java Integer class" in { + it should "correctly lookup connections in profiles" in { + val namespaceConnectionTemplate = mock[Template[Connection]] + val namespaceConnection = mock[Connection] + val namespaceProfileConnectionTemplate = mock[Template[Connection]] + val namespaceProfileConnection = mock[Connection] + val namespace = Namespace( + name = "default", + connections = Map("con_namespace" -> namespaceConnectionTemplate), + profiles = Map( + "profile" -> Profile( + name="profile", + connections = Map("con_namespace_profile" -> namespaceProfileConnectionTemplate) + ) + ) + ) + + val projectConnectionTemplate = mock[Template[Connection]] + val projectConnection = mock[Connection] + val projectProfileConnectionTemplate = mock[Template[Connection]] + val projectProfileConnection = mock[Connection] + val project = Project( + name = "my_project", + connections = Map("con_project" -> projectConnectionTemplate), + profiles = Map( + "profile" -> Profile( + name="profile", + connections = Map("con_project_profile" -> projectProfileConnectionTemplate) + ) + ) + ) + val 
session = Session.builder() + .withNamespace(namespace) + .withProfile("profile") + .withProfile("profile2") .build() - val context = session.context - context.evaluate("${Integer.parse('2')}") should be ("2") - context.evaluate("${Integer.valueOf('2')}") should be ("2") + // Access everything via root context + val rootContext = session.context + (namespaceConnectionTemplate.instantiate _).expects(rootContext).returns(namespaceConnection) + rootContext.getConnection(ConnectionIdentifier("con_namespace")) should be (namespaceConnection) + (namespaceProfileConnectionTemplate.instantiate _).expects(rootContext).returns(namespaceProfileConnection) + rootContext.getConnection(ConnectionIdentifier("con_namespace_profile")) should be (namespaceProfileConnection) + a[NoSuchConnectionException] should be thrownBy (rootContext.getConnection(ConnectionIdentifier("con_project"))) + a[NoSuchConnectionException] should be thrownBy (rootContext.getConnection(ConnectionIdentifier("con_project_profile"))) + a[NoSuchProjectException] should be thrownBy (rootContext.getConnection(ConnectionIdentifier("my_project/con_project"))) + a[NoSuchProjectException] should be thrownBy (rootContext.getConnection(ConnectionIdentifier("my_project/con_namespace"))) + a[NoSuchProjectException] should be thrownBy (rootContext.getConnection(ConnectionIdentifier("my_project/con_namespace_profile"))) + a[NoSuchProjectException] should be thrownBy (rootContext.getConnection(ConnectionIdentifier("my_project/con_project_profile"))) + + // Access everything via project context + val projectContext = session.getContext(project) + projectContext.getConnection(ConnectionIdentifier("con_namespace")) should be (namespaceConnection) + projectContext.getConnection(ConnectionIdentifier("con_namespace_profile")) should be (namespaceProfileConnection) + (projectConnectionTemplate.instantiate _).expects(projectContext).returns(projectConnection) + projectContext.getConnection(ConnectionIdentifier("con_project")) should be (projectConnection) + (projectProfileConnectionTemplate.instantiate _).expects(projectContext).returns(projectProfileConnection) + projectContext.getConnection(ConnectionIdentifier("con_project_profile")) should be (projectProfileConnection) + projectContext.getConnection(ConnectionIdentifier("my_project/con_project")) should be (projectConnection) + a[NoSuchConnectionException] should be thrownBy (projectContext.getConnection(ConnectionIdentifier("my_project/con_namespace"))) + a[NoSuchConnectionException] should be thrownBy (projectContext.getConnection(ConnectionIdentifier("my_project/con_namespace_profile"))) + projectContext.getConnection(ConnectionIdentifier("my_project/con_project_profile")) should be (projectProfileConnection) + a[NoSuchProjectException] should be thrownBy (projectContext.getConnection(ConnectionIdentifier("no_such_project/con_project_profile"))) + + // Again try to access project resources after its context has been created + rootContext.getConnection(ConnectionIdentifier("my_project/con_project")) should be (projectConnection) + a[NoSuchConnectionException] should be thrownBy (rootContext.getConnection(ConnectionIdentifier("my_project/con_namespace"))) + a[NoSuchConnectionException] should be thrownBy (rootContext.getConnection(ConnectionIdentifier("my_project/con_namespace_profile"))) + rootContext.getConnection(ConnectionIdentifier("my_project/con_project_profile")) should be (projectProfileConnection) } - it should "provide access to Java Float class" in { + it should "support override 
mappings" in { + val projectMapping1 = mock[Mapping] + val projectMappingTemplate1 = mock[Template[Mapping]] + val projectMapping2 = mock[Mapping] + val projectMappingTemplate2 = mock[Template[Mapping]] + val overrideMapping = mock[Mapping] + val overrideMappingTemplate = mock[Template[Mapping]] + + val project = Project( + name = "my_project", + mappings = Map( + "m1" -> projectMappingTemplate1, + "m2" -> projectMappingTemplate2 + ) + ) + val session = Session.builder() + .withProfile("profile") .build() - val context = session.context + val rootContext = RootContext.builder(session.context) + .overrideMappings(Map(MappingIdentifier("my_project/m2") -> overrideMappingTemplate)) + .build() + + // Access everything via root context + a[NoSuchMappingException] should be thrownBy (rootContext.getMapping(MappingIdentifier("m1"))) + a[NoSuchProjectException] should be thrownBy (rootContext.getMapping(MappingIdentifier("my_project/m1"))) + a[NoSuchMappingException] should be thrownBy (rootContext.getMapping(MappingIdentifier("m2"))) + a[NoSuchProjectException] should be thrownBy (rootContext.getMapping(MappingIdentifier("my_project/m2"))) - context.evaluate("${Float.parse('2')}") should be ("2.0") - context.evaluate("${Float.valueOf('2')}") should be ("2.0") + // Access everything via project context + val projectContext = rootContext.getProjectContext(project) + (projectMappingTemplate1.instantiate _).expects(projectContext).returns(projectMapping1) + projectContext.getMapping(MappingIdentifier("m1")) should be (projectMapping1) + projectContext.getMapping(MappingIdentifier("my_project/m1")) should be (projectMapping1) + projectContext.getMapping(MappingIdentifier("m1"), false) should be (projectMapping1) + projectContext.getMapping(MappingIdentifier("my_project/m1"), false) should be (projectMapping1) + (overrideMappingTemplate.instantiate _).expects(projectContext).returns(overrideMapping) + projectContext.getMapping(MappingIdentifier("m2")) should be (overrideMapping) + projectContext.getMapping(MappingIdentifier("my_project/m2")) should be (overrideMapping) + (projectMappingTemplate2.instantiate _).expects(projectContext).returns(projectMapping2) + projectContext.getMapping(MappingIdentifier("m2"), false) should be (projectMapping2) + projectContext.getMapping(MappingIdentifier("my_project/m2"), false) should be (projectMapping2) + + // Again try to access project resources after its context has been created + a[NoSuchMappingException] should be thrownBy (rootContext.getMapping(MappingIdentifier("m1"))) + a[NoSuchMappingException] should be thrownBy (rootContext.getMapping(MappingIdentifier("m2"))) + rootContext.getMapping(MappingIdentifier("my_project/m1")) should be (projectMapping1) + rootContext.getMapping(MappingIdentifier("my_project/m1"), false) should be (projectMapping1) + rootContext.getMapping(MappingIdentifier("my_project/m2")) should be (overrideMapping) + rootContext.getMapping(MappingIdentifier("my_project/m2"), false) should be (projectMapping2) } - it should "provide access to Java Duration class" in { + it should "support override relations" in { + val projectRelation1 = mock[Relation] + val projectRelationTemplate1 = mock[Template[Relation]] + val projectRelation2 = mock[Relation] + val projectRelationTemplate2 = mock[Template[Relation]] + val overrideRelation = mock[Relation] + val overrideRelationTemplate = mock[Template[Relation]] + + val project = Project( + name = "my_project", + relations = Map( + "m1" -> projectRelationTemplate1, + "m2" -> projectRelationTemplate2 
+ ) + ) + val session = Session.builder() + .withProfile("profile") + .build() + val rootContext = RootContext.builder(session.context) + .overrideRelations(Map(RelationIdentifier("my_project/m2") -> overrideRelationTemplate)) .build() - val context = session.context - context.evaluate("${Duration.ofDays(2)}") should be ("PT48H") - context.evaluate("${Duration.parse('P2D').getSeconds()}") should be ("172800") + // Access everything via root context + a[NoSuchRelationException] should be thrownBy (rootContext.getRelation(RelationIdentifier("m1"))) + a[NoSuchProjectException] should be thrownBy (rootContext.getRelation(RelationIdentifier("my_project/m1"))) + a[NoSuchRelationException] should be thrownBy (rootContext.getRelation(RelationIdentifier("m2"))) + a[NoSuchProjectException] should be thrownBy (rootContext.getRelation(RelationIdentifier("my_project/m2"))) + + // Access everything via project context + val projectContext = rootContext.getProjectContext(project) + (projectRelationTemplate1.instantiate _).expects(projectContext).returns(projectRelation1) + projectContext.getRelation(RelationIdentifier("m1")) should be (projectRelation1) + projectContext.getRelation(RelationIdentifier("my_project/m1")) should be (projectRelation1) + projectContext.getRelation(RelationIdentifier("m1"), false) should be (projectRelation1) + projectContext.getRelation(RelationIdentifier("my_project/m1"), false) should be (projectRelation1) + (overrideRelationTemplate.instantiate _).expects(projectContext).returns(overrideRelation) + projectContext.getRelation(RelationIdentifier("m2")) should be (overrideRelation) + projectContext.getRelation(RelationIdentifier("my_project/m2")) should be (overrideRelation) + (projectRelationTemplate2.instantiate _).expects(projectContext).returns(projectRelation2) + projectContext.getRelation(RelationIdentifier("m2"), false) should be (projectRelation2) + projectContext.getRelation(RelationIdentifier("my_project/m2"), false) should be (projectRelation2) + + // Again try to access project resources after its context has been created + a[NoSuchRelationException] should be thrownBy (rootContext.getRelation(RelationIdentifier("m1"))) + a[NoSuchRelationException] should be thrownBy (rootContext.getRelation(RelationIdentifier("m2"))) + rootContext.getRelation(RelationIdentifier("my_project/m1")) should be (projectRelation1) + rootContext.getRelation(RelationIdentifier("my_project/m1"), false) should be (projectRelation1) + rootContext.getRelation(RelationIdentifier("my_project/m2")) should be (overrideRelation) + rootContext.getRelation(RelationIdentifier("my_project/m2"), false) should be (projectRelation2) } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/execution/RunnerTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/execution/RunnerHistoryTest.scala similarity index 67% rename from flowman-core/src/test/scala/com/dimajix/flowman/execution/RunnerTest.scala rename to flowman-core/src/test/scala/com/dimajix/flowman/execution/RunnerHistoryTest.scala index 44067f15d..d05966812 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/execution/RunnerTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/execution/RunnerHistoryTest.scala @@ -19,39 +19,30 @@ package com.dimajix.flowman.execution import java.nio.file.Files import java.nio.file.Path -import scala.collection.immutable.Stream.Empty.force import scala.util.Random -import org.mockito.ArgumentMatchers.any -import org.mockito.ArgumentMatchers.isA import 
org.scalamock.scalatest.MockFactory import org.scalatest.BeforeAndAfter -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.config.FlowmanConf.EXECUTION_TARGET_FORCE_DIRTY +import com.dimajix.flowman.execution.RunnerHistoryTest.NullTarget import com.dimajix.flowman.history.JdbcStateStore import com.dimajix.flowman.model.BaseTarget -import com.dimajix.flowman.model.Hook import com.dimajix.flowman.model.Job -import com.dimajix.flowman.model.JobInstance -import com.dimajix.flowman.model.JobWrapper import com.dimajix.flowman.model.Metadata import com.dimajix.flowman.model.Namespace -import com.dimajix.flowman.model.NamespaceWrapper import com.dimajix.flowman.model.Project -import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.flowman.model.Target import com.dimajix.flowman.model.TargetIdentifier import com.dimajix.flowman.model.TargetInstance -import com.dimajix.flowman.model.Template import com.dimajix.flowman.types.StringType - -class RunnerTest extends FlatSpec with MockFactory with Matchers with BeforeAndAfter { +object RunnerHistoryTest { object NullTarget { def apply(name:String, partition: Map[String,String] = Map()) : Context => NullTarget = { ctx:Context => NullTarget(Target.Properties(ctx, name), ctx.evaluate(partition)) @@ -70,7 +61,10 @@ class RunnerTest extends FlatSpec with MockFactory with Matchers with BeforeAndA ) } } +} + +class RunnerHistoryTest extends AnyFlatSpec with MockFactory with Matchers with BeforeAndAfter { var tempDir:Path = _ before { @@ -81,74 +75,7 @@ class RunnerTest extends FlatSpec with MockFactory with Matchers with BeforeAndA tempDir.toFile.delete() } - "The Runner" should "correctly handle environments and arguments" in { - val session = Session.builder() - .withEnvironment("param", "global") - .withEnvironment("global_env", "global") - .build() - val context = session.context - val job = Job.builder(context) - .setName("my_job") - .addParameter("param", StringType) - .addEnvironment("global_env", "job") - .addEnvironment("job_env", "job") - .build() - - val args = Map( - "param" -> "lala" - ) - - val runner = session.runner - runner.withEnvironment(job, Phase.BUILD, args, force=false) { environment => - environment.toMap should be(Map( - "param" -> "lala", - "global_env" -> "global", - "job_env" -> "job", - "job" -> JobWrapper(job), - "force" -> false, - "phase" -> "build", - "namespace" -> NamespaceWrapper(None) - )) - } - } - - it should "work" in { - val session = Session.builder() - .build() - val job = Job.builder(session.context) - .setName("batch") - .build() - - val runner = session.runner - runner.executeJob(job, Seq(Phase.BUILD)) should be (Status.SUCCESS) - runner.executeJob(job, Seq(Phase.BUILD)) should be (Status.SUCCESS) - } - - it should "throw exceptions on missing parameters" in { - val session = Session.builder() - .build() - val job = Job.builder(session.context) - .setName("batch") - .addParameter("p1", StringType) - .build() - - val runner = session.runner - an[IllegalArgumentException] shouldBe thrownBy(runner.executeJob(job, Seq(Phase.BUILD))) - } - - it should "catch exceptions" in { - val session = Session.builder() - .build() - val job = Job.builder(session.context) - .setName("batch") - .addTarget(TargetIdentifier("some_target")) - .build() - - val runner = session.runner - 
runner.executeJob(job, Seq(Phase.BUILD)) should be (Status.FAILED) - } - - "The JdbcStateStore" should "work with empty jobs" in { + "The Runner with History" should "work with empty jobs" in { val db = tempDir.resolve("mydb") val connection = JdbcStateStore.Connection("jdbc:derby:"+db+";create=true", driver="org.apache.derby.jdbc.EmbeddedDriver") val ns = Namespace( @@ -221,6 +148,7 @@ class RunnerTest extends FlatSpec with MockFactory with Matchers with BeforeAndA def genTarget(name:String, dirty:Trilean) : Context => Target = (ctx:Context) => { val instance = TargetInstance("default", "default", name) val target = stub[Target] + (target.name _).when().returns(name) (target.before _).when().returns(Seq()) (target.after _).when().returns(Seq()) (target.phases _).when().returns(Lifecycle.ALL.toSet) @@ -331,45 +259,4 @@ class RunnerTest extends FlatSpec with MockFactory with Matchers with BeforeAndA runner.executeJob(job, Seq(Phase.BUILD), Map("p1" -> "v2")) should be (Status.SUCCESS) runner.executeJob(job, Seq(Phase.BUILD), Map("p1" -> "v2"), force=true) should be (Status.SUCCESS) } - - it should "invoke all hooks (in jobs and namespaces)" in { - val jobHook = mock[Hook] - val jobJobToken = new JobToken {} - val jobTargetToken = new TargetToken {} - (jobHook.startJob _).expects( where( (_:JobInstance, phase:Phase) => phase == Phase.BUILD) ).returning(jobJobToken) - (jobHook.finishJob _).expects(jobJobToken, Status.SUCCESS) - (jobHook.startTarget _).expects( where( (_:TargetInstance, phase:Phase, token:Option[JobToken]) => phase == Phase.BUILD && token == Some(jobJobToken))).returning(jobTargetToken) - (jobHook.finishTarget _).expects(jobTargetToken, Status.SUCCESS) - val namespaceHook = mock[Hook] - val namespaceJobToken = new JobToken {} - val namespaceTargetToken = new TargetToken {} - (namespaceHook.startJob _).expects( where( (_:JobInstance, phase:Phase) => phase == Phase.BUILD) ).returning(namespaceJobToken) - (namespaceHook.finishJob _).expects(namespaceJobToken, Status.SUCCESS) - (namespaceHook.startTarget _).expects( where( (_:TargetInstance, phase:Phase, token:Option[JobToken]) => phase == Phase.BUILD && token == Some(namespaceJobToken))).returning(namespaceTargetToken) - (namespaceHook.finishTarget _).expects(namespaceTargetToken, Status.SUCCESS) - - val ns = Namespace( - name = "default", - hooks = Seq(new Template[Hook] { - override def instantiate(context: Context): Hook = namespaceHook - }) - ) - val project = Project( - name = "default", - targets = Map("t0" -> NullTarget("t0", Map("p1" -> "$p1"))) - ) - val session = Session.builder() - .withNamespace(ns) - .withProject(project) - .build() - val job = Job.builder(session.getContext(project)) - .setName("job") - .addHook(jobHook) - .addParameter("p1", StringType) - .addTarget(TargetIdentifier("t0")) - .build() - - val runner = session.runner - runner.executeJob(job, Seq(Phase.BUILD), Map("p1" -> "v1")) should be (Status.SUCCESS) - } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/execution/RunnerJobTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/execution/RunnerJobTest.scala new file mode 100644 index 000000000..f920d4bd1 --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/execution/RunnerJobTest.scala @@ -0,0 +1,400 @@ +/* + * Copyright 2018 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.execution + +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.common.Yes +import com.dimajix.flowman.execution.RunnerJobTest.NullTarget +import com.dimajix.flowman.model.BaseTarget +import com.dimajix.flowman.model.Hook +import com.dimajix.flowman.model.Job +import com.dimajix.flowman.model.JobInstance +import com.dimajix.flowman.model.JobWrapper +import com.dimajix.flowman.model.Metadata +import com.dimajix.flowman.model.Namespace +import com.dimajix.flowman.model.NamespaceWrapper +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.ProjectWrapper +import com.dimajix.flowman.model.Target +import com.dimajix.flowman.model.TargetIdentifier +import com.dimajix.flowman.model.TargetInstance +import com.dimajix.flowman.model.Template +import com.dimajix.flowman.model.Test +import com.dimajix.flowman.types.StringType + + +object RunnerJobTest { + object NullTarget { + def apply(name:String, partition: Map[String,String] = Map()) : Context => NullTarget = { + ctx:Context => NullTarget(Target.Properties(ctx, name), ctx.evaluate(partition)) + } + } + case class NullTarget( + instanceProperties: Target.Properties, + partition: Map[String,String] + ) extends BaseTarget { + override def instance: TargetInstance = { + TargetInstance( + namespace.map(_.name).getOrElse(""), + project.map(_.name).getOrElse(""), + name, + partition + ) + } + } +} + + +class RunnerJobTest extends AnyFlatSpec with MockFactory with Matchers { + "The Runner for Jobs" should "correctly handle environments and arguments" in { + val project = Project( + name = "default", + environment = Map( + "project_env" -> "project", + "project_env_to_overwrite" -> "project" + ) + ) + val session = Session.builder() + .withEnvironment("global_env", "global") + .withEnvironment("global_env_to_overwrite", "global") + .build() + val context = session.getContext(project) + val job = Job.builder(context) + .setName("my_job") + .addParameter("param", StringType) + .addEnvironment("global_env_to_overwrite", "job") + .addEnvironment("project_env_to_overwrite", "job") + .addEnvironment("job_env", "job") + .build() + + val args = Map( + "param" -> "lala" + ) + + val runner = session.runner + runner.withJobContext(job, args, force=false, dryRun=false) { (context,args) => + args should be (Map("param" -> "lala")) + context.environment.toMap should be(Map( + "param" -> "lala", + "project_env" -> "project", + "project_env_to_overwrite" -> "job", + "global_env" -> "global", + "global_env_to_overwrite" -> "global", + "job_env" -> "job", + "job" -> JobWrapper(job), + "force" -> false, + "dryRun" -> false, + "project" -> ProjectWrapper(project), + "namespace" -> NamespaceWrapper(None) + )) + } + runner.withEnvironment(job, Phase.BUILD, args, force=false, dryRun=false) { environment => + environment.toMap should be(Map( + "param" -> "lala", + "project_env" -> "project", + "project_env_to_overwrite" -> "job", + "global_env" -> "global", + "global_env_to_overwrite" -> "global", + 
"job_env" -> "job", + "job" -> JobWrapper(job), + "force" -> false, + "dryRun" -> false, + "phase" -> "build", + "project" -> ProjectWrapper(project), + "namespace" -> NamespaceWrapper(None) + )) + } + } + + it should "work" in { + val session = Session.builder() + .build() + val job = Job.builder(session.context) + .setName("batch") + .build() + + val runner = session.runner + runner.executeJob(job, Seq(Phase.BUILD)) should be (Status.SUCCESS) + runner.executeJob(job, Seq(Phase.BUILD)) should be (Status.SUCCESS) + } + + it should "throw exceptions on missing parameters" in { + val session = Session.builder() + .build() + val job = Job.builder(session.context) + .setName("batch") + .addParameter("p1", StringType) + .build() + + val runner = session.runner + an[IllegalArgumentException] shouldBe thrownBy(runner.executeJob(job, Seq(Phase.BUILD))) + } + + it should "fail on missing targets" in { + val session = Session.builder() + .build() + val job = Job.builder(session.context) + .setName("batch") + .addTarget(TargetIdentifier("some_target")) + .build() + + val runner = session.runner + runner.executeJob(job, Seq(Phase.BUILD)) should be (Status.FAILED) + } + + it should "catch exceptions during execution" in { + val targetTemplate = mock[Template[Target]] + val target = mock[Target] + val project = Project( + "project", + targets = Map("some_target" -> targetTemplate) + ) + val session = Session.builder() + .build() + val job = Job.builder(session.getContext(project)) + .setName("batch") + .addTarget(TargetIdentifier("some_target")) + .build() + + (targetTemplate.instantiate _).expects(*).returns(target) + (target.name _).expects().returns("some_target") + (target.before _).expects().returns(Seq()) + (target.after _).expects().returns(Seq()) + (target.phases _).expects().atLeastOnce().returns(Set(Phase.BUILD)) + (target.requires _).expects(Phase.BUILD).atLeastOnce().returns(Set()) + (target.provides _).expects(Phase.BUILD).atLeastOnce().returns(Set()) + (target.identifier _).expects().atLeastOnce().returns(TargetIdentifier("project/some_target")) + (target.instance _).expects().atLeastOnce().returns(TargetInstance("default", "project", "some_target")) + (target.dirty _).expects(*, Phase.BUILD).returns(Yes) + (target.metadata _).expects().atLeastOnce().returns(Metadata(name="some_target", kind="target", category="target")) + (target.execute _).expects(*, Phase.BUILD).throwing(new UnsupportedOperationException()) + + val runner = session.runner + runner.executeJob(job, Seq(Phase.BUILD)) should be (Status.FAILED) + } + + it should "only execute specified targets" in { + def genTarget(name:String, toBeExecuted:Boolean) : Context => Target = (ctx:Context) => { + val instance = TargetInstance("default", "default", name) + val target = mock[Target] + (target.name _).expects().atLeastOnce().returns(name) + (target.before _).expects().atLeastOnce().returns(Seq()) + (target.after _).expects().atLeastOnce().returns(Seq()) + (target.phases _).expects().atLeastOnce().returns(Lifecycle.ALL.toSet) + (target.requires _).expects(*).atLeastOnce().returns(Set()) + (target.provides _).expects(*).atLeastOnce().returns(Set()) + (target.identifier _).expects().atLeastOnce().returns(TargetIdentifier(name)) + if (toBeExecuted) { + (target.instance _).expects().atLeastOnce().returns(instance) + (target.dirty _).expects(*, Phase.CREATE).returns(Yes) + (target.metadata _).expects().atLeastOnce().returns(Metadata(name=name, kind="target", category="target")) + (target.execute _).expects(*, Phase.CREATE).returning(Unit) 
+ } else { + (target.execute _).expects(*, Phase.CREATE).never().returning(Unit) + } + + target + } + def genJob(session:Session, project:Project) : Job = { + Job.builder(session.getContext(session.project.get)) + .setTargets(project.targets.map(t => TargetIdentifier(t._1)).toSeq) + .build() + } + def genProject(targets:Map[String, Boolean]) : Project = { + Project( + name = "default", + targets = targets.map { case(name,toBeExecuted) => name -> Template.of(genTarget(name, toBeExecuted)) } + ) + } + + { + val project = genProject(Map("a" -> true, "ax" -> true, "b" -> false)) + val session = Session.builder() + .withProject(project) + .build() + val job = genJob(session, project) + val runner = session.runner + runner.executeJob(job, Seq(Phase.CREATE), targets=Seq("a.*".r)) should be(Status.SUCCESS) + } + } + + it should "not execute targets in dryMode" in { + def genTarget(name:String) : Context => Target = (ctx:Context) => { + val instance = TargetInstance("default", "default", name) + val target = mock[Target] + (target.name _).expects().atLeastOnce().returns(name) + (target.before _).expects().atLeastOnce().returns(Seq()) + (target.after _).expects().atLeastOnce().returns(Seq()) + (target.phases _).expects().atLeastOnce().returns(Lifecycle.ALL.toSet) + (target.requires _).expects(*).atLeastOnce().returns(Set()) + (target.provides _).expects(*).atLeastOnce().returns(Set()) + (target.identifier _).expects().atLeastOnce().returns(TargetIdentifier(name)) + (target.instance _).expects().atLeastOnce().returns(instance) + (target.dirty _).expects(*, Phase.CREATE).atLeastOnce().returns(Yes) + (target.execute _).expects(*, Phase.CREATE).never().returning(Unit) + + target + } + + val project = Project( + name = "default", + targets = Map("a" -> Template.of(genTarget("a"))) + ) + + val session = Session.builder() + .withProject(project) + .build() + val job = Job.builder(session.getContext(session.project.get)) + .setTargets(project.targets.map(t => TargetIdentifier(t._1)).toSeq) + .build() + + val runner = session.runner + runner.executeJob(job, Seq(Phase.CREATE), dryRun = true) should be(Status.SUCCESS) + } + + it should "stop execution in case of an exception" in { + def genTarget(name:String, throwsException:Boolean, toBeExecuted:Boolean, before:Seq[String]=Seq(), after:Seq[String]=Seq()) : Context => Target = (ctx:Context) => { + val instance = TargetInstance("default", "default", name) + val target = mock[Target] + (target.name _).expects().atLeastOnce().returns(name) + (target.before _).expects().atLeastOnce().returns(before.map(TargetIdentifier(_))) + (target.after _).expects().atLeastOnce().returns(after.map(TargetIdentifier(_))) + (target.phases _).expects().atLeastOnce().returns(Lifecycle.ALL.toSet) + (target.requires _).expects(*).atLeastOnce().returns(Set()) + (target.provides _).expects(*).atLeastOnce().returns(Set()) + (target.project _).expects().anyNumberOfTimes().returns(None) + (target.identifier _).expects().atLeastOnce().returns(TargetIdentifier(name)) + if (toBeExecuted) { + (target.instance _).expects().atLeastOnce().returns(instance) + (target.dirty _).expects(*, Phase.CREATE).atLeastOnce().returns(Yes) + (target.metadata _).expects().atLeastOnce().returns(Metadata(name=name, kind="target", category="target")) + if (throwsException) { + (target.execute _).expects(*, Phase.CREATE).throwing(new UnsupportedOperationException) + } + else { + (target.execute _).expects(*, Phase.CREATE).returning(Unit) + } + } else { + (target.execute _).expects(*, Phase.CREATE).never() + } + + 
target + } + + val project = Project( + name = "default", + targets = Map( + "t0" -> genTarget("t0", true, true), + "t1" -> genTarget("t1", false, false, after=Seq("t0")) + ) + ) + val session = Session.builder() + .withProject(project) + .build() + val job = Job.builder(session.getContext(session.project.get)) + .setTargets(project.targets.map(t => TargetIdentifier(t._1)).toSeq) + .build() + val runner = session.runner + runner.executeJob(job, Seq(Phase.CREATE)) should be(Status.FAILED) + } + + it should "continue execution in case of an exception with keep-going enabled" in { + def genTarget(name:String, throwsException:Boolean, before:Seq[String]=Seq(), after:Seq[String]=Seq()) : Context => Target = (ctx:Context) => { + val instance = TargetInstance("default", "default", name) + val target = mock[Target] + (target.name _).expects().atLeastOnce().returns(name) + (target.before _).expects().atLeastOnce().returns(before.map(TargetIdentifier(_))) + (target.after _).expects().atLeastOnce().returns(after.map(TargetIdentifier(_))) + (target.phases _).expects().atLeastOnce().returns(Lifecycle.ALL.toSet) + (target.requires _).expects(*).atLeastOnce().returns(Set()) + (target.provides _).expects(*).atLeastOnce().returns(Set()) + (target.project _).expects().anyNumberOfTimes().returns(None) + (target.identifier _).expects().atLeastOnce().returns(TargetIdentifier(name)) + (target.instance _).expects().atLeastOnce().returns(instance) + (target.dirty _).expects(*, Phase.CREATE).atLeastOnce().returns(Yes) + (target.metadata _).expects().atLeastOnce().returns(Metadata(name=name, kind="target", category="target")) + if (throwsException) { + (target.execute _).expects(*, Phase.CREATE).throwing(new UnsupportedOperationException) + } + else { + (target.execute _).expects(*, Phase.CREATE).returning(Unit) + } + + target + } + + val project = Project( + name = "default", + targets = Map( + "t0" -> genTarget("t0", true), + "t1" -> genTarget("t1", false, after=Seq("t0")) + ) + ) + val session = Session.builder() + .withProject(project) + .build() + val job = Job.builder(session.getContext(session.project.get)) + .setTargets(project.targets.map(t => TargetIdentifier(t._1)).toSeq) + .build() + val runner = session.runner + runner.executeJob(job, Seq(Phase.CREATE), keepGoing =true) should be(Status.FAILED) + } + + it should "invoke all hooks (in jobs and namespaces)" in { + val jobHook = mock[Hook] + val jobJobToken = new JobToken {} + val jobTargetToken = new TargetToken {} + (jobHook.startJob _).expects( where( (_:JobInstance, phase:Phase) => phase == Phase.BUILD) ).returning(jobJobToken) + (jobHook.finishJob _).expects(jobJobToken, Status.SUCCESS) + (jobHook.startTarget _).expects( where( (_:TargetInstance, phase:Phase, token:Option[JobToken]) => phase == Phase.BUILD && token == Some(jobJobToken))).returning(jobTargetToken) + (jobHook.finishTarget _).expects(jobTargetToken, Status.SUCCESS) + val namespaceHook = mock[Hook] + val namespaceJobToken = new JobToken {} + val namespaceTargetToken = new TargetToken {} + (namespaceHook.startJob _).expects( where( (_:JobInstance, phase:Phase) => phase == Phase.BUILD) ).returning(namespaceJobToken) + (namespaceHook.finishJob _).expects(namespaceJobToken, Status.SUCCESS) + (namespaceHook.startTarget _).expects( where( (_:TargetInstance, phase:Phase, token:Option[JobToken]) => phase == Phase.BUILD && token == Some(namespaceJobToken))).returning(namespaceTargetToken) + (namespaceHook.finishTarget _).expects(namespaceTargetToken, Status.SUCCESS) + + val ns = Namespace( + name 
= "default", + hooks = Seq(new Template[Hook] { + override def instantiate(context: Context): Hook = namespaceHook + }) + ) + val project = Project( + name = "default", + targets = Map("t0" -> NullTarget("t0", Map("p1" -> "$p1"))) + ) + val session = Session.builder() + .withNamespace(ns) + .withProject(project) + .build() + val job = Job.builder(session.getContext(project)) + .setName("job") + .addHook(jobHook) + .addParameter("p1", StringType) + .addTarget(TargetIdentifier("t0")) + .build() + + val runner = session.runner + runner.executeJob(job, Seq(Phase.BUILD), Map("p1" -> "v1")) should be (Status.SUCCESS) + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/execution/RunnerTestTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/execution/RunnerTestTest.scala new file mode 100644 index 000000000..27389d5b3 --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/execution/RunnerTestTest.scala @@ -0,0 +1,420 @@ +/* + * Copyright 2018 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.execution + +import org.apache.spark.storage.StorageLevel +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.model.Assertion +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.NamespaceWrapper +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.ProjectWrapper +import com.dimajix.flowman.model.Relation +import com.dimajix.flowman.model.Target +import com.dimajix.flowman.model.TargetIdentifier +import com.dimajix.flowman.model.Template +import com.dimajix.flowman.model.Test +import com.dimajix.spark.testing.LocalSparkSession + + +class RunnerTestTest extends AnyFlatSpec with MockFactory with Matchers with LocalSparkSession { + "The Runner for Tests" should "register override mappings and relations in tests" in { + val project = Project( + name = "default", + environment = Map( + "project_env" -> "project", + "project_env_to_overwrite" -> "project" + ) + ) + val session = Session.builder() + .withEnvironment("global_env", "global") + .withEnvironment("global_env_to_overwrite", "global") + .withProject(project) + .build() + val context = session.getContext(project) + val test = Test( + Test.Properties(context), + environment = Map( + "test_env" -> "test_env", + "project_env_to_overwrite" -> "test", + "global_env_to_overwrite" -> "test" + ) + ) + + val runner = session.runner + + runner.withTestContext(test, dryRun=false) { context => + context.environment.toMap should be(Map( + "global_env" -> "global", + "project_env" -> "project", + "project_env_to_overwrite" -> "test", + "test_env" -> "test_env", + "global_env_to_overwrite" -> "global", + "force" -> false, + "dryRun" -> false, + "project" -> ProjectWrapper(project), + "namespace" -> 
NamespaceWrapper(None) + )) + } + runner.withEnvironment(test, dryRun=false) { environment => + environment.toMap should be(Map( + "global_env" -> "global", + "project_env" -> "project", + "project_env_to_overwrite" -> "test", + "test_env" -> "test_env", + "global_env_to_overwrite" -> "global", + "force" -> false, + "dryRun" -> false, + "project" -> ProjectWrapper(project), + "namespace" -> NamespaceWrapper(None) + )) + } + } + + it should "correctly build targets and fixtures and check assertions" in { + val targetTemplate = mock[Template[Target]] + val target = mock[Target] + val relationTemplate = mock[Template[Relation]] + val mappingTemplate = mock[Template[Mapping]] + val project = Project( + name = "default", + targets = Map( + "target" -> targetTemplate + ), + relations = Map( + "rel" -> relationTemplate + ), + mappings = Map( + "map" -> mappingTemplate + ) + ) + val session = Session.builder() + .build() + val context = session.getContext(project) + + val fixtureTemplate = mock[Template[Target]] + val fixture = mock[Target] + val assertionTemplate = mock[Template[Assertion]] + val assertion = mock[Assertion] + val overrideRelationTemplate = mock[Template[Relation]] + val overrideMappingTemplate = mock[Template[Mapping]] + val overrideMapping = mock[Mapping] + val test = Test( + Test.Properties(context), + targets = Seq(TargetIdentifier("target")), + fixtures = Map( + "fixture" -> fixtureTemplate + ), + overrideRelations = Map( + "rel" -> overrideRelationTemplate + ), + overrideMappings = Map( + "map" -> overrideMappingTemplate + ), + assertions = Map( + "assert" -> assertionTemplate + ) + ) + + val runner = session.runner + + (targetTemplate.instantiate _).expects(*).returns(target) + (target.identifier _).expects().atLeastOnce().returns(TargetIdentifier("target", "default")) + (target.requires _).expects(*).atLeastOnce().returns(Set()) + (target.provides _).expects(*).atLeastOnce().returns(Set()) + (target.before _).expects().atLeastOnce().returns(Seq()) + (target.after _).expects().atLeastOnce().returns(Seq()) + (target.phases _).expects().atLeastOnce().returns(Set(Phase.CREATE, Phase.BUILD, Phase.VERIFY, Phase.TRUNCATE, Phase.DESTROY)) + (target.execute _).expects(*, Phase.CREATE).returns(Unit) + (target.execute _).expects(*, Phase.BUILD).returns(Unit) + (target.execute _).expects(*, Phase.VERIFY).returns(Unit) + (target.execute _).expects(*, Phase.DESTROY).returns(Unit) + + (fixtureTemplate.instantiate _).expects(*).returns(fixture) + (fixture.identifier _).expects().atLeastOnce().returns(TargetIdentifier("fixture", "default")) + (fixture.requires _).expects(*).atLeastOnce().returns(Set()) + (fixture.provides _).expects(*).atLeastOnce().returns(Set()) + (fixture.before _).expects().atLeastOnce().returns(Seq()) + (fixture.after _).expects().atLeastOnce().returns(Seq()) + (fixture.phases _).expects().atLeastOnce().returns(Set(Phase.CREATE, Phase.BUILD, Phase.VERIFY, Phase.TRUNCATE, Phase.DESTROY)) + (fixture.execute _).expects(*, Phase.CREATE).returns(Unit) + (fixture.execute _).expects(*, Phase.BUILD).returns(Unit) + (fixture.execute _).expects(*, Phase.VERIFY).returns(Unit) + (fixture.execute _).expects(*, Phase.DESTROY).returns(Unit) + + var assertionContext:Context = null + (assertionTemplate.instantiate _).expects(*).onCall { ctx:Context => + assertionContext = ctx + assertion + } + (assertion.description _).expects().atLeastOnce().returns(None) + (assertion.context _).expects().onCall(() => assertionContext) + (assertion.inputs 
_).expects().atLeastOnce().returns(Seq(MappingOutputIdentifier("map", "main", None))) + (assertion.execute _).expects(*,*).returns(Seq()) + + var overrideMappingContext:Context = null + (overrideMappingTemplate.instantiate _).expects(*).onCall { ctx:Context => + overrideMappingContext = ctx + overrideMapping + } + (overrideMapping.inputs _).expects().atLeastOnce().returns(Seq()) + (overrideMapping.outputs _).expects().atLeastOnce().returns(Seq("main")) + (overrideMapping.identifier _).expects().atLeastOnce().returns(MappingIdentifier("map")) + (overrideMapping.context _).expects().onCall(() => overrideMappingContext) + (overrideMapping.broadcast _).expects().returns(false) + (overrideMapping.checkpoint _).expects().returns(false) + (overrideMapping.cache _).expects().returns(StorageLevel.NONE) + (overrideMapping.execute _).expects(*,*).returns(Map("main" -> spark.emptyDataFrame)) + + runner.executeTest(test) should be (Status.SUCCESS) + } + + it should "not execute assertions in dry run mode" in { + val project = Project( + name = "default" + ) + val session = Session.builder() + .build() + val context = session.getContext(project) + + val assertionTemplate = mock[Template[Assertion]] + val assertion = mock[Assertion] + val test = Test( + Test.Properties(context), + assertions = Map( + "assert" -> assertionTemplate + ) + ) + + val runner = session.runner + + (assertionTemplate.instantiate _).expects(*).returns(assertion) + (assertion.description _).expects().atLeastOnce().returns(None) + + runner.executeTest(test, dryRun = true) should be (Status.SUCCESS) + } + + it should "ignore errors if told so" in { + val targetTemplate = mock[Template[Target]] + val target = mock[Target] + val mappingTemplate = mock[Template[Mapping]] + val mapping = mock[Mapping] + val project = Project( + name = "default", + targets = Map( + "target" -> targetTemplate + ), + mappings = Map( + "map" -> mappingTemplate + ) + ) + val session = Session.builder() + .build() + val context = session.getContext(project) + + val fixtureTemplate = mock[Template[Target]] + val fixture = mock[Target] + val assertionTemplate = mock[Template[Assertion]] + val assertion = mock[Assertion] + val test = Test( + Test.Properties(context), + targets = Seq(TargetIdentifier("target")), + fixtures = Map( + "fixture" -> fixtureTemplate + ), + assertions = Map( + "assert" -> assertionTemplate + ) + ) + + val runner = session.runner + + (targetTemplate.instantiate _).expects(*).returns(target) + (target.identifier _).expects().atLeastOnce().returns(TargetIdentifier("target", "default")) + (target.requires _).expects(*).atLeastOnce().returns(Set()) + (target.provides _).expects(*).atLeastOnce().returns(Set()) + (target.before _).expects().atLeastOnce().returns(Seq()) + (target.after _).expects().atLeastOnce().returns(Seq()) + (target.phases _).expects().atLeastOnce().returns(Set(Phase.CREATE, Phase.BUILD, Phase.VERIFY, Phase.TRUNCATE, Phase.DESTROY)) + (target.execute _).expects(*, Phase.CREATE).throws(new UnsupportedOperationException()) + (target.execute _).expects(*, Phase.BUILD).throws(new UnsupportedOperationException()) + (target.execute _).expects(*, Phase.VERIFY).throws(new UnsupportedOperationException()) + (target.execute _).expects(*, Phase.DESTROY).throws(new UnsupportedOperationException()) + + (fixtureTemplate.instantiate _).expects(*).returns(fixture) + (fixture.identifier _).expects().atLeastOnce().returns(TargetIdentifier("fixture", "default")) + (fixture.requires _).expects(*).atLeastOnce().returns(Set()) + (fixture.provides 
_).expects(*).atLeastOnce().returns(Set()) + (fixture.before _).expects().atLeastOnce().returns(Seq()) + (fixture.after _).expects().atLeastOnce().returns(Seq()) + (fixture.phases _).expects().atLeastOnce().returns(Set(Phase.CREATE, Phase.BUILD, Phase.VERIFY, Phase.TRUNCATE, Phase.DESTROY)) + (fixture.execute _).expects(*, Phase.CREATE).throws(new UnsupportedOperationException()) + (fixture.execute _).expects(*, Phase.BUILD).throws(new UnsupportedOperationException()) + (fixture.execute _).expects(*, Phase.VERIFY).throws(new UnsupportedOperationException()) + (fixture.execute _).expects(*, Phase.DESTROY).throws(new UnsupportedOperationException()) + + var assertionContext:Context = null + (assertionTemplate.instantiate _).expects(*).onCall { ctx:Context => + assertionContext = ctx + assertion + } + (assertion.context _).expects().onCall(() => assertionContext) + (assertion.description _).expects().atLeastOnce().returns(None) + (assertion.inputs _).expects().atLeastOnce().returns(Seq(MappingOutputIdentifier("map", "main", None))) + (assertion.execute _).expects(*,*).throws(new UnsupportedOperationException()) + + var mappingContext:Context = null + (mappingTemplate.instantiate _).expects(*).onCall { ctx:Context => + mappingContext = ctx + mapping + } + (mapping.inputs _).expects().atLeastOnce().returns(Seq()) + (mapping.outputs _).expects().atLeastOnce().returns(Seq("main")) + (mapping.identifier _).expects().atLeastOnce().returns(MappingIdentifier("map")) + (mapping.context _).expects().onCall(() => mappingContext) + (mapping.broadcast _).expects().returns(false) + (mapping.checkpoint _).expects().returns(false) + (mapping.cache _).expects().returns(StorageLevel.NONE) + (mapping.execute _).expects(*,*).returns(Map("main" -> spark.emptyDataFrame)) + + runner.executeTest(test, keepGoing = true) should be (Status.FAILED) + } + + it should "stop processing on the first exception" in { + val targetTemplate = mock[Template[Target]] + val target = mock[Target] + val project = Project( + name = "default", + targets = Map( + "target" -> targetTemplate + ) + ) + val session = Session.builder() + .build() + val context = session.getContext(project) + + val fixtureTemplate = mock[Template[Target]] + val fixture = mock[Target] + val assertionTemplate = mock[Template[Assertion]] + val test = Test( + Test.Properties(context), + targets = Seq(TargetIdentifier("target")), + fixtures = Map( + "fixture" -> fixtureTemplate + ), + assertions = Map( + "assert" -> assertionTemplate + ) + ) + + val runner = session.runner + + (targetTemplate.instantiate _).expects(*).returns(target) + (target.identifier _).expects().atLeastOnce().returns(TargetIdentifier("target", "default")) + (target.requires _).expects(*).atLeastOnce().returns(Set()) + (target.provides _).expects(*).atLeastOnce().returns(Set()) + (target.before _).expects().atLeastOnce().returns(Seq()) + (target.after _).expects().atLeastOnce().returns(Seq()) + (target.phases _).expects().atLeastOnce().returns(Set(Phase.CREATE, Phase.BUILD, Phase.VERIFY, Phase.TRUNCATE, Phase.DESTROY)) + (target.execute _).expects(*, Phase.CREATE).throws(new UnsupportedOperationException()) + (target.execute _).expects(*, Phase.DESTROY).throws(new UnsupportedOperationException()) + + (fixtureTemplate.instantiate _).expects(*).returns(fixture) + (fixture.identifier _).expects().atLeastOnce().returns(TargetIdentifier("fixture", "default")) + (fixture.requires _).expects(*).atLeastOnce().returns(Set()) + (fixture.provides _).expects(*).atLeastOnce().returns(Set()) + 
(fixture.before _).expects().atLeastOnce().returns(Seq()) + (fixture.after _).expects().atLeastOnce().returns(Seq()) + (fixture.phases _).expects().atLeastOnce().returns(Set(Phase.BUILD, Phase.VERIFY, Phase.TRUNCATE, Phase.DESTROY)) + + runner.executeTest(test, keepGoing = false) should be (Status.FAILED) + } + + it should "ignore exceptions in assertions if told so" in { + val project = Project( + name = "default" + ) + val session = Session.builder() + .build() + val context = session.getContext(project) + + val assertionTemplate1 = mock[Template[Assertion]] + val assertion1 = mock[Assertion] + val assertionTemplate2 = mock[Template[Assertion]] + val assertion2 = mock[Assertion] + val test = Test( + Test.Properties(context), + assertions = Map( + "assert1" -> assertionTemplate1, + "assert2" -> assertionTemplate2 + ) + ) + + val runner = session.runner + + (assertionTemplate1.instantiate _).expects(*).returns(assertion1) + (assertion1.context _).expects().returns(context) + (assertion1.description _).expects().atLeastOnce().returns(None) + (assertion1.inputs _).expects().atLeastOnce().returns(Seq()) + (assertion1.execute _).expects(*,*).throws(new UnsupportedOperationException()) + (assertionTemplate2.instantiate _).expects(*).returns(assertion2) + (assertion2.context _).expects().returns(context) + (assertion2.description _).expects().atLeastOnce().returns(None) + (assertion2.inputs _).expects().atLeastOnce().returns(Seq()) + (assertion2.execute _).expects(*,*).returns(Seq()) + + runner.executeTest(test, keepGoing = true) should be (Status.FAILED) + } + + it should "stop on the first exceptions in assertions if told so" in { + val project = Project( + name = "default" + ) + val session = Session.builder() + .build() + val context = session.getContext(project) + + val assertionTemplate1 = mock[Template[Assertion]] + val assertion1 = mock[Assertion] + val assertionTemplate2 = mock[Template[Assertion]] + val assertion2 = mock[Assertion] + val test = Test( + Test.Properties(context), + assertions = Map( + "assert1" -> assertionTemplate1, + "assert2" -> assertionTemplate2 + ) + ) + + val runner = session.runner + + (assertionTemplate1.instantiate _).expects(*).returns(assertion1) + (assertion1.context _).expects().returns(context) + (assertion1.description _).expects().atLeastOnce().returns(None) + (assertion1.inputs _).expects().atLeastOnce().returns(Seq()) + (assertion1.execute _).expects(*,*).throws(new UnsupportedOperationException()) + (assertionTemplate2.instantiate _).expects(*).returns(assertion2) + (assertion2.inputs _).expects().returns(Seq()) + + runner.executeTest(test, keepGoing = false) should be (Status.FAILED) + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/execution/SessionTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/execution/SessionTest.scala index c87d4a066..d4db5f63f 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/execution/SessionTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/execution/SessionTest.scala @@ -16,13 +16,13 @@ package com.dimajix.flowman.execution -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.model.Module -class SessionTest extends FlatSpec with Matchers { +class SessionTest extends AnyFlatSpec with Matchers { "A Session" should "be buildable" in { val session = Session.builder() .build() @@ -89,7 +89,7 @@ class SessionTest extends FlatSpec with Matchers { 
session.spark should not equal(newSession.spark) session.context should not equal(newSession.context) - session.executor should not equal(newSession.executor) + session.execution should not equal(newSession.execution) session.runner should not equal(newSession.runner) } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/execution/StatusTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/execution/StatusTest.scala index b737b5b72..7533813ab 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/execution/StatusTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/execution/StatusTest.scala @@ -16,11 +16,11 @@ package com.dimajix.flowman.execution -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class StatusTest extends FlatSpec with Matchers { +class StatusTest extends AnyFlatSpec with Matchers { "Status.ofAll" should "return SUCCESS for empty lists" in { Status.ofAll(Seq())(identity) should be (Status.SUCCESS) } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/execution/TargetOrderingTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/execution/TargetOrderingTest.scala index ff5c9ae7a..5acadcc19 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/execution/TargetOrderingTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/execution/TargetOrderingTest.scala @@ -17,8 +17,8 @@ package com.dimajix.flowman.execution import org.apache.hadoop.fs.Path -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.model.BaseTarget import com.dimajix.flowman.model.ResourceIdentifier @@ -55,7 +55,7 @@ case class DummyTarget( } -class TargetOrderTest extends FlatSpec with Matchers { +class TargetOrderTest extends AnyFlatSpec with Matchers { "Ordering" should "work with simple resources" in { val session = Session.builder().build() val context = session.context diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/graph/GraphBuilderTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/graph/GraphBuilderTest.scala new file mode 100644 index 000000000..b11644652 --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/graph/GraphBuilderTest.scala @@ -0,0 +1,85 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.graph + +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.Template + + +class GraphBuilderTest extends AnyFlatSpec with Matchers with MockFactory { + "The GraphBuilder" should "work" in { + val mapping1 = mock[Mapping] + val mappingTemplate1 = mock[Template[Mapping]] + val mapping2 = mock[Mapping] + val mappingTemplate2 = mock[Template[Mapping]] + + val project = Project( + name = "project", + mappings = Map( + "m1" -> mappingTemplate1, + "m2" -> mappingTemplate2 + ) + ) + val session = Session.builder().build() + val context = session.getContext(project) + + (mappingTemplate1.instantiate _).expects(context).returns(mapping1) + (mapping1.context _).expects().returns(context) + (mapping1.kind _).expects().returns("m1_kind") + (mapping1.name _).expects().atLeastOnce().returns("m1") + (mapping1.link _).expects(*).onCall((l:Linker) => Some(1).foreach(_ => l.input(MappingIdentifier("m2"), "main"))) + (mappingTemplate2.instantiate _).expects(context).returns(mapping2) + (mapping2.context _).expects().returns(context) + (mapping2.kind _).expects().returns("m2_kind") + (mapping2.name _).expects().atLeastOnce().returns("m2") + (mapping2.link _).expects(*).returns(Unit) + + val graph = new GraphBuilder(context) + .addMapping(MappingIdentifier("m1")) + .build() + + val nodes = graph.nodes + + val ref1 = nodes.find(_.name == "m1").head.asInstanceOf[MappingRef] + val ref2 = nodes.find(_.name == "m2").head.asInstanceOf[MappingRef] + + ref1.category should be ("mapping") + ref1.kind should be ("m1_kind") + ref1.name should be ("m1") + ref1.mapping should be (mapping1) + ref1.incoming should be (Seq( + InputMapping(ref2, ref1, "main") + )) + ref1.outgoing should be (Seq()) + + ref2.category should be ("mapping") + ref2.kind should be ("m2_kind") + ref2.name should be ("m2") + ref2.mapping should be (mapping2) + ref2.incoming should be (Seq()) + ref2.outgoing should be (Seq( + InputMapping(ref2, ref1, "main") + )) + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/graph/GraphTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/graph/GraphTest.scala new file mode 100644 index 000000000..9be03f3af --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/graph/GraphTest.scala @@ -0,0 +1,146 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.graph + +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.Relation +import com.dimajix.flowman.model.RelationIdentifier +import com.dimajix.flowman.model.Target +import com.dimajix.flowman.model.TargetIdentifier +import com.dimajix.flowman.model.Template + + +class GraphTest extends AnyFlatSpec with Matchers with MockFactory { + "Graph.ofProject" should "work" in { + val mapping1 = mock[Mapping] + val mappingTemplate1 = mock[Template[Mapping]] + val mapping2 = mock[Mapping] + val mappingTemplate2 = mock[Template[Mapping]] + val sourceRelation = mock[Relation] + val sourceRelationTemplate = mock[Template[Relation]] + val targetRelation = mock[Relation] + val targetRelationTemplate = mock[Template[Relation]] + val target = mock[Target] + val targetTemplate = mock[Template[Target]] + + val project = Project( + name = "project", + mappings = Map( + "m1" -> mappingTemplate1, + "m2" -> mappingTemplate2 + ), + targets = Map( + "t" -> targetTemplate + ), + relations = Map( + "src" -> sourceRelationTemplate, + "tgt" -> targetRelationTemplate + ) + ) + val session = Session.builder().build() + val context = session.getContext(project) + + (mappingTemplate1.instantiate _).expects(context).returns(mapping1) + (mapping1.context _).expects().returns(context) + (mapping1.name _).expects().atLeastOnce().returns("m1") + (mapping1.link _).expects(*).onCall((l:Linker) => Some(1).foreach(_ => l.input(MappingIdentifier("m2"), "main"))) + + (mappingTemplate2.instantiate _).expects(context).returns(mapping2) + (mapping2.context _).expects().returns(context) + (mapping2.name _).expects().atLeastOnce().returns("m2") + (mapping2.link _).expects(*).onCall((l:Linker) => Some(1).foreach(_ => l.read(RelationIdentifier("src"), Map()))) + + (sourceRelationTemplate.instantiate _).expects(context).returns(sourceRelation) + (sourceRelation.context _).expects().returns(context) + (sourceRelation.name _).expects().atLeastOnce().returns("src") + (sourceRelation.link _).expects(*).returns(Unit) + + (targetRelationTemplate.instantiate _).expects(context).returns(targetRelation) + (targetRelation.context _).expects().returns(context) + (targetRelation.name _).expects().atLeastOnce().returns("tgt") + (targetRelation.link _).expects(*).returns(Unit) + + (targetTemplate.instantiate _).expects(context).returns(target) + (target.context _).expects().returns(context) + (target.name _).expects().atLeastOnce().returns("t") + (target.link _).expects(*).onCall((l:Linker) => Some(1).foreach { _ => + l.input(MappingIdentifier("m1"), "main") + l.write(RelationIdentifier("tgt"), Map()) + }) + + val graph = Graph.ofProject(session, project) + + val nodes = graph.nodes + nodes.size should be (5) + nodes.find(_.name == "m1") should not be (None) + nodes.find(_.name == "m1").get shouldBe a[MappingRef] + nodes.find(_.name == "m2") should not be (None) + nodes.find(_.name == "m2").get shouldBe a[MappingRef] + nodes.find(_.name == "m3") should be (None) + nodes.find(_.name == "src") should not be (None) + nodes.find(_.name == "src").get shouldBe a[RelationRef] + nodes.find(_.name == "tgt") should not be (None) + nodes.find(_.name == "tgt").get shouldBe a[RelationRef] + nodes.find(_.name == "t") should not be (None) + 
nodes.find(_.name == "t").get shouldBe a[TargetRef] + + val edges = graph.edges + edges.size should be (4) + + val maps = graph.mappings + maps.size should be (2) + maps.find(_.name == "m1") should not be (None) + maps.find(_.name == "m2") should not be (None) + maps.find(_.name == "m3") should be (None) + val m1 = maps.find(_.name == "m1").get + val m2 = maps.find(_.name == "m2").get + + val tgts = graph.targets + tgts.size should be (1) + val t = tgts.head + + val rels = graph.relations + rels.size should be (2) + val src = rels.find(_.name == "src").get + val tgt = rels.find(_.name == "tgt").get + + m1.incoming should be (Seq(InputMapping(m2, m1, "main"))) + m1.outgoing should be (Seq(InputMapping(m1, t, "main"))) + m2.incoming should be (Seq(ReadRelation(src, m2, Map()))) + m2.outgoing should be (Seq(InputMapping(m2, m1, "main"))) + src.incoming should be (Seq()) + src.outgoing should be (Seq(ReadRelation(src, m2, Map()))) + t.incoming should be (Seq(InputMapping(m1, t, "main"))) + t.outgoing should be (Seq(WriteRelation(t, tgt, Map()))) + tgt.incoming should be (Seq(WriteRelation(t, tgt, Map()))) + tgt.outgoing should be (Seq()) + + graph.relation(RelationIdentifier("src")) should be (src) + graph.relation(sourceRelation) should be (src) + graph.mapping(MappingIdentifier("m1")) should be (m1) + graph.mapping(mapping1) should be (m1) + graph.target(TargetIdentifier("t")) should be (t) + graph.target(target) should be (t) + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/graph/NodeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/graph/NodeTest.scala new file mode 100644 index 000000000..0941cbfe1 --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/graph/NodeTest.scala @@ -0,0 +1,76 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.graph + +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.Relation +import com.dimajix.flowman.model.Target + + +class NodeTest extends AnyFlatSpec with Matchers with MockFactory { + "A Node" should "generate a nice tree" in { + val srcRelation = mock[Relation] + (srcRelation.kind _).expects().atLeastOnce().returns("files") + (srcRelation.name _).expects().atLeastOnce().returns("source") + val readMapping = mock[Mapping] + (readMapping.kind _).expects().atLeastOnce().returns("read") + (readMapping.name _).expects().atLeastOnce().returns("read_source") + val mapping1 = mock[Mapping] + (mapping1.kind _).expects().atLeastOnce().returns("select") + (mapping1.name _).expects().atLeastOnce().returns("extract_data") + val mapping2 = mock[Mapping] + (mapping2.kind _).expects().atLeastOnce().returns("filter") + (mapping2.name _).expects().atLeastOnce().returns("filter_data") + val mapping3 = mock[Mapping] + (mapping3.kind _).expects().atLeastOnce().returns("extend") + (mapping3.name _).expects().atLeastOnce().returns("extend_data") + val unionMapping = mock[Mapping] + (unionMapping.kind _).expects().atLeastOnce().returns("union") + (unionMapping.name _).expects().atLeastOnce().returns("union_all") + val target = mock[Target] + (target.kind _).expects().atLeastOnce().returns("relation") + (target.name _).expects().atLeastOnce().returns("write_facts") + val tgtRelation = mock[Relation] + (tgtRelation.kind _).expects().atLeastOnce().returns("hive") + (tgtRelation.name _).expects().atLeastOnce().returns("facts") + + val srcRelationNode = RelationRef(1, srcRelation) + val readMappingNode = MappingRef(2, readMapping) + val mapping1Node = MappingRef(3, mapping1) + val mapping2Node = MappingRef(4, mapping2) + val mapping3Node = MappingRef(5, mapping3) + val unionMappingNode = MappingRef(6, unionMapping) + val targetNode = TargetRef(7, target) + val tgtRelationNode = RelationRef(8, tgtRelation) + + tgtRelationNode.inEdges.append(WriteRelation(targetNode, tgtRelationNode)) + targetNode.inEdges.append(InputMapping(unionMappingNode, targetNode)) + unionMappingNode.inEdges.append(InputMapping(mapping1Node, unionMappingNode)) + unionMappingNode.inEdges.append(InputMapping(mapping2Node, unionMappingNode)) + unionMappingNode.inEdges.append(InputMapping(mapping3Node, unionMappingNode)) + mapping1Node.inEdges.append(InputMapping(readMappingNode, mapping1Node)) + mapping2Node.inEdges.append(InputMapping(readMappingNode, mapping2Node)) + mapping3Node.inEdges.append(InputMapping(readMappingNode, mapping3Node)) + readMappingNode.inEdges.append(ReadRelation(srcRelationNode, readMappingNode)) + + println(tgtRelationNode.upstreamDependencyTree) + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/hadoop/FileCollectorTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/hadoop/FileCollectorTest.scala index 4f29aec4b..082c78d13 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/hadoop/FileCollectorTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/hadoop/FileCollectorTest.scala @@ -21,8 +21,8 @@ import java.time.Month import org.apache.hadoop.fs.Path import org.apache.hadoop.fs.{FileSystem => HadoopFileSystem} import org.scalatest.BeforeAndAfterAll -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import 
org.scalatest.matchers.should.Matchers import com.dimajix.flowman.catalog.PartitionSpec import com.dimajix.flowman.types.RangeValue @@ -30,7 +30,7 @@ import com.dimajix.flowman.types.TimestampType import com.dimajix.flowman.util.UtcTimestamp -class FileCollectorTest extends FlatSpec with Matchers with BeforeAndAfterAll { +class FileCollectorTest extends AnyFlatSpec with Matchers with BeforeAndAfterAll { var hadoopConf:org.apache.hadoop.conf.Configuration = _ var fileSystem: HadoopFileSystem = _ var workingDirectory:Path = _ diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/hadoop/FileTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/hadoop/FileTest.scala index d67fa78d6..4dbb79fce 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/hadoop/FileTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/hadoop/FileTest.scala @@ -16,13 +16,13 @@ package com.dimajix.flowman.hadoop -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.testing.LocalSparkSession -class FileTest extends FlatSpec with Matchers with LocalSparkSession { +class FileTest extends AnyFlatSpec with Matchers with LocalSparkSession { "A local File" should "be useable with simple strings" in { val conf = spark.sparkContext.hadoopConfiguration val fs = FileSystem(conf) diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/history/JdbcStateStoreTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/history/JdbcStateStoreTest.scala index ac772e19d..5e50e16a7 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/history/JdbcStateStoreTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/history/JdbcStateStoreTest.scala @@ -20,8 +20,8 @@ import java.nio.file.Files import java.nio.file.Path import org.scalatest.BeforeAndAfter -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.Status @@ -29,7 +29,7 @@ import com.dimajix.flowman.model.JobInstance import com.dimajix.flowman.model.TargetInstance -class JdbcStateStoreTest extends FlatSpec with Matchers with BeforeAndAfter { +class JdbcStateStoreTest extends AnyFlatSpec with Matchers with BeforeAndAfter { var tempDir: Path = _ before { diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/history/NullStateStoreTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/history/NullStateStoreTest.scala index 023e0dc80..88ad2b9b2 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/history/NullStateStoreTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/history/NullStateStoreTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.history -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.Status @@ -25,7 +25,7 @@ import com.dimajix.flowman.model.JobInstance import com.dimajix.flowman.model.TargetInstance -class NullStateStoreTest extends FlatSpec with Matchers { +class NullStateStoreTest extends AnyFlatSpec with Matchers { "The NullStateStore" should "support batches" in { val batch = JobInstance( "default", diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/jdbc/BaseDialectTest.scala 
b/flowman-core/src/test/scala/com/dimajix/flowman/jdbc/BaseDialectTest.scala index b642297e4..bf7992836 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/jdbc/BaseDialectTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/jdbc/BaseDialectTest.scala @@ -16,12 +16,12 @@ package com.dimajix.flowman.jdbc -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.catalog.PartitionSpec -class BaseDialectTest extends FlatSpec with Matchers { +class BaseDialectTest extends AnyFlatSpec with Matchers { "The BaseDialect" should "create PARTITION spects" in { val dialect = NoopDialect val partitionSpec = PartitionSpec(Map( diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/jdbc/DerbyJdbcTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/jdbc/DerbyJdbcTest.scala index a1a983917..30a1ae1ef 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/jdbc/DerbyJdbcTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/jdbc/DerbyJdbcTest.scala @@ -20,8 +20,8 @@ import java.nio.file.Path import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.types.Field import com.dimajix.flowman.types.IntegerType @@ -29,7 +29,7 @@ import com.dimajix.flowman.types.StringType import com.dimajix.spark.testing.LocalTempDir -class DerbyJdbcTest extends FlatSpec with Matchers with LocalTempDir { +class DerbyJdbcTest extends AnyFlatSpec with Matchers with LocalTempDir { var db:Path = _ var url:String = _ val driver = "org.apache.derby.jdbc.EmbeddedDriver" diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/metric/CounterAccumulatorMetricBundleTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/metric/CounterAccumulatorMetricBundleTest.scala index d6aeed3a1..c5c6aa47e 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/metric/CounterAccumulatorMetricBundleTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/metric/CounterAccumulatorMetricBundleTest.scala @@ -16,13 +16,13 @@ package com.dimajix.flowman.metric -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.accumulator.CounterAccumulator -class CounterAccumulatorMetricBundleTest extends FlatSpec with Matchers { +class CounterAccumulatorMetricBundleTest extends AnyFlatSpec with Matchers { "A CounterAccumulatorMetricBundle" should "provide all metrics" in { val accumulator = new CounterAccumulator() accumulator.add(Map("a" -> 1l, "b" -> 2l)) diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/metric/MetricBoardTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/metric/MetricBoardTest.scala index 5f58babfe..8417fe4e2 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/metric/MetricBoardTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/metric/MetricBoardTest.scala @@ -16,15 +16,15 @@ package com.dimajix.flowman.metric -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.execution.Status import 
com.dimajix.spark.accumulator.CounterAccumulator -class MetricBoardTest extends FlatSpec with Matchers { +class MetricBoardTest extends AnyFlatSpec with Matchers { "A MetricBoard" should "return relabelled metrics" in { val session = Session.builder() .withEnvironment("env_var", "env_value") diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/metric/MetricSystemTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/metric/MetricSystemTest.scala index 2db5bbc6a..110d1015f 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/metric/MetricSystemTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/metric/MetricSystemTest.scala @@ -16,13 +16,13 @@ package com.dimajix.flowman.metric -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.accumulator.CounterAccumulator -class MetricSystemTest extends FlatSpec with Matchers { +class MetricSystemTest extends AnyFlatSpec with Matchers { "The MetricSystem" should "return all metrics" in { val registry = new MetricSystem diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/model/IdentifierTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/model/IdentifierTest.scala index da680a282..7d3c4bb4b 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/model/IdentifierTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/model/IdentifierTest.scala @@ -16,11 +16,11 @@ package com.dimajix.flowman.model -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class IdentifierTest extends FlatSpec with Matchers { +class IdentifierTest extends AnyFlatSpec with Matchers { "The TableIdentifier" should "be parsed correctly" in { MappingIdentifier.parse("lala") should be (new MappingIdentifier("lala", None)) MappingIdentifier.parse("project/lala") should be (new MappingIdentifier("lala", Some("project"))) diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/model/JobTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/model/JobTest.scala index 767b41846..e4bceea94 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/model/JobTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/model/JobTest.scala @@ -16,9 +16,9 @@ package com.dimajix.flowman.model -import org.scalatest.FlatSpec -import org.scalatest.Matchers -import org.scalatestplus.mockito.MockitoSugar +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.types.ArrayValue @@ -28,7 +28,63 @@ import com.dimajix.flowman.types.SingleValue import com.dimajix.flowman.types.StringType -class JobTest extends FlatSpec with Matchers with MockitoSugar { +class JobTest extends AnyFlatSpec with Matchers with MockFactory { + "Job.Builder" should "work" in { + val session = Session.builder().build() + val context = session.context + val job = Job.builder(context) + .setProperties(Job.Properties(context, "some_job")) + .setDescription("Some job") + .setParameters(Seq(Job.Parameter("p1", IntegerType))) + .addParameter(Job.Parameter("p2", IntegerType)) + .addParameter("p3", StringType) + .setEnvironment(Map("env1" -> "eval_1")) + .addEnvironment("env2", "eval_2") + .build() + + job.name should be ("some_job") + job.description should be (Some("Some job")) + 
job.parameters should be (Seq( + Job.Parameter("p1", IntegerType), + Job.Parameter("p2", IntegerType), + Job.Parameter("p3", StringType) + )) + job.environment should be (Map( + "env1" -> "eval_1", + "env2" -> "eval_2" + )) + + val instance = job.instance(Map("p1" -> "val1", "p2" -> "val2", "p3" -> "val3")) + instance.job should be ("some_job") + instance.namespace should be ("") + instance.project should be ("") + instance.args should be(Map("p1" -> "val1", "p2" -> "val2", "p3" -> "val3")) + instance.asMap should be (Map( + "job" -> "some_job", + "name" -> "some_job", + "namespace" -> "", + "project" -> "", + "p1" -> "val1", "p2" -> "val2", "p3" -> "val3" + )) + } + + "Job.arguments" should "parse arguments" in { + val session = Session.builder().build() + val context = session.context + val job = Job.builder(context) + .addParameter("p1", IntegerType, Some("2")) + .addParameter("p2", StringType) + .build() + + job.arguments(Map( + "p1" -> "17", + "p2" -> "lala" + )) should be (Map( + "p1" -> 17, + "p2" -> "lala" + )) + } + "Job.parseArguments" should "parse arguments" in { val session = Session.builder().build() val context = session.context @@ -37,14 +93,14 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { .addParameter("p2", StringType) .build() - val arguments = job.parseArguments(Map( + job.parseArguments(Map( "p1:start" -> "17", "p1:end" -> "27", "p2" -> "lala" + )) should be (Map( + "p1" -> RangeValue("17", "27"), + "p2" -> SingleValue("lala") )) - - arguments("p1") should be (RangeValue("17", "27")) - arguments("p2") should be (SingleValue("lala")) } it should "throw an exception for unknown parameters" in { @@ -68,12 +124,10 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { .addParameter("p2", StringType) .build() - val args = job.interpolate(Map( + job.interpolate(Map( "p1"-> RangeValue("2", "8"), "p2" -> ArrayValue("x", "y", "z") - )) - - args.toSet should be (Set( + )).toSet should be (Set( Map("p1" -> 2, "p2" -> "x"), Map("p1" -> 4, "p2" -> "x"), Map("p1" -> 6, "p2" -> "x"), @@ -130,4 +184,38 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { Map("p1" -> 6) )) } + + "Job.merge" should "correctly merge Jobs" in { + val session = Session.builder().build() + val context = session.context + + val job = Job.builder(context) + .setProperties(Job.Properties(context, "some_job")) + .setDescription("Some job") + .setParameters(Seq(Job.Parameter("p1", IntegerType))) + .addParameter("p3", StringType) + .setEnvironment(Map("env1" -> "eval_1", "env2" -> "eval_2", "p2" -> "17")) + .build() + val parent = Job.builder(context) + .setProperties(Job.Properties(context, "parent_job")) + .setDescription("Some parent job") + .setParameters(Seq(Job.Parameter("p1", IntegerType), Job.Parameter("p2", IntegerType), Job.Parameter("p4", IntegerType))) + .setEnvironment(Map("env1" -> "parent_val_1", "env4" -> "parent_val_4")) + .build() + + val result = Job.merge(job, Seq(parent)) + result.name should be ("some_job") + result.description should be (Some("Some job")) + result.parameters should be (Seq( + Job.Parameter("p1", IntegerType), + Job.Parameter("p4", IntegerType), + Job.Parameter("p3", StringType) + )) + result.environment should be (Map( + "env1" -> "eval_1", + "env2" -> "eval_2", + "env4" -> "parent_val_4", + "p2" -> "17" + )) + } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/model/MappingTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/model/MappingTest.scala new file mode 100644 index 000000000..1d7fb35d6 --- 
/dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/model/MappingTest.scala @@ -0,0 +1,144 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.model + +import org.apache.spark.sql.DataFrame +import org.apache.spark.sql.{functions => f} +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.execution.NoSuchMappingOutputException +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.graph.GraphBuilder +import com.dimajix.flowman.graph.InputMapping +import com.dimajix.flowman.model.MappingTest.DummyMapping +import com.dimajix.flowman.types.Field +import com.dimajix.flowman.types.IntegerType +import com.dimajix.flowman.types.LongType +import com.dimajix.flowman.types.StringType +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +object MappingTest { + class DummyMapping(props:Mapping.Properties, ins:Seq[MappingOutputIdentifier]) extends BaseMapping { + protected override def instanceProperties: Mapping.Properties = props + + override def inputs: Seq[MappingOutputIdentifier] = ins + + override def execute(execution: Execution, input: Map[MappingOutputIdentifier, DataFrame]): Map[String, DataFrame] = { + val df = input.head._2.groupBy("id").agg(f.sum("val")) + Map("main" -> df) + } + } +} + +class MappingTest extends AnyFlatSpec with Matchers with MockFactory with LocalSparkSession { + "Mapping.output" should "return a MappingOutputIdentifier with a project" in { + val project = Project( + name = "project" + ) + val session = Session.builder().build() + val context = session.getContext(project) + + val mapping = new DummyMapping( + Mapping.Properties(context, "m1"), + Seq() + ) + mapping.output("main") should be (MappingOutputIdentifier("project/m1:main")) + an[NoSuchMappingOutputException] should be thrownBy(mapping.output("no_such_output")) + } + + it should "return a MappingOutputIdentifier without a project" in { + val session = Session.builder().build() + val context = session.context + + val mapping = new DummyMapping( + Mapping.Properties(context, "m1"), + Seq() + ) + mapping.output("main") should be (MappingOutputIdentifier("m1:main")) + an[NoSuchMappingOutputException] should be thrownBy(mapping.output("no_such_output")) + } + + "Mapping.describe default implementation" should "return meaningful results" in { + val session = Session.builder().build() + val context = session.context + val execution = session.execution + + val mapping = new DummyMapping( + Mapping.Properties(context, "m1"), + Seq(MappingOutputIdentifier("input:main")) + ) + + val inputSchema = StructType(Seq( + Field("id", StringType), + Field("val", IntegerType), + Field("comment", StringType) + )) + val result = mapping.describe(execution, Map(MappingOutputIdentifier("input:main") -> inputSchema)) + + 
result("main") should be (StructType(Seq( + Field("id", StringType), + Field("sum(val)", LongType) + ))) + } + + "Mapping.link default implementation" should "work" in { + val mappingTemplate1 = mock[Template[Mapping]] + val mappingTemplate2 = mock[Template[Mapping]] + + val project = Project( + name = "project", + mappings = Map( + "m1" -> mappingTemplate1, + "m2" -> mappingTemplate2 + ) + ) + val session = Session.builder().build() + val context = session.getContext(project) + + val mapping1 = new DummyMapping( + Mapping.Properties(context, "m1"), + Seq(MappingOutputIdentifier("m2")) + ) + val mapping2 = new DummyMapping( + Mapping.Properties(context, "m2"), + Seq() + ) + //(mappingTemplate1.instantiate _).expects(context).returns(mapping1) + (mappingTemplate2.instantiate _).expects(context).returns(mapping2) + + val graphBuilder = new GraphBuilder(context) + val ref1 = graphBuilder.refMapping(mapping1) + val ref2 = graphBuilder.refMapping(mapping2) + + ref1.mapping should be (mapping1) + ref1.incoming should be (Seq( + InputMapping(ref2, ref1, "main") + )) + ref1.outgoing should be (Seq()) + + ref2.mapping should be (mapping2) + ref2.incoming should be (Seq()) + ref2.outgoing should be (Seq( + InputMapping(ref2, ref1, "main") + )) + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/model/NamespaceTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/model/NamespaceTest.scala index 241f29f0c..4521da517 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/model/NamespaceTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/model/NamespaceTest.scala @@ -16,11 +16,11 @@ package com.dimajix.flowman.model -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class NamespaceTest extends FlatSpec with Matchers { +class NamespaceTest extends AnyFlatSpec with Matchers { "A Namespace" should "provide a default Namespace" in { val ns = Namespace.read.default() ns should not be (null) diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/model/PartitionSchemaTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/model/PartitionSchemaTest.scala index b8e854407..13108d919 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/model/PartitionSchemaTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/model/PartitionSchemaTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.model -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.catalog.PartitionSpec import com.dimajix.flowman.types.ArrayValue @@ -27,7 +27,7 @@ import com.dimajix.flowman.types.SingleValue import com.dimajix.flowman.types.StringType -class PartitionSchemaTest extends FlatSpec with Matchers { +class PartitionSchemaTest extends AnyFlatSpec with Matchers { "The PartitionSchema" should "provide partition column names" in { val partitionColumns = Seq( PartitionField("p1", StringType), diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/model/ResourceIdentifierTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/model/ResourceIdentifierTest.scala index ca6f292ce..494bb4ca4 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/model/ResourceIdentifierTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/model/ResourceIdentifierTest.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018 Kaya Kupferschmidt + * Copyright 
2018-2021 Kaya Kupferschmidt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -16,13 +16,22 @@ package com.dimajix.flowman.model +import java.io.File + import org.apache.hadoop.fs.Path -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +class ResourceIdentifierTest extends AnyFlatSpec with Matchers { + "A ResourceIdentifier" should "support basic methods" in { + val id = ResourceIdentifier.ofHivePartition("some_table", None, Map("p1" -> "xyz")) + id.category should be ("hiveTablePartition") + id.name should be ("some_table") + id.partition should be (Map("p1" -> "xyz")) + } -class ResourceIdentifierTest extends FlatSpec with Matchers { - "A ResourceIdentifier" should "support 'contains' with partitions" in { + it should "support 'contains' with partitions" in { val id = ResourceIdentifier.ofHivePartition("some_table", None, Map("p1" -> "xyz")) id.contains(ResourceIdentifier.ofHivePartition("some_table", None, Map("p1" -> "xyz"))) should be (true) id.contains(ResourceIdentifier.ofHivePartition("some_table", None, Map("p1" -> "zyx"))) should be (false) @@ -82,7 +91,7 @@ class ResourceIdentifierTest extends FlatSpec with Matchers { id.contains(ResourceIdentifier.ofFile(new Path("C:/Temp/1572861822921-0/topic=publish.Card.test.dev/processing_date=2019-03-20"))) should be (true) } - it should "support character sets" in { + it should "support character sets in regex" in { val id = ResourceIdentifier.ofHiveTable("table_[0-9]+") id.contains(ResourceIdentifier.ofHiveTable("table_[0-9]+")) should be (true) id.contains(ResourceIdentifier.ofHiveTable("table_+")) should be (false) @@ -92,4 +101,76 @@ class ResourceIdentifierTest extends FlatSpec with Matchers { id.contains(ResourceIdentifier.ofHiveTable("table_0x")) should be (false) id.contains(ResourceIdentifier.ofHiveTable("table_0+")) should be (false) } + + it should "support changing partitions" in { + val id = ResourceIdentifier.ofHivePartition("some_table", None, Map("p1" -> "xyz")) + id.partition should be (Map("p1" -> "xyz")) + + val id2 = id.withPartition(Map("p2" -> "abc")) + id2 should be (ResourceIdentifier.ofHivePartition("some_table", None, Map("p2" -> "abc"))) + id2.partition should be (Map("p2" -> "abc")) + } + + it should "enumerate all super-partitions" in { + val id = ResourceIdentifier.ofHivePartition("some_table", None, Map("p1" -> "xyz", "p2" -> "abc")) + id.partition should be (Map("p1" -> "xyz", "p2" -> "abc")) + id.explodePartitions() should be (Seq( + ResourceIdentifier.ofHivePartition("some_table", None, Map()), + ResourceIdentifier.ofHivePartition("some_table", None, Map("p1" -> "xyz")), + ResourceIdentifier.ofHivePartition("some_table", None, Map("p2" -> "abc")), + ResourceIdentifier.ofHivePartition("some_table", None, Map("p1" -> "xyz", "p2" -> "abc")) + )) + } + + it should "support files" in { + val id = ResourceIdentifier.ofFile(new Path("/path/?/with/wildcard")) + id should be (GlobbingResourceIdentifier("file", "/path/?/with/wildcard")) + id.isEmpty should be (false) + id.nonEmpty should be (true) + id.category should be ("file") + id.name should be ("/path/?/with/wildcard") + id.partition should be (Map()) + + ResourceIdentifier.ofFile(new Path("file:/path/?/with/wildcard")) should be (GlobbingResourceIdentifier("file", "file:/path/?/with/wildcard")) + } + + it should "support local files" in { + 
ResourceIdentifier.ofLocal(new Path("/path/?/with/wildcard")) should be (GlobbingResourceIdentifier("local", "/path/?/with/wildcard")) + ResourceIdentifier.ofLocal(new Path("file:/path/?/with/wildcard")) should be (GlobbingResourceIdentifier("local", "/path/?/with/wildcard")) + ResourceIdentifier.ofLocal(new File("/path/?/with/wildcard")) should be (GlobbingResourceIdentifier("local", "/path/?/with/wildcard")) + } + + it should "support Hive databases" in { + val id = ResourceIdentifier.ofHiveDatabase("db") + id should be (RegexResourceIdentifier("hiveDatabase", "db")) + id.isEmpty should be (false) + id.nonEmpty should be (true) + id.category should be ("hiveDatabase") + id.name should be ("db") + } + + it should "support Hive tables" in { + ResourceIdentifier.ofHiveTable("table") should be (RegexResourceIdentifier("hiveTable", "table")) + ResourceIdentifier.ofHiveTable("table", None) should be (RegexResourceIdentifier("hiveTable", "table")) + ResourceIdentifier.ofHiveTable("table", Some("db")) should be (RegexResourceIdentifier("hiveTable", "db.table")) + } + + it should "support Hive table partitions" in { + ResourceIdentifier.ofHivePartition("table", Some("db"), Map("p1" -> "v1", "p2" -> 2)) should be (RegexResourceIdentifier("hiveTablePartition", "db.table", Map("p1" -> "v1", "p2" -> "2"))) + ResourceIdentifier.ofHivePartition("table", None, Map("p1" -> "v1", "p2" -> 2)) should be (RegexResourceIdentifier("hiveTablePartition", "table", Map("p1" -> "v1", "p2" -> "2"))) + } + + it should "support JDBC databases" in { + ResourceIdentifier.ofJdbcDatabase("db") should be (RegexResourceIdentifier("jdbcDatabase", "db")) + } + + it should "support JDBC tables" in { + ResourceIdentifier.ofJdbcTable("table", None) should be (RegexResourceIdentifier("jdbcTable", "table")) + ResourceIdentifier.ofJdbcTable("table", Some("db")) should be (RegexResourceIdentifier("jdbcTable", "db.table")) + } + + it should "support JDBC table partitions" in { + ResourceIdentifier.ofJdbcTablePartition("table", Some("db"), Map("p1" -> "v1", "p2" -> 2)) should be (RegexResourceIdentifier("jdbcTablePartition", "db.table", Map("p1" -> "v1", "p2" -> "2"))) + ResourceIdentifier.ofJdbcTablePartition("table", None, Map("p1" -> "v1", "p2" -> 2)) should be (RegexResourceIdentifier("jdbcTablePartition", "table", Map("p1" -> "v1", "p2" -> "2"))) + } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/storage/FileStoreTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/storage/FileStoreTest.scala index 14b57ceb5..c0632db27 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/storage/FileStoreTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/storage/FileStoreTest.scala @@ -16,13 +16,13 @@ package com.dimajix.flowman.storage -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session -class FileStoreTest extends FlatSpec with Matchers { +class FileStoreTest extends AnyFlatSpec with Matchers { "A FileStore" should "list all projects" in { val session = Session.builder() .build() diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/templating/TemplatingTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/templating/TemplatingTest.scala index 29cfcbe5a..baaefc147 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/templating/TemplatingTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/templating/TemplatingTest.scala @@ 
-21,8 +21,8 @@ import java.sql.Date import java.time.LocalDate import java.time.Month -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.annotation.TemplateObject import com.dimajix.flowman.util.UtcTimestamp @@ -36,7 +36,7 @@ object TemplatingTest { } -class TemplatingTest extends FlatSpec with Matchers { +class TemplatingTest extends AnyFlatSpec with Matchers { private val engine = Velocity.newEngine() private val context = Velocity.newContext() diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/templating/VelocityTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/templating/VelocityTest.scala index 93181ccb7..851bd5e71 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/templating/VelocityTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/templating/VelocityTest.scala @@ -19,11 +19,11 @@ package com.dimajix.flowman.templating import java.io.StringWriter import org.apache.velocity.exception.MethodInvocationException -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class VelocityTest extends FlatSpec with Matchers { +class VelocityTest extends AnyFlatSpec with Matchers { private val engine = Velocity.newEngine() private val context = Velocity.newContext() diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/AssemblerTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/AssemblerTest.scala index 2fa3be22e..508a8dcff 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/AssemblerTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/AssemblerTest.scala @@ -22,15 +22,15 @@ import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.transforms.schema.Path import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession -class AssemblerTest extends FlatSpec with Matchers with LocalSparkSession { +class AssemblerTest extends AnyFlatSpec with Matchers with LocalSparkSession { private val inputJson = """ |{ @@ -40,8 +40,7 @@ class AssemblerTest extends FlatSpec with Matchers with LocalSparkSession { | "other_field":456 | } | }, - | "lala": { - | }, + | "lala": 23, | "embedded" : { | "structure": { | "secret": { @@ -98,7 +97,7 @@ class AssemblerTest extends FlatSpec with Matchers with LocalSparkSession { ) .columns( _.path(Path("")) - .keep(Seq(Path("lala"), Path("lolo"))) + .keep(Seq(Path("lala"))) ) .columns( _.path(Path("")) @@ -122,6 +121,7 @@ class AssemblerTest extends FlatSpec with Matchers with LocalSparkSession { StructField("other_field", LongType) ))) ))), + StructField("lala", LongType), StructField("embedded", StructType(Seq( StructField("struct_array", ArrayType( StructType(Seq( @@ -133,6 +133,7 @@ class AssemblerTest extends FlatSpec with Matchers with LocalSparkSession { StructField("public", StringType) ))) ))), + StructField("lala", LongType), StructField("sub_structure", StructType(Seq( StructField("value", ArrayType(LongType)) ))) @@ -145,6 +146,21 @@ class AssemblerTest extends FlatSpec with Matchers with 
LocalSparkSession { resultSchema.sparkType should be (resultDf.schema) } + it should "throw an exception on missing columns in 'keep'" in { + val asm = Assembler.builder() + .nest("clever_name")( + _.path(Path("stupidName")) + .drop(Path("secret.field")) + ) + .columns( + _.path(Path("")) + .keep(Seq(Path("lala"), Path("lolo"))) + ) + .build() + + an[AnalysisException] shouldBe thrownBy(asm.reassemble(inputDf)) + } + it should "support keep" in { val asm = Assembler.builder() .columns( diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/CaseFormatterTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/CaseFormatterTest.scala index 95fcb57ad..da0f51363 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/CaseFormatterTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/CaseFormatterTest.scala @@ -21,14 +21,14 @@ import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession -class CaseFormatterTest extends FlatSpec with Matchers with LocalSparkSession { +class CaseFormatterTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The CaseFormatter" should "work with nested schemas" in { val inputJson = """ diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/ExplodeTransformerTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/ExplodeTransformerTest.scala index 562b470a4..68afa56d3 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/ExplodeTransformerTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/ExplodeTransformerTest.scala @@ -21,15 +21,15 @@ import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.transforms.schema.Path import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession -class ExplodeTransformerTest extends FlatSpec with Matchers with LocalSparkSession { +class ExplodeTransformerTest extends AnyFlatSpec with Matchers with LocalSparkSession { private var inputDf: DataFrame = _ private var inputSchema: ftypes.StructType = _ diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/FlattenTransformerTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/FlattenTransformerTest.scala index bffd53162..edc146217 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/FlattenTransformerTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/FlattenTransformerTest.scala @@ -21,14 +21,14 @@ import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.{types => ftypes} 
import com.dimajix.spark.testing.LocalSparkSession -class FlattenTransformerTest extends FlatSpec with Matchers with LocalSparkSession { +class FlattenTransformerTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The FlattenTransformer" should "work with nested schemas" in { val inputJson = """ diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/LiftTransformerTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/LiftTransformerTest.scala index bcdb04ae9..66294f545 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/LiftTransformerTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/LiftTransformerTest.scala @@ -21,15 +21,15 @@ import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.transforms.schema.Path import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession -class LiftTransformerTest extends FlatSpec with Matchers with LocalSparkSession { +class LiftTransformerTest extends AnyFlatSpec with Matchers with LocalSparkSession { private var inputDf: DataFrame = _ private var inputSchema: ftypes.StructType = _ diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/SchemaEnforcerTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/SchemaEnforcerTest.scala index 9f504df34..ea3216278 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/SchemaEnforcerTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/SchemaEnforcerTest.scala @@ -23,13 +23,13 @@ import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.testing.LocalSparkSession -class SchemaEnforcerTest extends FlatSpec with Matchers with LocalSparkSession { +class SchemaEnforcerTest extends AnyFlatSpec with Matchers with LocalSparkSession { "A conforming schema" should "be generated for simple cases" in { val inputSchema = StructType(Seq( StructField("col1", StringType), @@ -53,29 +53,6 @@ class SchemaEnforcerTest extends FlatSpec with Matchers with LocalSparkSession { ))) } - it should "support a list of columns an types" in { - val inputSchema = StructType(Seq( - StructField("col1", StringType), - StructField("COL2", IntegerType), - StructField("col3", IntegerType) - )) - val requestedSchema = Seq( - "col2" -> "string", - "col1" -> "string", - "col4" -> "int" - ) - - val xfs = SchemaEnforcer(requestedSchema) - val columns = xfs.transform(inputSchema) - val inputDf = spark.createDataFrame(spark.sparkContext.emptyRDD[Row], inputSchema) - val outputDf = inputDf.select(columns:_*) - outputDf.schema should be (StructType(Seq( - StructField("col2", StringType), - StructField("col1", StringType), - StructField("col4", IntegerType) - ))) - } - it should "work with nested entities" in { val inputSchema = StructType(Seq( StructField("col1", StringType), diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/TypeReplacerTest.scala 
b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/TypeReplacerTest.scala index 7664673bb..ecf5ed83d 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/TypeReplacerTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/TypeReplacerTest.scala @@ -23,14 +23,14 @@ import org.apache.spark.sql.types.IntegerType import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession -class TypeReplacerTest extends FlatSpec with Matchers with LocalSparkSession { +class TypeReplacerTest extends AnyFlatSpec with Matchers with LocalSparkSession { private val inputJson = """ |{ diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/UnionTransformerTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/UnionTransformerTest.scala index 8de165c56..7d8a64145 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/UnionTransformerTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/UnionTransformerTest.scala @@ -21,8 +21,8 @@ import org.apache.spark.sql.types.IntegerType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession @@ -34,7 +34,7 @@ object UnionTransformerTest { case class Class3(col0:Double, col1:Int) } -class UnionTransformerTest extends FlatSpec with Matchers with LocalSparkSession { +class UnionTransformerTest extends AnyFlatSpec with Matchers with LocalSparkSession { import com.dimajix.flowman.transforms.UnionTransformerTest._ "The UnionTransformer" should "work" in { diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/schema/ColumnTreeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/schema/ColumnTreeTest.scala index 808e1fa02..54d077b48 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/schema/ColumnTreeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/schema/ColumnTreeTest.scala @@ -25,11 +25,11 @@ import org.apache.spark.sql.types.IntegerType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class ColumnTreeTest extends FlatSpec with Matchers { +class ColumnTreeTest extends AnyFlatSpec with Matchers { import com.dimajix.flowman.transforms.schema.ColumnTree.implicits._ "The ColumnTree" should "create the same schema via round-trip" in { diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/schema/PathTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/schema/PathTest.scala index bb34361c6..6b4991d51 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/schema/PathTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/schema/PathTest.scala @@ 
-16,11 +16,11 @@ package com.dimajix.flowman.transforms.schema -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class PathTest extends FlatSpec with Matchers { +class PathTest extends AnyFlatSpec with Matchers { "A Path" should "be constructible from a string" in { val path = Path("root") path.segments.size should be (1) diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/schema/SchemaTreeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/schema/SchemaTreeTest.scala index d51ed6aa2..199cbe2d0 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/transforms/schema/SchemaTreeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/transforms/schema/SchemaTreeTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.transforms.schema -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.types.ArrayType import com.dimajix.flowman.types.Field @@ -27,7 +27,7 @@ import com.dimajix.flowman.types.StringType import com.dimajix.flowman.types.StructType -class SchemaTreeTest extends FlatSpec with Matchers { +class SchemaTreeTest extends AnyFlatSpec with Matchers { import com.dimajix.flowman.transforms.schema.SchemaTree.implicits._ "The SchemaTree" should "create the same schema via round-trip" in { diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/ArrayTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/ArrayTypeTest.scala index 521b6e264..74f761937 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/ArrayTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/ArrayTypeTest.scala @@ -16,14 +16,14 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.util.ObjectMapper -class ArrayTypeTest extends FlatSpec with Matchers { +class ArrayTypeTest extends AnyFlatSpec with Matchers { "An ArrayType" should "be deserializable" in { val spec = """ diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/AvroSchemaUtilsTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/AvroSchemaUtilsTest.scala index 51cc9ab47..dde1ff614 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/AvroSchemaUtilsTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/AvroSchemaUtilsTest.scala @@ -19,11 +19,11 @@ package com.dimajix.flowman.types import scala.collection.JavaConverters._ import org.apache.avro.Schema.Type._ -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class AvroSchemaUtilsTest extends FlatSpec with Matchers { +class AvroSchemaUtilsTest extends AnyFlatSpec with Matchers { "AvroSchemaUtils" should "convert a Flowman schema of primitives to an Avro schema" in { val schema = Seq( Field("str_field", StringType, false), @@ -63,10 +63,82 @@ class AvroSchemaUtilsTest extends FlatSpec with Matchers { fields(8).name() should be ("float_field") fields(9).schema().getType should be (DOUBLE) fields(9).name() should be ("double_field") - fields(10).schema().getType should be (LONG) + fields(10).schema().getType should be (INT) 
fields(10).name() should be ("date_field") + //fields(10).schema().getLogicalType.getName should be ("date") fields(11).schema().getType should be (LONG) fields(11).name() should be ("timestamp_field") + //fields(11).schema().getLogicalType.getName should be ("timestamp-millis") + + println(result.toString(true).replace("\r\n", "\n")) + } + + it should "support all primitive types" in { + val spec = + """ + |{ + | "type" : "record", + | "name" : "topLevelRecord", + | "fields" : [ { + | "name" : "str_field", + | "type" : "string" + | }, { + | "name" : "int_field", + | "type" : "int" + | }, { + | "name" : "long_field", + | "type" : "long" + | }, { + | "name" : "short_field", + | "type" : "int" + | }, { + | "name" : "bool_field", + | "type" : "boolean" + | }, { + | "name" : "bin_field", + | "type" : "bytes" + | }, { + | "name" : "char_field", + | "type" : "string" + | }, { + | "name" : "varchar_field", + | "type" : "string" + | }, { + | "name" : "float_field", + | "type" : "float" + | }, { + | "name" : "double_field", + | "type" : "double" + | }, { + | "name" : "date_field", + | "type" : { "type" : "int", "logicalType" : "date" } + | }, { + | "name" : "nullable_date_field", + | "type" : [ { "type" : "int", "logicalType" : "date" }, "null" ] + | }, { + | "name" : "timestamp_field", + | "type" : { "type" : "long", "logicalType" : "timestamp-millis" } + | } ] + |} + |""".stripMargin + + val avroSchema = new org.apache.avro.Schema.Parser().parse(spec) + val fields = AvroSchemaUtils.fromAvro(avroSchema) + fields should be (Seq( + Field("str_field", StringType, false), + Field("int_field", IntegerType, false), + Field("long_field", LongType, false), + Field("short_field", IntegerType, false), + Field("bool_field", BooleanType, false), + Field("bin_field", BinaryType, false), + Field("char_field", StringType, false), + Field("varchar_field", StringType, false), + Field("float_field", FloatType, false), + Field("double_field", DoubleType, false), + Field("date_field", DateType, false), + Field("nullable_date_field", DateType, true), + Field("timestamp_field", TimestampType, false) + )) } it should "support nullable fields via unions" in { diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/BinaryTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/BinaryTypeTest.scala new file mode 100644 index 000000000..acf862acc --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/BinaryTypeTest.scala @@ -0,0 +1,44 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.types + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.util.ObjectMapper + + +class BinaryTypeTest extends AnyFlatSpec with Matchers { + "A BinaryType" should "be deserializable" in { + ObjectMapper.parse[FieldType]("binary") should be(BinaryType) + } + + it should "not support parsing" in { + an[NotImplementedError] should be thrownBy (BinaryType.parse("")) + an[NotImplementedError] should be thrownBy (BinaryType.interpolate(SingleValue(""))) + } + + it should "provide the correct Spark type" in { + BinaryType.sparkType should be (org.apache.spark.sql.types.BinaryType) + } + + it should "provide the correct SQL type" in { + BinaryType.sqlType should be ("binary") + BinaryType.sparkType.sql should be ("BINARY") + BinaryType.typeName should be ("binary") + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/BooleanTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/BooleanTypeTest.scala index 66550d89d..e38bcf167 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/BooleanTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/BooleanTypeTest.scala @@ -16,12 +16,18 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import com.dimajix.flowman.util.ObjectMapper -class BooleanTypeTest extends FlatSpec with Matchers { - "A BooleanType" should "parse strings" in { + +class BooleanTypeTest extends AnyFlatSpec with Matchers { + "A BooleanType" should "be deserializable" in { + ObjectMapper.parse[FieldType]("boolean") should be(BooleanType) + } + + it should "parse strings" in { BooleanType.parse("true") should be (true) BooleanType.parse("false") should be (false) } @@ -36,9 +42,12 @@ class BooleanTypeTest extends FlatSpec with Matchers { result.drop(1).head should be (false) } - it should "provide the correct SQL type" in { - val ftype = BooleanType + it should "provide the correct Spark type" in { + BooleanType.sparkType should be (org.apache.spark.sql.types.BooleanType) + } - ftype.sqlType should be ("boolean") + it should "provide the correct SQL type" in { + BooleanType.sqlType should be ("boolean") + BooleanType.sparkType.sql should be ("BOOLEAN") } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/ByteTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/ByteTypeTest.scala index 761c5d376..6c62ce4fd 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/ByteTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/ByteTypeTest.scala @@ -16,12 +16,19 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import com.dimajix.flowman.util.ObjectMapper -class ByteTypeTest extends FlatSpec with Matchers { - "A ByteType" should "parse strings" in { + +class ByteTypeTest extends AnyFlatSpec with Matchers { + "A ByteType" should "be deserializable" in { + ObjectMapper.parse[FieldType]("byte") should be(ByteType) + ObjectMapper.parse[FieldType]("tinyint") should be(ByteType) + } + + it should "parse strings" in { ByteType.parse("12") should be (12) } @@ -54,4 +61,14 @@ class ByteTypeTest extends FlatSpec with Matchers { val result2 = ByteType.interpolate(RangeValue("13","17"), Some("2")) 
result2.toSeq should be (Seq(12,14).map(_.toByte)) } + + it should "provide the correct Spark type" in { + ByteType.sparkType should be (org.apache.spark.sql.types.ByteType) + } + + it should "provide the correct SQL type" in { + ByteType.sqlType should be ("tinyint") + ByteType.typeName should be ("byte") + ByteType.sparkType.sql should be ("TINYINT") + } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/CalendarIntervalTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/CalendarIntervalTypeTest.scala new file mode 100644 index 000000000..42542ffe1 --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/CalendarIntervalTypeTest.scala @@ -0,0 +1,49 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.types + +import org.apache.spark.sql.SparkShim +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.util.ObjectMapper +import com.dimajix.spark.SPARK_VERSION_MAJOR +import com.dimajix.util.DateTimeUtils + + +class CalendarIntervalTypeTest extends AnyFlatSpec with Matchers { + "A CalendarIntervalType" should "be deserializable" in { + ObjectMapper.parse[FieldType]("calendarinterval") should be(CalendarIntervalType) + } + + it should "parse strings" in { + CalendarIntervalType.parse("interval 12 minute") should be (SparkShim.calendarInterval(0, 0, 12*DateTimeUtils.MICROS_PER_MINUTE)) + } + + it should "provide the correct Spark type" in { + CalendarIntervalType.sparkType should be (org.apache.spark.sql.types.CalendarIntervalType) + } + + it should "provide the correct SQL type" in { + CalendarIntervalType.sqlType should be ("calendarinterval") + CalendarIntervalType.typeName should be ("calendarinterval") + if (SPARK_VERSION_MAJOR <= 2) + CalendarIntervalType.sparkType.sql should be ("CALENDARINTERVAL") + else + CalendarIntervalType.sparkType.sql should be ("INTERVAL") + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/CharTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/CharTypeTest.scala index efe25f3b9..9fa0e4b3d 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/CharTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/CharTypeTest.scala @@ -16,22 +16,15 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.util.ObjectMapper -class CharTypeTest extends FlatSpec with Matchers { +class CharTypeTest extends AnyFlatSpec with Matchers { "A varchar type" should "be deserializable" in { - val spec = - """ - |char(14) - """.stripMargin - - val result = ObjectMapper.parse[FieldType](spec) - result.asInstanceOf[CharType].length should be (14) - result.sparkType should be (org.apache.spark.sql.types.StringType) + ObjectMapper.parse[FieldType]("char(14)") 
should be (CharType(14)) } it should "parse strings" in { @@ -48,10 +41,16 @@ class CharTypeTest extends FlatSpec with Matchers { result(1) should be ("27") } + it should "provide the correct Spark type" in { + val ftype = CharType(10) + ftype.sparkType should be (org.apache.spark.sql.types.StringType) + } + it should "provide the correct SQL type" in { val ftype = CharType(10) ftype.sqlType should be ("char(10)") ftype.typeName should be ("char(10)") + ftype.sparkType.sql should be ("STRING") } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/DateTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/DateTypeTest.scala index a41357735..3c1becce8 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/DateTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/DateTypeTest.scala @@ -18,12 +18,18 @@ package com.dimajix.flowman.types import java.sql.Date -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import com.dimajix.flowman.util.ObjectMapper -class DateTypeTest extends FlatSpec with Matchers { - "A DateType" should "parse strings" in { + +class DateTypeTest extends AnyFlatSpec with Matchers { + "A DateType" should "be deserializable" in { + ObjectMapper.parse[FieldType]("date") should be(DateType) + } + + it should "parse strings" in { DateType.parse("2017-12-01") should be (Date.valueOf("2017-12-01")) } @@ -77,9 +83,12 @@ class DateTypeTest extends FlatSpec with Matchers { result2(3) should be (Date.valueOf("2017-12-16")) } - it should "provide the correct SQL type" in { - val ftype = DateType + it should "provide the correct Spark type" in { + DateType.sparkType should be (org.apache.spark.sql.types.DateType) + } - ftype.sqlType should be ("date") + it should "provide the correct SQL type" in { + DateType.sqlType should be ("date") + DateType.sparkType.sql should be ("DATE") } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/DecimalTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/DecimalTypeTest.scala index fa32428f0..67814271d 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/DecimalTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/DecimalTypeTest.scala @@ -16,13 +16,15 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import java.math.BigDecimal + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.util.ObjectMapper -class DecimalTypeTest extends FlatSpec with Matchers { +class DecimalTypeTest extends AnyFlatSpec with Matchers { "A decimal type" should "be deserializable" in { val spec = """ @@ -32,7 +34,11 @@ class DecimalTypeTest extends FlatSpec with Matchers { val result = ObjectMapper.parse[FieldType](spec) result.asInstanceOf[DecimalType].precision should be (10) result.asInstanceOf[DecimalType].scale should be (4) - result.sparkType should be (org.apache.spark.sql.types.DecimalType(10,4)) + } + + it should "provide the correct Spark type" in { + val ftype = DecimalType(10,4) + ftype.sparkType should be (org.apache.spark.sql.types.DecimalType(10,4)) } it should "provide the correct SQL type" in { @@ -40,4 +46,9 @@ class DecimalTypeTest extends FlatSpec with Matchers { ftype.sqlType should be ("decimal(10,4)") ftype.typeName should be ("decimal(10,4)") } + + it should "support parsing" in { + val ftype = 
DecimalType(10,4) + ftype.parse("10.3") should be (new BigDecimal(103).divide(new BigDecimal(10))) + } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/DoubleTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/DoubleTypeTest.scala index 55802a0b8..7de00f404 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/DoubleTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/DoubleTypeTest.scala @@ -16,11 +16,11 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class DoubleTypeTest extends FlatSpec with Matchers { +class DoubleTypeTest extends AnyFlatSpec with Matchers { "A DoubleType" should "parse strings" in { DoubleType.parse("1.0") should be (1.0) } @@ -70,8 +70,12 @@ class DoubleTypeTest extends FlatSpec with Matchers { result2.toSeq should be (Seq(12.0,16.0)) } + it should "provide the correct Spark type" in { + DoubleType.sparkType should be (org.apache.spark.sql.types.DoubleType) + } + it should "provide the correct SQL type" in { - val ftype = DoubleType - ftype.sqlType should be ("double") + DoubleType.sqlType should be ("double") + DoubleType.sparkType.sql should be ("DOUBLE") } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/DurationTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/DurationTypeTest.scala new file mode 100644 index 000000000..30a2ef830 --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/DurationTypeTest.scala @@ -0,0 +1,87 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.types + +import java.time.Duration + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.util.ObjectMapper + + +class DurationTypeTest extends AnyFlatSpec with Matchers { + "A DurationType" should "be deserializable" in { + ObjectMapper.parse[FieldType]("duration") should be(DurationType) + } + + it should "parse strings" in { + DurationType.parse("P2D") should be (Duration.ofDays(2)) + DurationType.parse("PT3H") should be (Duration.ofHours(3)) + } + + it should "support interpolation of SingleValues" in { + DurationType.interpolate(SingleValue("P2D")).head should be (Duration.ofDays(2)) + } + + it should "support interpolation of SingleValues with granularity" in { + DurationType.interpolate(SingleValue("P2DT3H"), Some("PT1H")).head should be (Duration.ofHours(2*24+3)) + DurationType.interpolate(SingleValue("P2DT3H"), Some("P1D")).head should be (Duration.ofDays(2)) + } + + it should "support interpolation of ArrayValues" in { + DurationType.interpolate(ArrayValue(Array("P2DT3H","P3D"))).toSeq should + be (Seq(Duration.ofHours(2*24+3), Duration.ofHours(3*24))) + } + + it should "support interpolation of ArrayValues with granularity" in { + DurationType.interpolate(ArrayValue(Array("P2DT3H","P3D")), Some("P1D")).toSeq should + be (Seq(Duration.ofDays(2), Duration.ofDays(3))) + DurationType.interpolate(ArrayValue(Array("P2DT3H","P3D")), Some("PT1H")).toSeq should + be (Seq(Duration.ofHours(2*24+3), Duration.ofHours(3*24))) + } + + it should "support interpolation of Ranges" in { + DurationType.interpolate(RangeValue("P1D","P1DT6H", Some("PT1H")), None).toSeq should + be(Seq( + Duration.ofHours(1*24), + Duration.ofHours(1*24+1), + Duration.ofHours(1*24+2), + Duration.ofHours(1*24+3), + Duration.ofHours(1*24+4), + Duration.ofHours(1*24+5) + )) + } + + it should "support interpolation of Ranges with granularity" in { + DurationType.interpolate(RangeValue("P1D","P1DT6H", Some("PT1H")), Some("PT2H")).toSeq should + be(Seq( + Duration.ofHours(1*24), + Duration.ofHours(1*24+2), + Duration.ofHours(1*24+4) + )) + } + + it should "provide the correct Spark type" in { + an[NotImplementedError] should be thrownBy(DurationType.sparkType) + } + + it should "provide the correct SQL type" in { + DurationType.sqlType should be ("duration") + DurationType.typeName should be ("duration") + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/FieldTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/FieldTest.scala index 5d448a255..87558eb46 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/FieldTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/FieldTest.scala @@ -17,14 +17,14 @@ package com.dimajix.flowman.types import org.apache.spark.sql.types.MetadataBuilder -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.util.ObjectMapper -class FieldTest extends FlatSpec with Matchers { +class FieldTest extends AnyFlatSpec with Matchers { "A Field" should "be nullable per default" in { val spec = """ @@ -32,9 +32,6 @@ class FieldTest extends FlatSpec with Matchers { |type: String """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be 
("lala") @@ -50,9 +47,6 @@ class FieldTest extends FlatSpec with Matchers { |nullable: true """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -68,9 +62,6 @@ class FieldTest extends FlatSpec with Matchers { |nullable: false """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (false) result.name should be ("lala") @@ -87,9 +78,6 @@ class FieldTest extends FlatSpec with Matchers { |description: Some description """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -107,9 +95,6 @@ class FieldTest extends FlatSpec with Matchers { |size: 27 """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -127,9 +112,6 @@ class FieldTest extends FlatSpec with Matchers { |default: 27 """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -147,9 +129,6 @@ class FieldTest extends FlatSpec with Matchers { |default: """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -175,9 +154,6 @@ class FieldTest extends FlatSpec with Matchers { |default: null """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -195,9 +171,6 @@ class FieldTest extends FlatSpec with Matchers { |default: "" """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -214,9 +187,6 @@ class FieldTest extends FlatSpec with Matchers { |type: String """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -230,9 +200,6 @@ class FieldTest extends FlatSpec with Matchers { |type: TEXT """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -252,9 +219,6 @@ class FieldTest extends FlatSpec with Matchers { | nullable: false """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -278,9 +242,6 @@ class FieldTest extends FlatSpec with Matchers { | elementType: String """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -297,9 
+258,6 @@ class FieldTest extends FlatSpec with Matchers { | elementType: String """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -316,9 +274,6 @@ class FieldTest extends FlatSpec with Matchers { | elementType: String """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -341,9 +296,6 @@ class FieldTest extends FlatSpec with Matchers { | type: Integer """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -370,9 +322,6 @@ class FieldTest extends FlatSpec with Matchers { |type: decimal(10,4) """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -387,9 +336,6 @@ class FieldTest extends FlatSpec with Matchers { |type: varchar(14) """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") @@ -404,9 +350,6 @@ class FieldTest extends FlatSpec with Matchers { |type: char(14) """.stripMargin - val session = Session.builder().build() - implicit val context = session.context - val result = ObjectMapper.parse[Field](spec) result.nullable should be (true) result.name should be ("lala") diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/FieldValueTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/FieldValueTest.scala index e43df205d..3482a8012 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/FieldValueTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/FieldValueTest.scala @@ -16,13 +16,13 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.util.ObjectMapper -class FieldValueTest extends FlatSpec with Matchers { +class FieldValueTest extends AnyFlatSpec with Matchers { "A String" should "be deserializable as a SingleValue" in { val spec ="some_string" val value = ObjectMapper.parse[FieldValue](spec) diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/FloatTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/FloatTypeTest.scala index 0eb4278cc..db93f44a2 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/FloatTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/FloatTypeTest.scala @@ -16,12 +16,18 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import com.dimajix.flowman.util.ObjectMapper -class FloatTypeTest extends FlatSpec with Matchers { - "A FloatType" should "parse strings" in { + +class FloatTypeTest extends AnyFlatSpec with Matchers { + "A FloatType" should "be deserializable" in { + ObjectMapper.parse[FieldType]("float") should be(FloatType) + } + + it should "parse strings" in { 
FloatType.parse("1.0") should be (1.0) } @@ -35,8 +41,13 @@ class FloatTypeTest extends FlatSpec with Matchers { result(1) should be (2.0) } + it should "provide the correct Spark type" in { + FloatType.sparkType should be (org.apache.spark.sql.types.FloatType) + } + it should "provide the correct SQL type" in { - val ftype = FloatType - ftype.sqlType should be ("float") + FloatType.sqlType should be ("float") + FloatType.sparkType.sql should be ("FLOAT") + FloatType.typeName should be ("float") } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/IntegerTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/IntegerTypeTest.scala index aca2870f5..e73ec35a4 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/IntegerTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/IntegerTypeTest.scala @@ -16,21 +16,19 @@ package com.dimajix.flowman.types -import com.fasterxml.jackson.databind.ObjectMapper -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory -import com.fasterxml.jackson.module.scala.DefaultScalaModule -import org.scalatest.FlatSpec -import org.scalatest.Matchers - - -class IntegerTypeTest extends FlatSpec with Matchers { - lazy val mapper = { - val mapper = new ObjectMapper(new YAMLFactory()) - mapper.registerModule(DefaultScalaModule) - mapper +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.util.ObjectMapper + + +class IntegerTypeTest extends AnyFlatSpec with Matchers { + "A IntegerType" should "be deserializable" in { + ObjectMapper.parse[FieldType]("int") should be(IntegerType) + ObjectMapper.parse[FieldType]("integer") should be(IntegerType) } - "A IntegerType" should "parse strings" in { + it should "parse strings" in { IntegerType.parse("12") should be (12) } @@ -99,23 +97,13 @@ class IntegerTypeTest extends FlatSpec with Matchers { result3.toSeq should be (Seq(16,20)) } - "A int type" should "be deserializable" in { - val spec = "int" - - val result = mapper.readValue(spec, classOf[FieldType]) - result should be (IntegerType) - result.sparkType should be (org.apache.spark.sql.types.IntegerType) - } - it should "be deserializable in long form" in { - val spec = "integer" - - val result = mapper.readValue(spec, classOf[FieldType]) - result should be (IntegerType) - result.sparkType should be (org.apache.spark.sql.types.IntegerType) + it should "provide the correct Spark type" in { + IntegerType.sparkType should be (org.apache.spark.sql.types.IntegerType) } it should "provide the correct SQL type" in { - val ftype = IntegerType - ftype.sqlType should be ("integer") + IntegerType.sqlType should be ("integer") + IntegerType.sparkType.sql should be ("INT") + IntegerType.typeName should be ("integer") } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/LongTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/LongTypeTest.scala index b4d03bbfb..b899604eb 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/LongTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/LongTypeTest.scala @@ -16,12 +16,19 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import com.dimajix.flowman.util.ObjectMapper -class LongTypeTest extends FlatSpec with Matchers { - "A LongType" should "parse strings" in { + +class LongTypeTest extends AnyFlatSpec with 
Matchers { + "A LongType" should "be deserializable" in { + ObjectMapper.parse[FieldType]("long") should be(LongType) + ObjectMapper.parse[FieldType]("bigint") should be(LongType) + } + + it should "parse strings" in { LongType.parse("12") should be (12) } @@ -81,8 +88,13 @@ class LongTypeTest extends FlatSpec with Matchers { result2.toSeq should be (Seq(12,16)) } + it should "provide the correct Spark type" in { + LongType.sparkType should be (org.apache.spark.sql.types.LongType) + } + it should "provide the correct SQL type" in { - val ftype = LongType - ftype.sqlType should be ("long") + LongType.sqlType should be ("bigint") + LongType.typeName should be ("long") + LongType.sparkType.sql should be ("BIGINT") } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/MapTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/MapTypeTest.scala index 0f1b14ab4..ebedd1d15 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/MapTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/MapTypeTest.scala @@ -16,14 +16,14 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.util.ObjectMapper -class MapTypeTest extends FlatSpec with Matchers { +class MapTypeTest extends AnyFlatSpec with Matchers { "An MapType" should "be deserializable" in { val spec = """ diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/NullTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/NullTypeTest.scala new file mode 100644 index 000000000..2ccae8ef2 --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/NullTypeTest.scala @@ -0,0 +1,44 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.types + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.util.ObjectMapper + + +class NullTypeTest extends AnyFlatSpec with Matchers { + "A NullType" should "be deserializable" in { + ObjectMapper.parse[FieldType]("\"null\"") should be(NullType) + } + + it should "not support parsing" in { + an[NotImplementedError] should be thrownBy (NullType.parse("")) + an[NotImplementedError] should be thrownBy (NullType.interpolate(SingleValue(""))) + } + + it should "provide the correct Spark type" in { + NullType.sparkType should be (org.apache.spark.sql.types.NullType) + } + + it should "provide the correct SQL type" in { + NullType.sqlType should be ("null") + NullType.sparkType.sql should be ("NULL") + NullType.typeName should be ("null") + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/RecordTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/RecordTest.scala new file mode 100644 index 000000000..b22a0a7ae --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/RecordTest.scala @@ -0,0 +1,273 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.types + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.types.RecordTest.RecordListWrapper +import com.dimajix.flowman.util.ObjectMapper + + +object RecordTest { + class RecordListWrapper { + var records:Seq[Record] = Seq() + } +} + +class RecordTest extends AnyFlatSpec with Matchers { + "A Record" should "be deserializable from a single numeric value" in { + val spec = + """ + |3 + """.stripMargin + + val result = ObjectMapper.parse[Record](spec) + result should be (ValueRecord("3")) + } + it should "be deserializable from a single boolean value" in { + val spec = + """ + |true + """.stripMargin + + val result = ObjectMapper.parse[Record](spec) + result should be (ValueRecord("true")) + } + it should "not be deserializable from a single null value" in { + val spec = + """ + |null + """.stripMargin + + val result = ObjectMapper.parse[Record](spec) + result should be (null) + } + it should "be deserializable from a single string value" in { + val spec = + """ + |"text" + """.stripMargin + + val result = ObjectMapper.parse[Record](spec) + result should be (ValueRecord("text")) + } + + it should "be deserializable from a map" in { + val spec = + """ + |field_1: lala + |field_2: null + |field_3: true + |field_4: 12 + |""".stripMargin + + val result = ObjectMapper.parse[Record](spec) + result should be (MapRecord(Map("field_1" -> "lala", "field_2" -> null, "field_3" -> "true", "field_4" -> "12"))) + + } + + it should "be deserializable from an array" in { + val spec = + """ + |[1,"text",null,"",true] + """.stripMargin + + val result = ObjectMapper.parse[Record](spec) + result should be (ArrayRecord(Seq("1","text",null,"","true"))) + } + it 
should "be deserializable from an empty array" in { + val spec = + """ + |[] + """.stripMargin + + val result = ObjectMapper.parse[Record](spec) + result should be (ArrayRecord(Seq())) + } + + "An embedded list of Records" should "be deserializable from an list" in { + val spec = + """ + |records: [] + """.stripMargin + + val result = ObjectMapper.parse[RecordListWrapper](spec) + result.records should be (Seq()) + } + it should "be deserializable from an single value" in { + val spec = + """ + |records: 1 + """.stripMargin + + val result = ObjectMapper.parse[RecordListWrapper](spec) + result.records should be (Seq(ValueRecord("1"))) + } + it should "not be deserializable from a null value" in { + val spec = + """ + |records: null + """.stripMargin + + val result = ObjectMapper.parse[RecordListWrapper](spec) + result.records should be (null) + } + + it should "be deserializable from an singleton list with a value" in { + val spec = + """ + |records: + | - 1 + """.stripMargin + + val result = ObjectMapper.parse[RecordListWrapper](spec) + result.records should be (Seq(ValueRecord("1"))) + } + it should "not be deserializable from a singleton list with a null value" in { + val spec = + """ + |records: + | - null + """.stripMargin + + val result = ObjectMapper.parse[RecordListWrapper](spec) + result.records should be (Seq(null)) + } + + it should "be deserializable from an empty nested list" in { + val spec = + """ + |records: + | - [] + """.stripMargin + + val result = ObjectMapper.parse[RecordListWrapper](spec) + result.records should be (Seq(ArrayRecord(Seq()))) + } + it should "be deserializable from an nested list with a single null value" in { + val spec = + """ + |records: + | - [null] + """.stripMargin + + val result = ObjectMapper.parse[RecordListWrapper](spec) + result.records should be (Seq(ArrayRecord(Seq(null)))) + } + it should "be deserializable from an nested list" in { + val spec = + """ + |records: + | - [1,2,"text",null] + """.stripMargin + + val result = ObjectMapper.parse[RecordListWrapper](spec) + result.records should be (Seq(ArrayRecord(Seq("1","2","text",null)))) + } + + "An embedded list of maps" should "be deseriazable" in { + val spec = + """ + |records: + | - field_1: lala + | field_2: null + | field_3: true + | field_4: 12 + |""".stripMargin + + val result = ObjectMapper.parse[RecordListWrapper](spec) + result.records should be (Seq(MapRecord(Map("field_1" -> "lala", "field_2" -> null, "field_3" -> "true", "field_4" -> "12")))) + } + + "A ValueRecord" should "return its value" in { + val schema = StructType(Seq( + Field("string", StringType) + )) + val record = ValueRecord("2") + + record.toArray(schema).toSeq should be (Seq("2")) + } + it should "fill up default values" in { + val schema = StructType(Seq( + Field("c1", StringType), + Field("c2", IntegerType, default = Some("12")), + Field("c3", StringType) + )) + val record = ValueRecord("2") + + record.toArray(schema).toSeq should be (Seq("2", "12", null)) + } + it should "support map" in { + val record = ValueRecord("a") + record.map(_.toUpperCase) should be (ValueRecord("A")) + } + it should "support map with a null value" in { + val record = ValueRecord(null) + record.map(v => if (v != null) v.toUpperCase else null) should be (ValueRecord(null)) + } + + "An ArrayRecord" should "return its values" in { + val schema = StructType(Seq( + Field("c1", StringType), + Field("c2", StringType) + )) + val record = ArrayRecord(Seq("2","str")) + + record.toArray(schema).toSeq should be (Seq("2","str")) + } + it should 
"fill up default values" in { + val schema = StructType(Seq( + Field("c1", StringType), + Field("c2", StringType), + Field("c3", IntegerType, default = Some("12")), + Field("c4", StringType) + )) + val record = ArrayRecord("2",null) + + record.toArray(schema).toSeq should be (Seq("2", null, "12", null)) + } + it should "drop additional values" in { + val schema = StructType(Seq( + Field("c1", StringType), + Field("c2", IntegerType) + )) + val record = ArrayRecord("2","3","4") + + record.toArray(schema).toSeq should be (Seq("2", "3")) + } + it should "support map" in { + val record = ArrayRecord("a", null) + record.map(v => if (v != null) v.toUpperCase else null) should be (ArrayRecord("A", null)) + } + + "A MapRecord" should "return correct values" in { + val schema = StructType(Seq( + Field("c1", StringType), + Field("c2", StringType), + Field("c3", IntegerType, default = Some("12")), + Field("c4", StringType) + )) + val record = MapRecord("c1" -> "2", "c2" -> null, "c5" -> "xxx") + + record.toArray(schema).toSeq should be (Seq("2", null, "12", null)) + } + it should "support map" in { + val record = MapRecord("f1" -> "a", "f2" -> null) + record.map(v => if (v != null) v.toUpperCase else null) should be (MapRecord("f1" -> "A", "f2" -> null)) + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/SchemaUtilsTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/SchemaUtilsTest.scala index 37b22607f..d54a17267 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/SchemaUtilsTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/SchemaUtilsTest.scala @@ -16,11 +16,11 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class SchemaUtilsTest extends FlatSpec with Matchers { +class SchemaUtilsTest extends AnyFlatSpec with Matchers { "SchemaUtils.merge" should "merge two fields" in { SchemaUtils.merge( Field("f1", IntegerType, true), diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/ShortTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/ShortTypeTest.scala index 372fbe551..452982e5e 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/ShortTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/ShortTypeTest.scala @@ -16,12 +16,19 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import com.dimajix.flowman.util.ObjectMapper -class ShortTypeTest extends FlatSpec with Matchers { - "A ShortType" should "parse strings" in { + +class ShortTypeTest extends AnyFlatSpec with Matchers { + "A ShortType" should "be deserializable" in { + ObjectMapper.parse[FieldType]("short") should be(ShortType) + ObjectMapper.parse[FieldType]("smallint") should be(ShortType) + } + + it should "parse strings" in { ShortType.parse("12") should be (12) } @@ -81,8 +88,13 @@ class ShortTypeTest extends FlatSpec with Matchers { result2.toSeq should be (Seq(12,14)) } + it should "provide the correct Spark type" in { + ShortType.sparkType should be (org.apache.spark.sql.types.ShortType) + } + it should "provide the correct SQL type" in { - val ftype = ShortType - ftype.sqlType should be ("short") + ShortType.sqlType should be ("smallint") + ShortType.typeName should be ("short") + ShortType.sparkType.sql should be ("SMALLINT") 
} } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/StringTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/StringTypeTest.scala index 3625ebf7e..22e53d838 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/StringTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/StringTypeTest.scala @@ -16,12 +16,18 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import com.dimajix.flowman.util.ObjectMapper -class StringTypeTest extends FlatSpec with Matchers { - "A StringType" should "parse strings" in { + +class StringTypeTest extends AnyFlatSpec with Matchers { + "A StringType" should "be deserializable" in { + ObjectMapper.parse[FieldType]("string") should be(StringType) + } + + it should "parse strings" in { StringType.parse("lala") should be ("lala") } @@ -35,9 +41,13 @@ class StringTypeTest extends FlatSpec with Matchers { result(1) should be ("27") } + it should "provide the correct Spark type" in { + StringType.sparkType should be (org.apache.spark.sql.types.StringType) + } + it should "provide the correct SQL type" in { - val ftype = StringType - ftype.sqlType should be ("string") + StringType.sqlType should be ("string") + StringType.sparkType.sql should be ("STRING") } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/StructTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/StructTypeTest.scala index e3754c152..58e0b40df 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/StructTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/StructTypeTest.scala @@ -16,14 +16,15 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class StructTypeTest extends FlatSpec with Matchers { +class StructTypeTest extends AnyFlatSpec with Matchers { "A StructType" should "provide the correct SQL type (1)" in { val ftype = StructType(Seq()) ftype.sqlType should be ("struct<>") + ftype.sparkType should be (org.apache.spark.sql.types.StructType(Seq())) } it should "provide the correct SQL type (2)" in { diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/TimestampTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/TimestampTypeTest.scala index 17bde78be..92c306b69 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/TimestampTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/TimestampTypeTest.scala @@ -21,15 +21,21 @@ import java.time.LocalDateTime import java.time.ZoneOffset import java.time.format.DateTimeFormatter -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers +import com.dimajix.flowman.util.ObjectMapper -class TimestampTypeTest extends FlatSpec with Matchers { + +class TimestampTypeTest extends AnyFlatSpec with Matchers { private val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss[.S]").withZone(ZoneOffset.UTC) def parseDateTime(value:String) = new Timestamp(LocalDateTime.parse(value, formatter).toEpochSecond(ZoneOffset.UTC) * 1000l) - "A TimestampType" should "parse strings" in { + "A TimestampType" should "be deserializable" in { + ObjectMapper.parse[FieldType]("timestamp") should 
be(TimestampType) + } + + it should "parse strings" in { TimestampType.parse("2017-12-01T12:21:20").toTimestamp should be (parseDateTime("2017-12-01T12:21:20")) TimestampType.parse("2017-12-01T12:21:20Z").toTimestamp should be (parseDateTime("2017-12-01T12:21:20")) TimestampType.parse("2017-12-01T12:21:20+00").toTimestamp should be (parseDateTime("2017-12-01T12:21:20")) @@ -164,8 +170,12 @@ class TimestampTypeTest extends FlatSpec with Matchers { result3(3).toTimestamp should be (parseDateTime("2017-12-16T00:00:00")) } + it should "provide the correct Spark type" in { + TimestampType.sparkType should be (org.apache.spark.sql.types.TimestampType) + } + it should "provide the correct SQL type" in { - val ftype = TimestampType - ftype.sqlType should be ("timestamp") + TimestampType.sqlType should be ("timestamp") + TimestampType.sparkType.sql should be ("TIMESTAMP") } } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/types/VarcharTypeTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/types/VarcharTypeTest.scala index b43366783..024025384 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/types/VarcharTypeTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/types/VarcharTypeTest.scala @@ -16,22 +16,15 @@ package com.dimajix.flowman.types -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.util.ObjectMapper -class VarcharTypeTest extends FlatSpec with Matchers { +class VarcharTypeTest extends AnyFlatSpec with Matchers { "A varchar type" should "be deserializable" in { - val spec = - """ - |varchar(14) - """.stripMargin - - val result = ObjectMapper.parse[FieldType](spec) - result.asInstanceOf[VarcharType].length should be (14) - result.sparkType should be (org.apache.spark.sql.types.StringType) + ObjectMapper.parse[FieldType]("varchar(14)") should be (VarcharType(14)) } it should "parse strings" in { @@ -48,11 +41,15 @@ class VarcharTypeTest extends FlatSpec with Matchers { result(1) should be ("27") } + it should "provide the correct Spark type" in { + VarcharType(10).sparkType should be (org.apache.spark.sql.types.StringType) + } + it should "provide the correct SQL type" in { val ftype = VarcharType(10) ftype.sqlType should be ("varchar(10)") ftype.typeName should be ("varchar(10)") + ftype.sparkType.sql should be ("STRING") } - } diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/util/ConsoleUtilsTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/util/ConsoleUtilsTest.scala new file mode 100644 index 000000000..b6bda3fab --- /dev/null +++ b/flowman-core/src/test/scala/com/dimajix/flowman/util/ConsoleUtilsTest.scala @@ -0,0 +1,55 @@ +/* + * Copyright 2018 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.util + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.spark.testing.LocalSparkSession + + +class ConsoleUtilsTest extends AnyFlatSpec with Matchers with LocalSparkSession { + val inputJson = + """ + |{ + | "int_field":123, + | "bool_field":true, + | "null_field":null + |}""".stripMargin + + "ConsoleUtils" should "show DataFrames as CSV" in { + val spark = this.spark + import spark.implicits._ + + val inputRecords = Seq(inputJson.replace("\n","")) + val inputDs = spark.createDataset(inputRecords) + val df = spark.read.json(inputDs) + + ConsoleUtils.showDataFrame(df, csv=true) + } + + it should "show DataFrames" in { + val spark = this.spark + import spark.implicits._ + + val inputRecords = Seq(inputJson.replace("\n","")) + val inputDs = spark.createDataset(inputRecords) + val df = spark.read.json(inputDs) + + ConsoleUtils.showDataFrame(df, csv=false) + } +} diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/util/SchemaUtilsTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/util/SchemaUtilsTest.scala index e6fc22f93..a9867ddd4 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/util/SchemaUtilsTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/util/SchemaUtilsTest.scala @@ -30,14 +30,14 @@ import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType import org.apache.spark.sql.types.VarcharType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.testing.LocalSparkSession import com.dimajix.spark.testing.QueryTest -class SchemaUtilsTest extends FlatSpec with Matchers with LocalSparkSession with QueryTest { +class SchemaUtilsTest extends AnyFlatSpec with Matchers with LocalSparkSession with QueryTest { "SchemaUtils.toLowerCase" should "convert all names to lower case" in { val schema = StructType( StructField("Name", StringType) :: diff --git a/flowman-core/src/test/scala/com/dimajix/flowman/util/UtcTimestampTest.scala b/flowman-core/src/test/scala/com/dimajix/flowman/util/UtcTimestampTest.scala index 1e7088d74..ad9be5c83 100644 --- a/flowman-core/src/test/scala/com/dimajix/flowman/util/UtcTimestampTest.scala +++ b/flowman-core/src/test/scala/com/dimajix/flowman/util/UtcTimestampTest.scala @@ -21,11 +21,11 @@ import java.time.LocalDateTime import java.time.ZoneOffset import java.time.format.DateTimeFormatter -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class UtcTimestampTest extends FlatSpec with Matchers { +class UtcTimestampTest extends AnyFlatSpec with Matchers { private val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm[:ss][.S]").withZone(ZoneOffset.UTC) def parseDateTime(value:String) = new Timestamp(LocalDateTime.parse(value, formatter).toEpochSecond(ZoneOffset.UTC) * 1000l) diff --git a/flowman-dist/bin/flowexec b/flowman-dist/bin/flowexec index 6334d55fd..7be84c1ed 100755 --- a/flowman-dist/bin/flowexec +++ b/flowman-dist/bin/flowexec @@ -8,5 +8,6 @@ APP_VERSION="${project.version}" APP_MAIN="com.dimajix.flowman.tools.exec.Driver" APP_JAR=$FLOWMAN_HOME/lib/"$APP_NAME-$APP_VERSION.jar" +LIB_JARS="${flowman-tools.classpath}" -spark_submit $APP_JAR $APP_MAIN "$@" +spark_submit $APP_JAR $LIB_JARS $APP_MAIN "$@" diff --git 
a/flowman-dist/bin/flowman b/flowman-dist/bin/flowman index 9abf9f97a..7a3c831b4 100755 --- a/flowman-dist/bin/flowman +++ b/flowman-dist/bin/flowman @@ -8,5 +8,6 @@ APP_VERSION="${project.version}" APP_MAIN="com.dimajix.flowman.tools.main.Driver" APP_JAR=$FLOWMAN_HOME/lib/"$APP_NAME-$APP_VERSION.jar" +LIB_JARS="${flowman-tools.classpath}" -spark_submit $APP_JAR $APP_MAIN "$@" +spark_submit $APP_JAR $LIB_JARS $APP_MAIN "$@" diff --git a/flowman-dist/bin/flowserver b/flowman-dist/bin/flowserver index 31b4d601a..af7fd6f85 100755 --- a/flowman-dist/bin/flowserver +++ b/flowman-dist/bin/flowserver @@ -8,5 +8,6 @@ APP_VERSION="${project.version}" APP_MAIN="com.dimajix.flowman.server.Application" APP_JAR=$FLOWMAN_HOME/lib/"$APP_NAME-$APP_VERSION.jar" +LIB_JARS="${flowman-server.classpath}" -spark_submit $APP_JAR $APP_MAIN "$@" +spark_submit $APP_JAR $LIB_JARS $APP_MAIN "$@" diff --git a/flowman-dist/bin/flowshell b/flowman-dist/bin/flowshell index 531210024..b7499bba2 100755 --- a/flowman-dist/bin/flowshell +++ b/flowman-dist/bin/flowshell @@ -8,5 +8,6 @@ APP_VERSION="${project.version}" APP_MAIN="com.dimajix.flowman.tools.shell.Shell" APP_JAR=$FLOWMAN_HOME/lib/"$APP_NAME-$APP_VERSION.jar" +LIB_JARS="${flowman-tools.classpath}" -spark_submit $APP_JAR $APP_MAIN "$@" +spark_submit $APP_JAR $LIB_JARS $APP_MAIN "$@" diff --git a/flowman-dist/conf/default-namespace.yml.template b/flowman-dist/conf/default-namespace.yml.template index 603d1be54..9e08ec20d 100644 --- a/flowman-dist/conf/default-namespace.yml.template +++ b/flowman-dist/conf/default-namespace.yml.template @@ -34,12 +34,13 @@ config: # This section enables plugins. You may want to remove plugins which are of no use for you. plugins: - - flowman-example - flowman-aws - flowman-azure - flowman-kafka - flowman-mariadb - flowman-mysql + - flowman-swagger + - flowman-json # The 'store' defines how Flowman can reference entities in different projects. diff --git a/flowman-dist/libexec/flowman-common.sh b/flowman-dist/libexec/flowman-common.sh index 79406c666..1a2bf6050 100644 --- a/flowman-dist/libexec/flowman-common.sh +++ b/flowman-dist/libexec/flowman-common.sh @@ -75,13 +75,11 @@ fi spark_submit() { - LIB_JARS=$(ls $FLOWMAN_HOME/lib/*.jar | awk -vORS=, '{ print $1 }' | sed 's/,$/\n/') - $SPARK_SUBMIT \ --driver-java-options "$SPARK_DRIVER_JAVA_OPTS" \ - --conf spark.executor.extraJavaOptions="$SPARK_EXECUTOR_JAVA_OPTS" \ - --class $2 \ + --conf spark.executor.extraJavaOptions="$SPARK_EXECUTOR_JAVA_OPTS" \ + --class $3 \ $SPARK_OPTS \ - --jars $LIB_JARS \ - $1 "${@:3}" + --jars $2 \ + $1 "${@:4}" } diff --git a/flowman-dist/pom.xml b/flowman-dist/pom.xml index a5fddc9ec..5debf03f5 100644 --- a/flowman-dist/pom.xml +++ b/flowman-dist/pom.xml @@ -10,7 +10,7 @@ com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 .. @@ -22,13 +22,29 @@ copy-resources - validate + process-resources copy-resources ${project.build.directory} + target/flowman-server-${project.version}-properties.properties,target/flowman-tools-${project.version}-properties.properties + + . + + bin/**/* + + true + + + . + + conf/**/* + libexec/**/* + + false + .. 
@@ -46,6 +62,7 @@ maven-dependency-plugin + unpack-plugins process-sources unpack @@ -70,33 +87,33 @@ com.dimajix.flowman - flowman-plugin-example + flowman-plugin-kafka ${project.version} tar.gz bin ${project.build.directory} + com.dimajix.flowman - flowman-plugin-hbase + flowman-plugin-impala ${project.version} tar.gz bin ${project.build.directory} - --> com.dimajix.flowman - flowman-plugin-impala + flowman-plugin-mariadb ${project.version} tar.gz bin @@ -104,7 +121,15 @@ com.dimajix.flowman - flowman-plugin-mariadb + flowman-plugin-swagger + ${project.version} + tar.gz + bin + ${project.build.directory} + + + com.dimajix.flowman + flowman-plugin-json ${project.version} tar.gz bin @@ -121,6 +146,33 @@ + + copy-properties + process-sources + + copy + + + + + com.dimajix.flowman + flowman-tools + ${project.version} + properties + properties + ${project.build.directory} + + + com.dimajix.flowman + flowman-server + ${project.version} + properties + properties + ${project.build.directory} + + + + diff --git a/flowman-dist/src/main/assembly/assembly.xml b/flowman-dist/src/main/assembly/assembly.xml index 41dd66866..e1dfb7ef4 100644 --- a/flowman-dist/src/main/assembly/assembly.xml +++ b/flowman-dist/src/main/assembly/assembly.xml @@ -10,49 +10,24 @@ flowman-${project.version} - ${project.basedir}/conf/ - conf + ${project.basedir}/target/ + 0644 0755 - **/* + conf/**/* + examples/**/* + plugins/**/* - ${project.basedir}/bin/ - bin + ${project.basedir}/target/ + 0755 0755 - **/* - - true - - - ${project.basedir}/libexec/ - libexec - 0755 - 0755 - - **/* - - - - ${project.basedir}/target/examples/ - examples - 0644 - 0755 - - **/* - - - - ${project.basedir}/target/plugins - plugins - 0644 - 0755 - - **/* + bin/**/* + libexec/**/* @@ -66,8 +41,6 @@ - org.json:json - com.github.everit-org.json-schema:org.everit.json.schema org.apache.velocity:velocity-engine-core lib diff --git a/flowman-dsl/pom.xml b/flowman-dsl/pom.xml index 6df2a4e63..3d7f9f6c5 100644 --- a/flowman-dsl/pom.xml +++ b/flowman-dsl/pom.xml @@ -9,7 +9,7 @@ flowman-root com.dimajix.flowman - 0.14.2 + 0.15.0 .. 
@@ -56,15 +56,5 @@ org.scalatest scalatest_${scala.api_version} - - - org.scalamock - scalamock_${scala.api_version} - - - - org.mockito - mockito-core - diff --git a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/mapping/ExtractJson.scala b/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/mapping/ExtractJson.scala index 24b7a68e3..406814e2a 100644 --- a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/mapping/ExtractJson.scala +++ b/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/mapping/ExtractJson.scala @@ -35,7 +35,7 @@ case class ExtractJson( props, input, column, - schema.instantiate(props.context), + Some(schema.instantiate(props.context)), parseMode ) } diff --git a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/relation.scala b/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/relation.scala index 6d6d1fd52..9b62a44ab 100644 --- a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/relation.scala +++ b/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/relation.scala @@ -33,13 +33,6 @@ class RelationWrapperFunctions(wrapper:Wrapper[Relation, Relation.Properties]) { override def props: Context => Relation.Properties = ctx => wrapper.props(ctx).copy(description = Some(desc)) } - def option(kv:(String,String)): RelationWrapper = new RelationWrapper { - override def gen: Relation.Properties => Relation = wrapper.gen - override def props: Context => Relation.Properties = ctx => { - val props = wrapper.props(ctx) - props.copy(options = props.options + kv) - } - } } case class RelationGenHolder(r:RelationGen) extends RelationWrapper { diff --git a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/relation/HiveTable.scala b/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/relation/HiveTable.scala index ed00ca126..b27772e79 100644 --- a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/relation/HiveTable.scala +++ b/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/relation/HiveTable.scala @@ -33,7 +33,8 @@ case class HiveTable( table: String, external: Boolean = false, location: Option[Path] = None, - format: String = "parquet", + format: Option[String] = None, + options: Map[String,String] = Map(), rowFormat: Option[String] = None, inputFormat: Option[String] = None, outputFormat: Option[String] = None, @@ -51,6 +52,7 @@ case class HiveTable( external = external, location = location, format = format, + options = options, rowFormat = rowFormat, inputFormat = inputFormat, outputFormat = outputFormat, diff --git a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/relation/HiveUnionTable.scala b/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/relation/HiveUnionTable.scala index 7f3eba926..7de60dfe3 100644 --- a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/relation/HiveUnionTable.scala +++ b/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/relation/HiveUnionTable.scala @@ -35,7 +35,8 @@ case class HiveUnionTable( viewDatabase: Option[String] = None, view: String, external: Boolean = false, - format: String = "parquet", + format: Option[String] = None, + options: Map[String,String] = Map(), rowFormat: Option[String] = None, inputFormat: Option[String] = None, outputFormat: Option[String] = None, @@ -55,6 +56,7 @@ case class HiveUnionTable( view, external, format, + options, rowFormat, inputFormat, outputFormat, diff --git a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/schema/EmbeddedSchema.scala b/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/schema/EmbeddedSchema.scala index 7e0198b80..5d3c6a3aa 100644 --- 
a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/schema/EmbeddedSchema.scala +++ b/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/schema/EmbeddedSchema.scala @@ -1,3 +1,19 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package com.dimajix.flowman.dsl.schema import com.dimajix.flowman.dsl.SchemaGen diff --git a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/target/CopyTarget.scala b/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/target/CopyTarget.scala index 3008c6ae3..39072c7e2 100644 --- a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/target/CopyTarget.scala +++ b/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/target/CopyTarget.scala @@ -32,8 +32,9 @@ case class CopyTarget( source:Template[Dataset], target:Template[Dataset], schema:Option[CopyTarget.Schema] = None, + mode:OutputMode = OutputMode.OVERWRITE, parallelism:Int = 16, - mode:OutputMode = OutputMode.OVERWRITE + rebalance:Boolean = false ) extends TargetGen { override def apply(props: Target.Properties): com.dimajix.flowman.spec.target.CopyTarget = { val context = props.context @@ -42,8 +43,9 @@ case class CopyTarget( source.instantiate(context), target.instantiate(context), schema, + mode, parallelism, - mode + rebalance ) } } diff --git a/flowman-dsl/src/test/scala/com/dimajix/flowman/dsl/ExampleSpec.scala b/flowman-dsl/src/test/scala/com/dimajix/flowman/dsl/ExampleSpec.scala index 7def37001..a1eced36c 100644 --- a/flowman-dsl/src/test/scala/com/dimajix/flowman/dsl/ExampleSpec.scala +++ b/flowman-dsl/src/test/scala/com/dimajix/flowman/dsl/ExampleSpec.scala @@ -2,8 +2,8 @@ package com.dimajix.flowman.dsl import java.io.File -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.dsl.example.DqmProject import com.dimajix.flowman.execution.Lifecycle @@ -13,7 +13,7 @@ import com.dimajix.flowman.model.JobIdentifier import com.dimajix.spark.testing.LocalSparkSession -class ExampleSpec extends FlatSpec with Matchers with LocalSparkSession { +class ExampleSpec extends AnyFlatSpec with Matchers with LocalSparkSession { "A Project" should "be loadable" in (if (hiveSupported) { val project = DqmProject.instantiate() @@ -24,7 +24,7 @@ class ExampleSpec extends FlatSpec with Matchers with LocalSparkSession { .build() val context = session.getContext(project) - val executor = session.executor + val executor = session.execution val runner = session.runner val job = context.getJob(JobIdentifier("test")) diff --git a/flowman-plugins/example/.gitignore b/flowman-parent/.gitignore similarity index 100% rename from flowman-plugins/example/.gitignore rename to flowman-parent/.gitignore diff --git a/flowman-parent/pom.xml b/flowman-parent/pom.xml new file mode 100644 index 000000000..c206809c0 --- /dev/null +++ b/flowman-parent/pom.xml @@ -0,0 +1,772 @@ + + + 4.0.0 + flowman-parent + Flowman Parent BOM + pom + + + com.dimajix.flowman + 
flowman-root + 0.15.0 + .. + + + + + + org.codehaus.mojo + flatten-maven-plugin + + bom + true + true + + keep + keep + remove + + + + + flatten + package + + flatten + + + + flatten.clean + clean + + clean + + + + + + + + + + true + org.apache.maven.plugins + maven-clean-plugin + 3.1.0 + + false + + + + true + org.codehaus.mojo + build-helper-maven-plugin + 3.2.0 + + + true + org.codehaus.mojo + exec-maven-plugin + 3.0.0 + + + true + org.apache.maven.plugins + maven-jar-plugin + 3.2.0 + + + true + org.apache.maven.plugins + maven-resources-plugin + 3.1.0 + + + true + org.apache.maven.plugins + maven-compiler-plugin + 3.8.1 + + ${maven.compiler.source} + ${maven.compiler.target} + + + + true + net.alchim31.maven + scala-maven-plugin + 4.4.0 + + ${scala.version} + ${scala.api_version} + + -unchecked + -deprecation + -feature + -explaintypes + -Yno-adapted-args + -language:implicitConversions + + + -source + ${java.version} + -target + ${java.version} + -Xlint:all,-serial,-path,-try + + + + + scala-compile-first + process-resources + + add-source + compile + + + + scala-test-compile + process-test-resources + + testCompile + + + + attach-javadocs + + doc-jar + + + + + + true + org.scalatest + scalatest-maven-plugin + 2.0.0 + + ${project.build.directory}/surefire-reports + + test + false + false + 8 + file:///tmp/spark-warehouse + + + + + test + + test + + + + + + true + org.apache.maven.plugins + maven-dependency-plugin + 3.1.2 + + + org.apache.maven.plugins + maven-source-plugin + 3.1.0 + + true + + + + attach-sources + package + + jar-no-fork + + + + + + true + org.apache.maven.plugins + maven-surefire-plugin + 2.22.2 + + + true + org.codehaus.mojo + versions-maven-plugin + 2.8.1 + + + true + org.apache.maven.plugins + maven-assembly-plugin + 3.3.0 + + posix + + 0644 + 0755 + 0755 + + + + + make-assembly + package + + single + + + ${project.artifactId}-${project.version} + + src/main/assembly/assembly.xml + + + + + + + true + org.apache.maven.plugins + maven-deploy-plugin + 2.8.2 + + + true + org.apache.maven.plugins + maven-site-plugin + 3.9.1 + + + + + + + + + + com.dimajix.flowman + flowman-spark-extensions + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-core + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-spec + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-dsl + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-tools + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-server + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-plugin-aws + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-plugin-azure + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-plugin-kafka + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-plugin-impala + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-plugin-mariadb + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-plugin-mysql + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-plugin-swagger + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-plugin-json + ${flowman.version} + provided + + + com.dimajix.flowman + flowman-dist + ${flowman.version} + tar.gz + bin + provided + + + com.dimajix.flowman + flowman-scalatest-compat + ${flowman.version} + test + + + com.dimajix.flowman + flowman-spark-testing + ${flowman.version} + test + + + com.dimajix.flowman + flowman-testing + ${flowman.version} + test + + + + + org.scala-lang + 
scala-library + ${scala.version} + provided + + + org.scala-lang + scala-reflect + ${scala.version} + provided + + + + + org.apache.spark + spark-core_${scala.api_version} + ${spark.version} + provided + + + org.apache.spark + spark-tags_${scala.api_version} + ${spark.version} + provided + + + org.apache.spark + spark-sql_${scala.api_version} + ${spark.version} + provided + + + org.apache.spark + spark-catalyst_${scala.api_version} + ${spark.version} + provided + + + org.apache.spark + spark-hive_${scala.api_version} + ${spark.version} + provided + + + org.apache.velocity + velocity + + + jline + jline + + + + + org.apache.spark + spark-avro_${scala.api_version} + ${spark.version} + provided + + + com.databricks + spark-avro_${scala.api_version} + ${spark-avro.version} + provided + + + + org.xerial.snappy + snappy-java + 1.1.7.3 + provided + + + + + org.apache.hadoop + hadoop-client + ${hadoop.version} + provided + + + org.apache.hadoop + hadoop-mapreduce-client-jobclient + ${hadoop.version} + provided + + + org.apache.hadoop + hadoop-mapreduce-client-core + ${hadoop.version} + provided + + + org.apache.hadoop + hadoop-yarn-common + ${hadoop.version} + provided + + + javax.servlet + servlet-api + + + com.sun.jersey + jersey-core + + + com.sun.jersey + jersey-client + + + + + org.apache.hadoop + hadoop-hdfs + ${hadoop.version} + provided + + + org.apache.hadoop + hadoop-common + ${hadoop.version} + provided + + + org.slf4j + slf4j-log4j12 + + + log4j + log4j + + + org.eclipse.jetty + jetty + + + javax.servlet + servlet-api + + + + + + log4j + log4j + ${log4j.version} + provided + + + org.slf4j + slf4j-api + ${slf4j.version} + provided + + + org.slf4j + slf4j-ext + ${slf4j.version} + + + org.slf4j + slf4j-log4j12 + ${slf4j.version} + provided + + + + org.apache.avro + avro + ${avro.version} + provided + + + org.apache.avro + avro-mapred + ${avro.version} + hadoop2 + provided + + + + + org.codehaus.jackson + jackson-mapper-asl + ${jackson_asl.version} + provided + + + org.codehaus.jackson + jackson-core-asl + ${jackson_asl.version} + provided + + + org.codehaus.jackson + jackson-jaxrs + ${jackson_asl.version} + + + org.codehaus.jackson + jackson-xc + ${jackson_asl.version} + + + + + com.fasterxml.jackson.core + jackson-databind + ${jackson.databind.version} + provided + + + com.fasterxml.jackson.core + jackson-core + ${jackson.version} + provided + + + com.fasterxml.jackson.core + jackson-annotations + ${jackson.version} + provided + + + com.fasterxml.jackson.module + jackson-module-paranamer + ${jackson.version} + provided + + + com.fasterxml.jackson.module + jackson-module-jaxb-annotations + ${jackson.version} + provided + + + com.fasterxml.jackson.module + jackson-module-scala_${scala.api_version} + ${jackson.version} + provided + + + com.fasterxml.jackson.jaxrs + jackson-jaxrs-json-provider + ${jackson.version} + provided + + + com.fasterxml.jackson.jaxrs + jackson-jaxrs-base + ${jackson.version} + provided + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + ${jackson.version} + provided + + + + com.thoughtworks.paranamer + paranamer + ${paranamer.version} + provided + + + + + org.apache.commons + commons-compress + ${commons-compress.version} + provided + + + org.apache.commons + commons-math3 + ${commons-math3.version} + provided + + + org.apache.commons + commons-lang3 + ${commons-lang3.version} + provided + + + commons-beanutils + commons-beanutils + ${commons-beanutils.version} + provided + + + commons-collections + commons-collections + ${commons-collections.version} 
+ provided + + + commons-logging + commons-logging + ${commons-logging.version} + provided + + + commons-httpclient + commons-httpclient + ${commons-httpclient.version} + provided + + + commons-cli + commons-cli + ${commons-cli.version} + provided + + + commons-io + commons-io + ${commons-io.version} + provided + + + commons-codec + commons-codec + ${commons-codec.version} + provided + + + commons-dbcp + commons-dbcp + ${commons-dbcp.version} + provided + + + commons-lang + commons-lang + ${commons-lang.version} + provided + + + commons-digester + commons-digester + ${commons-digester.version} + provided + + + commons-pool + commons-pool + ${commons-pool.version} + provided + + + commons-compiler + commons-compiler + ${commons-compiler.version} + + + commons-validator + commons-validator + ${commons-validator.version} + provided + + + + org.apache.httpcomponents + httpclient + ${httpclient.version} + provided + + + org.apache.httpcomponents + httpcore + ${httpcore.version} + provided + + + + com.google.guava + guava + ${guava.version} + provided + + + + com.google.re2j + re2j + ${re2j.version} + provided + + + + org.scalatest + scalatest_${scala.api_version} + ${scalatest.version} + test + + + + org.scalamock + scalamock_${scala.api_version} + ${scalamock.version} + test + + + + + diff --git a/flowman-plugins/aws/pom.xml b/flowman-plugins/aws/pom.xml index 0dc448b73..aa5edd0ee 100644 --- a/flowman-plugins/aws/pom.xml +++ b/flowman-plugins/aws/pom.xml @@ -9,7 +9,7 @@ com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 ../.. @@ -17,10 +17,30 @@ flowman-aws ${project.version} ${project.build.finalName}.jar - 1.10.6 + + + 1.11.375 + + + default + + true + + + 1.11.375 + + + + com.amazonaws + aws-java-sdk-bundle + ${aws.version} + + + + hadoop-2.6 @@ -51,9 +71,6 @@ hadoop-2.8 - - true - 1.10.6 @@ -121,9 +138,12 @@ + + src/main/resources + true + src/hadoop-${hadoop-api.version}/resources - ${project.build.outputDirectory} true @@ -132,24 +152,13 @@ net.alchim31.maven scala-maven-plugin + + org.scalatest + scalatest-maven-plugin + org.apache.maven.plugins maven-assembly-plugin - - - make-assembly - package - - single - - - ${project.artifactId}-${project.version} - - src/main/assembly/assembly.xml - - - - @@ -161,10 +170,20 @@ provided + + com.dimajix.flowman + flowman-scalatest-compat + + org.apache.hadoop hadoop-aws ${hadoop.version} + + + org.scalatest + scalatest_${scala.api_version} + diff --git a/flowman-plugins/aws/src/hadoop-2.6/resources/plugin.yml b/flowman-plugins/aws/src/hadoop-2.6/resources/plugin.yml index 71b0b1775..b845e72f1 100644 --- a/flowman-plugins/aws/src/hadoop-2.6/resources/plugin.yml +++ b/flowman-plugins/aws/src/hadoop-2.6/resources/plugin.yml @@ -3,5 +3,6 @@ description: ${project.name} version: ${plugin.version} isolation: false jars: + - ${plugin.jar} - hadoop-aws-${hadoop.version}.jar - aws-java-sdk-${aws.version}.jar diff --git a/flowman-plugins/aws/src/hadoop-2.7/resources/plugin.yml b/flowman-plugins/aws/src/hadoop-2.7/resources/plugin.yml index 71b0b1775..b845e72f1 100644 --- a/flowman-plugins/aws/src/hadoop-2.7/resources/plugin.yml +++ b/flowman-plugins/aws/src/hadoop-2.7/resources/plugin.yml @@ -3,5 +3,6 @@ description: ${project.name} version: ${plugin.version} isolation: false jars: + - ${plugin.jar} - hadoop-aws-${hadoop.version}.jar - aws-java-sdk-${aws.version}.jar diff --git a/flowman-plugins/aws/src/hadoop-2.8/resources/plugin.yml b/flowman-plugins/aws/src/hadoop-2.8/resources/plugin.yml index 20f3f69f7..33fac821e 100644 --- 
a/flowman-plugins/aws/src/hadoop-2.8/resources/plugin.yml +++ b/flowman-plugins/aws/src/hadoop-2.8/resources/plugin.yml @@ -3,6 +3,7 @@ description: ${project.name} version: ${plugin.version} isolation: false jars: + - ${plugin.jar} - hadoop-aws-${hadoop.version}.jar - aws-java-sdk-core-${aws.version}.jar - aws-java-sdk-kms-${aws.version}.jar diff --git a/flowman-plugins/aws/src/hadoop-2.9/resources/plugin.yml b/flowman-plugins/aws/src/hadoop-2.9/resources/plugin.yml index c8e993aa6..c5f330e9b 100644 --- a/flowman-plugins/aws/src/hadoop-2.9/resources/plugin.yml +++ b/flowman-plugins/aws/src/hadoop-2.9/resources/plugin.yml @@ -3,5 +3,6 @@ description: ${project.name} version: ${plugin.version} isolation: false jars: + - ${plugin.jar} - hadoop-aws-${hadoop.version}.jar - aws-java-sdk-bundle-${aws.version}.jar diff --git a/flowman-plugins/aws/src/hadoop-3.1/resources/plugin.yml b/flowman-plugins/aws/src/hadoop-3.1/resources/plugin.yml index c8e993aa6..c5f330e9b 100644 --- a/flowman-plugins/aws/src/hadoop-3.1/resources/plugin.yml +++ b/flowman-plugins/aws/src/hadoop-3.1/resources/plugin.yml @@ -3,5 +3,6 @@ description: ${project.name} version: ${plugin.version} isolation: false jars: + - ${plugin.jar} - hadoop-aws-${hadoop.version}.jar - aws-java-sdk-bundle-${aws.version}.jar diff --git a/flowman-plugins/aws/src/hadoop-3.2/resources/plugin.yml b/flowman-plugins/aws/src/hadoop-3.2/resources/plugin.yml index c8e993aa6..c5f330e9b 100644 --- a/flowman-plugins/aws/src/hadoop-3.2/resources/plugin.yml +++ b/flowman-plugins/aws/src/hadoop-3.2/resources/plugin.yml @@ -3,5 +3,6 @@ description: ${project.name} version: ${plugin.version} isolation: false jars: + - ${plugin.jar} - hadoop-aws-${hadoop.version}.jar - aws-java-sdk-bundle-${aws.version}.jar diff --git a/flowman-plugins/aws/src/main/resources/META-INF/services/com.dimajix.flowman.spi.LogFilter b/flowman-plugins/aws/src/main/resources/META-INF/services/com.dimajix.flowman.spi.LogFilter new file mode 100644 index 000000000..40a36ccfe --- /dev/null +++ b/flowman-plugins/aws/src/main/resources/META-INF/services/com.dimajix.flowman.spi.LogFilter @@ -0,0 +1 @@ +com.dimajix.flowman.plugin.aws.AwsLogFilter diff --git a/flowman-plugins/aws/src/main/scala/com/dimajix/flowman/plugin/aws/AwsLogFilter.scala b/flowman-plugins/aws/src/main/scala/com/dimajix/flowman/plugin/aws/AwsLogFilter.scala new file mode 100644 index 000000000..f55fa0d33 --- /dev/null +++ b/flowman-plugins/aws/src/main/scala/com/dimajix/flowman/plugin/aws/AwsLogFilter.scala @@ -0,0 +1,39 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.plugin.aws + +import com.dimajix.flowman.plugin.aws.AwsLogFilter.redactedKeys +import com.dimajix.flowman.spi.LogFilter + +object AwsLogFilter { + val redactedKeys = Set( + "spark.hadoop.fs.s3a.proxy.password", + "spark.hadoop.fs.s3a.access.key", + "spark.hadoop.fs.s3a.secret.key" + ) +} + +class AwsLogFilter extends LogFilter { + override def filterConfig(key: String, value: String): Option[(String, String)] = { + if (redactedKeys contains key) { + Some((key, "***redacted***")) + } + else { + Some((key, value)) + } + } +} diff --git a/flowman-plugins/aws/src/test/scala/com/dimajix/flowman/plugin/aws/AwsLogFilterTest.scala b/flowman-plugins/aws/src/test/scala/com/dimajix/flowman/plugin/aws/AwsLogFilterTest.scala new file mode 100644 index 000000000..9378762ed --- /dev/null +++ b/flowman-plugins/aws/src/test/scala/com/dimajix/flowman/plugin/aws/AwsLogFilterTest.scala @@ -0,0 +1,34 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.plugin.aws + +import java.util.ServiceLoader + +import scala.collection.JavaConverters._ + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.spi.LogFilter + + +class AwsLogFilterTest extends AnyFlatSpec with Matchers { + "The AwsLogFilter" should "be loadable via ServiceLoader" in { + val logFilters = ServiceLoader.load(classOf[LogFilter]).iterator().asScala.toSeq + logFilters.count(_.isInstanceOf[AwsLogFilter]) should be (1) + } +} diff --git a/flowman-plugins/azure/pom.xml b/flowman-plugins/azure/pom.xml index b3821b810..27605db26 100644 --- a/flowman-plugins/azure/pom.xml +++ b/flowman-plugins/azure/pom.xml @@ -9,7 +9,7 @@ com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 ../.. @@ -17,6 +17,9 @@ flowman-azure ${project.version} ${project.build.finalName}.jar + + + 2.2.0 @@ -34,33 +37,13 @@ src/main/resources - ${project.build.outputDirectory} true - - net.alchim31.maven - scala-maven-plugin - org.apache.maven.plugins maven-assembly-plugin - - - make-assembly - package - - single - - - ${project.artifactId}-${project.version} - - src/main/assembly/assembly.xml - - - - diff --git a/flowman-plugins/example/src/main/scala/com/dimajix/flowman/plugin/example/HelloWorldTarget.scala b/flowman-plugins/example/src/main/scala/com/dimajix/flowman/plugin/example/HelloWorldTarget.scala deleted file mode 100644 index 8455c4ca6..000000000 --- a/flowman-plugins/example/src/main/scala/com/dimajix/flowman/plugin/example/HelloWorldTarget.scala +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright 2018 Kaya Kupferschmidt - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.dimajix.flowman.plugin.example - -import com.dimajix.flowman.annotation.TargetType -import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor -import com.dimajix.flowman.model.BaseTarget -import com.dimajix.flowman.model.Target -import com.dimajix.flowman.spec.target.TargetSpec - - -case class HelloWorldTarget( - instanceProperties:Target.Properties -) extends BaseTarget { - /** - * Abstract method which will perform the given task. - * - * @param executor - */ - override def build(executor: Executor): Unit = { - println("Hello world!") - } -} - - - -@TargetType(kind="hello-world") -class HelloWorldTargetSpec extends TargetSpec { - override def instantiate(context: Context): Target = { - HelloWorldTarget( - instanceProperties(context) - ) - } -} diff --git a/flowman-plugins/example/src/test/scala/com/dimajix/flowman/plugin/example/HelloWorldTargetTest.scala b/flowman-plugins/example/src/test/scala/com/dimajix/flowman/plugin/example/HelloWorldTargetTest.scala deleted file mode 100644 index f42ab0e00..000000000 --- a/flowman-plugins/example/src/test/scala/com/dimajix/flowman/plugin/example/HelloWorldTargetTest.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright 2018 Kaya Kupferschmidt - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.dimajix.flowman.plugin.example - -import org.scalatest.FlatSpec -import org.scalatest.Matchers - -import com.dimajix.flowman.execution.Session -import com.dimajix.flowman.model.Module - - -class HelloWorldTargetTest extends FlatSpec with Matchers { - "A HelloWorldTask" should "be deserializable" in { - val session = Session.builder().build() - val spec = - """ - |targets: - | custom: - | kind: hello-world - """.stripMargin - val module = Module.read.string(spec) - module.targets.keys should contain("custom") - - val target = module.targets("custom").instantiate(session.context) - target shouldBe an[HelloWorldTarget] - } -} diff --git a/flowman-plugins/hbase/pom.xml b/flowman-plugins/hbase/pom.xml index 4ac250423..f9e424097 100644 --- a/flowman-plugins/hbase/pom.xml +++ b/flowman-plugins/hbase/pom.xml @@ -68,21 +68,6 @@ org.apache.maven.plugins maven-assembly-plugin - - - make-assembly - package - - single - - - ${project.artifactId}-${project.version} - - src/main/assembly/assembly.xml - - - - @@ -252,20 +237,10 @@ test - - org.mockito - mockito-core - - org.scalatest scalatest_${scala.api_version} - - - org.scalamock - scalamock_${scala.api_version} - diff --git a/flowman-plugins/impala/pom.xml b/flowman-plugins/impala/pom.xml index 508e36170..b46203eb8 100644 --- a/flowman-plugins/impala/pom.xml +++ b/flowman-plugins/impala/pom.xml @@ -9,7 +9,7 @@ com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 ../.. @@ -31,7 +31,6 @@ src/main/resources - ${project.build.outputDirectory} true @@ -47,21 +46,6 @@ org.apache.maven.plugins maven-assembly-plugin - - - make-assembly - package - - single - - - ${project.artifactId}-${project.version} - - src/main/assembly/assembly.xml - - - - @@ -135,19 +119,9 @@ ${impala.jdbc.version} - - org.mockito - mockito-core - - org.scalatest scalatest_${scala.api_version} - - - org.scalamock - scalamock_${scala.api_version} - diff --git a/flowman-plugins/impala/src/main/scala/com/dimajix/flowman/spec/catalog/ImpalaCatalogSpec.scala b/flowman-plugins/impala/src/main/scala/com/dimajix/flowman/spec/catalog/ImpalaCatalogSpec.scala index 7c9a7c166..7a8322f82 100644 --- a/flowman-plugins/impala/src/main/scala/com/dimajix/flowman/spec/catalog/ImpalaCatalogSpec.scala +++ b/flowman-plugins/impala/src/main/scala/com/dimajix/flowman/spec/catalog/ImpalaCatalogSpec.scala @@ -18,11 +18,11 @@ package com.dimajix.flowman.spec.catalog import com.fasterxml.jackson.annotation.JsonProperty -import com.dimajix.flowman.annotation.CatalogType import com.dimajix.flowman.catalog.ExternalCatalog import com.dimajix.flowman.catalog.ImpalaExternalCatalog import com.dimajix.flowman.execution.Context import com.dimajix.flowman.model.ConnectionIdentifier +import com.dimajix.flowman.spec.annotation.CatalogType import com.dimajix.flowman.spec.connection.ImpalaConnection import com.dimajix.flowman.spec.connection.JdbcConnection diff --git a/flowman-plugins/impala/src/main/scala/com/dimajix/flowman/spec/connection/ImpalaConnection.scala b/flowman-plugins/impala/src/main/scala/com/dimajix/flowman/spec/connection/ImpalaConnection.scala index 277ddc003..90c65a045 100644 --- a/flowman-plugins/impala/src/main/scala/com/dimajix/flowman/spec/connection/ImpalaConnection.scala +++ b/flowman-plugins/impala/src/main/scala/com/dimajix/flowman/spec/connection/ImpalaConnection.scala @@ -18,11 +18,11 @@ package com.dimajix.flowman.spec.connection import com.fasterxml.jackson.annotation.JsonProperty -import com.dimajix.flowman.annotation.ConnectionType import 
com.dimajix.flowman.catalog.ImpalaExternalCatalog import com.dimajix.flowman.execution.Context import com.dimajix.flowman.model.BaseConnection import com.dimajix.flowman.model.Connection +import com.dimajix.flowman.spec.annotation.ConnectionType case class ImpalaConnection( diff --git a/flowman-plugins/impala/src/test/scala/com/dimajix/flowman/spec/catalog/ImpalaExternalCatalogTest.scala b/flowman-plugins/impala/src/test/scala/com/dimajix/flowman/spec/catalog/ImpalaExternalCatalogTest.scala index f1461e6b2..21e4c7a53 100644 --- a/flowman-plugins/impala/src/test/scala/com/dimajix/flowman/spec/catalog/ImpalaExternalCatalogTest.scala +++ b/flowman-plugins/impala/src/test/scala/com/dimajix/flowman/spec/catalog/ImpalaExternalCatalogTest.scala @@ -16,15 +16,15 @@ package com.dimajix.flowman.spec.catalog -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.catalog.ImpalaExternalCatalog import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Namespace -class ImpalaExternalCatalogTest extends FlatSpec with Matchers { +class ImpalaExternalCatalogTest extends AnyFlatSpec with Matchers { "An ImpalaCatalog" should "be readable from a namespace definition" in { val spec = """ diff --git a/flowman-plugins/json/.gitignore b/flowman-plugins/json/.gitignore new file mode 100644 index 000000000..b83d22266 --- /dev/null +++ b/flowman-plugins/json/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/flowman-plugins/json/pom.xml b/flowman-plugins/json/pom.xml new file mode 100644 index 000000000..0c9004cda --- /dev/null +++ b/flowman-plugins/json/pom.xml @@ -0,0 +1,124 @@ + + + 4.0.0 + flowman-plugin-json + Flowman JSON Schema plugin + + + com.dimajix.flowman + flowman-root + 0.15.0 + ../.. 
+ + + + flowman-json + ${project.version} + ${project.build.finalName}.jar + + + + + + src/main/resources + true + + + + + net.alchim31.maven + scala-maven-plugin + + + org.scalatest + scalatest-maven-plugin + + + org.apache.maven.plugins + maven-shade-plugin + + + + org.json:json + com.github.everit-org.json-schema:org.everit.json.schema + + + + + *:* + + META-INF/* + + + + + + org.json + com.dimajix.shaded.json + + + org.everit.json + com.dimajix.shaded.everit + + + + + + + org.codehaus.mojo + flatten-maven-plugin + + + org.apache.maven.plugins + maven-assembly-plugin + + + + + + + com.dimajix.flowman + flowman-spec + provided + + + + com.dimajix.flowman + flowman-spark-testing + test + + + + org.apache.spark + spark-core_${scala.api_version} + + + + org.apache.spark + spark-sql_${scala.api_version} + + + + org.json + json + 20190722 + compile + + + + com.github.everit-org.json-schema + org.everit.json.schema + 1.12.1 + compile + + + + org.scalatest + scalatest_${scala.api_version} + + + + + diff --git a/flowman-plugins/example/src/main/assembly/assembly.xml b/flowman-plugins/json/src/main/assembly/assembly.xml similarity index 83% rename from flowman-plugins/example/src/main/assembly/assembly.xml rename to flowman-plugins/json/src/main/assembly/assembly.xml index 9dc35b6db..9e4e42ff7 100644 --- a/flowman-plugins/example/src/main/assembly/assembly.xml +++ b/flowman-plugins/json/src/main/assembly/assembly.xml @@ -27,6 +27,11 @@ false runtime true + + + org.json:json + com.github.everit-org.json-schema:org.everit.json.schema + diff --git a/flowman-plugins/example/src/main/resources/plugin.yml b/flowman-plugins/json/src/main/resources/plugin.yml similarity index 80% rename from flowman-plugins/example/src/main/resources/plugin.yml rename to flowman-plugins/json/src/main/resources/plugin.yml index e722ea403..abd5eb9f4 100644 --- a/flowman-plugins/example/src/main/resources/plugin.yml +++ b/flowman-plugins/json/src/main/resources/plugin.yml @@ -2,4 +2,5 @@ name: ${plugin.name} description: ${project.name} version: ${plugin.version} isolation: false -jars: ${plugin.jar} +jars: + - ${plugin.jar} diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/JsonSchema.scala b/flowman-plugins/json/src/main/scala/com/dimajix/flowman/spec/schema/JsonSchema.scala similarity index 98% rename from flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/JsonSchema.scala rename to flowman-plugins/json/src/main/scala/com/dimajix/flowman/spec/schema/JsonSchema.scala index 47336da29..9ecc33f19 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/JsonSchema.scala +++ b/flowman-plugins/json/src/main/scala/com/dimajix/flowman/spec/schema/JsonSchema.scala @@ -37,6 +37,7 @@ import org.slf4j.LoggerFactory import com.dimajix.flowman.execution.Context import com.dimajix.flowman.model.Schema +import com.dimajix.flowman.spec.annotation.SchemaType import com.dimajix.flowman.spec.schema.ExternalSchema.CachedSchema import com.dimajix.flowman.types.ArrayType import com.dimajix.flowman.types.BooleanType @@ -137,7 +138,7 @@ case class JsonSchema( } - +@SchemaType(kind="json") class JsonSchemaSpec extends ExternalSchemaSpec { /** * Creates the instance of the specified Schema with all variable interpolation being performed diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/JsonSchemaTest.scala b/flowman-plugins/json/src/test/scala/com/dimajix/flowman/spec/schema/JsonSchemaTest.scala similarity index 97% rename from 
flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/JsonSchemaTest.scala rename to flowman-plugins/json/src/test/scala/com/dimajix/flowman/spec/schema/JsonSchemaTest.scala index 7bfe67c89..9d08a9631 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/JsonSchemaTest.scala +++ b/flowman-plugins/json/src/test/scala/com/dimajix/flowman/spec/schema/JsonSchemaTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.spec.schema -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.spec.ObjectMapper @@ -26,7 +26,8 @@ import com.dimajix.flowman.types.DoubleType import com.dimajix.flowman.types.StringType import com.dimajix.flowman.types.StructType -class JsonSchemaTest extends FlatSpec with Matchers { + +class JsonSchemaTest extends AnyFlatSpec with Matchers { "An JsonSchema" should "be declarable inline" in { val spec = """ diff --git a/flowman-plugins/kafka/pom.xml b/flowman-plugins/kafka/pom.xml index 3a3c87856..4bb3a4703 100644 --- a/flowman-plugins/kafka/pom.xml +++ b/flowman-plugins/kafka/pom.xml @@ -9,7 +9,7 @@ com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 ../.. @@ -23,7 +23,6 @@ src/main/resources - ${project.build.outputDirectory} true @@ -39,21 +38,6 @@ org.apache.maven.plugins maven-assembly-plugin - - - make-assembly - package - - single - - - ${project.artifactId}-${project.version} - - src/main/assembly/assembly.xml - - - - @@ -82,8 +66,10 @@ + org.apache.spark spark-hive_${scala.api_version} + provided @@ -111,20 +97,10 @@ test - - org.mockito - mockito-core - - org.scalatest scalatest_${scala.api_version} - - - org.scalamock - scalamock_${scala.api_version} - diff --git a/flowman-plugins/kafka/src/main/scala/com/dimajix/flowman/spec/relation/KafkaRelation.scala b/flowman-plugins/kafka/src/main/scala/com/dimajix/flowman/spec/relation/KafkaRelation.scala index d3c45ed12..8a5963e77 100644 --- a/flowman-plugins/kafka/src/main/scala/com/dimajix/flowman/spec/relation/KafkaRelation.scala +++ b/flowman-plugins/kafka/src/main/scala/com/dimajix/flowman/spec/relation/KafkaRelation.scala @@ -28,14 +28,14 @@ import org.slf4j.LoggerFactory import com.dimajix.common.Trilean import com.dimajix.common.Unknown -import com.dimajix.flowman.annotation.RelationType import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.model.BaseRelation import com.dimajix.flowman.model.Relation import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.flowman.model.Schema +import com.dimajix.flowman.spec.annotation.RelationType import com.dimajix.flowman.spec.schema.EmbeddedSchema import com.dimajix.flowman.types.BinaryType import com.dimajix.flowman.types.Field @@ -53,7 +53,8 @@ case class KafkaRelation( hosts:Seq[String], topics:Seq[String], startOffset:String="earliest", - endOffset:String="latest" + endOffset:String="latest", + options:Map[String,String]=Map() ) extends BaseRelation { private val logger = LoggerFactory.getLogger(classOf[KafkaRelation]) @@ -106,13 +107,13 @@ case class KafkaRelation( /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * @param schema - the schema to read. If none is specified, all available columns will be read * @param partitions - List of partitions. 
If none are specified, all the data will be read * @return */ - override def read(executor: Executor, schema: Option[StructType], partitions: Map[String, FieldValue]): DataFrame = { - require(executor != null) + override def read(execution: Execution, schema: Option[StructType], partitions: Map[String, FieldValue]): DataFrame = { + require(execution != null) require(schema != null) require(partitions != null) @@ -120,8 +121,7 @@ case class KafkaRelation( val topics = this.topics.mkString(",") logger.info(s"Reading Kafka topics '$topics' at hosts '$hosts'") - val reader = this.reader(executor) - .format("kafka") + val reader = this.reader(execution, "kafka", options) .option("subscribe", topics) .option("kafka.bootstrap.servers", hosts) .option("startingOffsets", startOffset) @@ -134,12 +134,12 @@ case class KafkaRelation( /** * Writes data into the relation, possibly into a specific partition * - * @param executor + * @param execution * @param df - dataframe to write * @param partition - destination partition */ - override def write(executor: Executor, df: DataFrame, partition: Map[String, SingleValue], mode: OutputMode): Unit = { - require(executor != null) + override def write(execution: Execution, df: DataFrame, partition: Map[String, SingleValue], mode: OutputMode): Unit = { + require(execution != null) require(df != null) require(partition != null) @@ -147,33 +147,32 @@ case class KafkaRelation( val topic = this.topics.headOption.getOrElse(throw new IllegalArgumentException(s"Missing field 'topic' in relation '$name'")) logger.info(s"Writing to Kafka topic '$topic' at hosts '$hosts'") - this.writer(executor, df, mode.batchMode) - .format("kafka") + this.writer(execution, df, "kafka", options, mode.batchMode) .option("topic", topic) .option("kafka.bootstrap.servers", hosts) .save() } - override def truncate(executor: Executor, partitions: Map[String, FieldValue]): Unit = { + override def truncate(execution: Execution, partitions: Map[String, FieldValue]): Unit = { throw new UnsupportedOperationException("Cleaning Kafka topics is not supported") } /** * Reads data from a streaming source * - * @param executor + * @param execution * @param schema * @return */ - override def readStream(executor: Executor, schema: Option[StructType]): DataFrame = { - require(executor != null) + override def readStream(execution: Execution, schema: Option[StructType]): DataFrame = { + require(execution != null) require(schema != null) val hosts = this.hosts.mkString(",") val topics = this.topics.mkString(",") logger.info(s"Streaming from Kafka topics '$topics' at hosts '$hosts'") - val reader = executor.spark.readStream.options(options) + val reader = execution.spark.readStream.options(options) .format("kafka") .option("subscribe", topics) .option("kafka.bootstrap.servers", hosts) @@ -186,20 +185,19 @@ case class KafkaRelation( /** * Writes data to a streaming sink * - * @param executor + * @param execution * @param df * @return */ - override def writeStream(executor: Executor, df: DataFrame, mode: OutputMode, checkpointLocation: Path): StreamingQuery = { - require(executor != null) + override def writeStream(execution: Execution, df: DataFrame, mode: OutputMode, checkpointLocation: Path): StreamingQuery = { + require(execution != null) require(df != null) val hosts = this.hosts.mkString(",") val topic = this.topics.headOption.getOrElse(throw new IllegalArgumentException(s"Missing field 'topic' in relation '$name'")) logger.info(s"Streaming to Kafka topic '$topic' at hosts '$hosts'") - 
this.streamWriter(executor, df, mode.streamMode, checkpointLocation) - .format("kafka") + this.streamWriter(execution, df, "kafka", options, mode.streamMode, checkpointLocation) .option("topic", topic) .option("kafka.bootstrap.servers", hosts) .start() @@ -207,38 +205,38 @@ case class KafkaRelation( /** * Verify if the corresponding physical backend of this relation already exists - * @param executor + * @param execution */ - override def exists(executor: Executor): Trilean = Unknown + override def exists(execution: Execution): Trilean = Unknown /** * Verify if the corresponding physical backend of this relation already exists - * @param executor + * @param execution */ - override def loaded(executor: Executor, partition:Map[String,SingleValue]): Trilean = Unknown + override def loaded(execution: Execution, partition:Map[String,SingleValue]): Trilean = Unknown /** * This method will physically create the corresponding relation. This might be a Hive table or a directory. The * relation will not contain any data, but all metadata will be processed * - * @param executor + * @param execution */ - override def create(executor: Executor, ignoreIfExsists: Boolean): Unit = ??? + override def create(execution: Execution, ignoreIfExsists: Boolean): Unit = ??? /** * This will delete any physical representation of the relation. Depending on the type only some meta data like * a Hive table might be dropped or also the physical files might be deleted * - * @param executor + * @param execution */ - override def destroy(executor: Executor, ignoreIfNotExists:Boolean): Unit = ??? + override def destroy(execution: Execution, ignoreIfNotExists:Boolean): Unit = ??? /** * This will update any existing relation to the specified metadata. * - * @param executor + * @param execution */ - override def migrate(executor: Executor): Unit = ??? + override def migrate(execution: Execution): Unit = ??? 
/** * Returns empty schema, so we read in all columns from Kafka @@ -262,7 +260,7 @@ case class KafkaRelation( * * @return */ - override protected def outputSchema : Option[StructType] = None + override protected def outputSchema(execution:Execution) : Option[StructType] = None } @@ -273,6 +271,7 @@ class KafkaRelationSpec extends RelationSpec { @JsonProperty(value = "topics", required = false) private var topics: Seq[String] = Seq() @JsonProperty(value = "startOffset", required = false) private var startOffset: String = "earliest" @JsonProperty(value = "endOffset", required = false) private var endOffset: String = "latest" + @JsonProperty(value = "options", required=false) private var options:Map[String,String] = Map() override def instantiate(context: Context): Relation = { KafkaRelation( @@ -280,7 +279,8 @@ class KafkaRelationSpec extends RelationSpec { hosts.map(context.evaluate), topics.map(context.evaluate), context.evaluate(startOffset), - context.evaluate(endOffset) + context.evaluate(endOffset), + context.evaluate(options) ) } } diff --git a/flowman-plugins/kafka/src/test/scala/com/dimajix/flowman/spec/relation/KafkaRelationTest.scala b/flowman-plugins/kafka/src/test/scala/com/dimajix/flowman/spec/relation/KafkaRelationTest.scala index f7665ebdf..1c34fa707 100644 --- a/flowman-plugins/kafka/src/test/scala/com/dimajix/flowman/spec/relation/KafkaRelationTest.scala +++ b/flowman-plugins/kafka/src/test/scala/com/dimajix/flowman/spec/relation/KafkaRelationTest.scala @@ -23,8 +23,8 @@ import org.apache.spark.sql.types.BinaryType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.execution.Session @@ -34,7 +34,7 @@ import com.dimajix.spark.testing.LocalSparkSession import com.dimajix.spark.testing.QueryTest -class KafkaRelationTest extends FlatSpec with Matchers with QueryTest with LocalSparkSession { +class KafkaRelationTest extends AnyFlatSpec with Matchers with QueryTest with LocalSparkSession { private var testUtils: KafkaTestUtils = _ private val topicId = new AtomicInteger(0) @@ -84,7 +84,7 @@ class KafkaRelationTest extends FlatSpec with Matchers with QueryTest with Local import org.apache.spark.sql.functions._ val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.context val topic = newTopic() @@ -111,7 +111,7 @@ class KafkaRelationTest extends FlatSpec with Matchers with QueryTest with Local import org.apache.spark.sql.functions._ val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.context val topic = newTopic() @@ -138,7 +138,7 @@ class KafkaRelationTest extends FlatSpec with Matchers with QueryTest with Local import org.apache.spark.sql.functions._ val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.context val topic = newTopic() @@ -164,7 +164,7 @@ class KafkaRelationTest extends FlatSpec with Matchers with QueryTest with Local it should "support stream reading" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val 
executor = session.execution val context = session.context val topic = newTopic() diff --git a/flowman-plugins/mariadb/pom.xml b/flowman-plugins/mariadb/pom.xml index 8ebe8ca0e..597c144e6 100644 --- a/flowman-plugins/mariadb/pom.xml +++ b/flowman-plugins/mariadb/pom.xml @@ -9,7 +9,7 @@ com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 ../.. @@ -24,33 +24,13 @@ src/main/resources - ${project.build.outputDirectory} true - - net.alchim31.maven - scala-maven-plugin - org.apache.maven.plugins maven-assembly-plugin - - - make-assembly - package - - single - - - ${project.artifactId}-${project.version} - - src/main/assembly/assembly.xml - - - - diff --git a/flowman-plugins/mysql/pom.xml b/flowman-plugins/mysql/pom.xml index 76fd7ef4b..ceeadb10a 100644 --- a/flowman-plugins/mysql/pom.xml +++ b/flowman-plugins/mysql/pom.xml @@ -9,7 +9,7 @@ com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 ../.. @@ -24,33 +24,13 @@ src/main/resources - ${project.build.outputDirectory} true - - net.alchim31.maven - scala-maven-plugin - org.apache.maven.plugins maven-assembly-plugin - - - make-assembly - package - - single - - - ${project.artifactId}-${project.version} - - src/main/assembly/assembly.xml - - - - diff --git a/flowman-plugins/swagger/.gitignore b/flowman-plugins/swagger/.gitignore new file mode 100644 index 000000000..b83d22266 --- /dev/null +++ b/flowman-plugins/swagger/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/flowman-plugins/example/pom.xml b/flowman-plugins/swagger/pom.xml similarity index 62% rename from flowman-plugins/example/pom.xml rename to flowman-plugins/swagger/pom.xml index 646b41408..2cbe53844 100644 --- a/flowman-plugins/example/pom.xml +++ b/flowman-plugins/swagger/pom.xml @@ -3,18 +3,18 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 - flowman-plugin-example - Flowman example plugin + flowman-plugin-swagger + Flowman Swagger Schema plugin com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 ../.. 
- flowman-example + flowman-swagger ${project.version} ${project.build.finalName}.jar @@ -23,7 +23,6 @@ src/main/resources - ${project.build.outputDirectory} true @@ -39,21 +38,6 @@ org.apache.maven.plugins maven-assembly-plugin - - - make-assembly - package - - single - - - ${project.artifactId}-${project.version} - - src/main/assembly/assembly.xml - - - - @@ -65,27 +49,38 @@ provided + + com.dimajix.flowman + flowman-dsl + provided + + + + com.dimajix.flowman + flowman-spark-testing + test + + org.apache.spark - spark-sql_${scala.api_version} + spark-core_${scala.api_version} - org.mockito - mockito-core - provided + org.apache.spark + spark-sql_${scala.api_version} - org.scalatest - scalatest_${scala.api_version} - provided + io.swagger + swagger-parser + 1.0.49 + compile - org.scalamock - scalamock_${scala.api_version} - provided + org.scalatest + scalatest_${scala.api_version} diff --git a/flowman-plugins/swagger/src/main/assembly/assembly.xml b/flowman-plugins/swagger/src/main/assembly/assembly.xml new file mode 100644 index 000000000..30953d407 --- /dev/null +++ b/flowman-plugins/swagger/src/main/assembly/assembly.xml @@ -0,0 +1,36 @@ + + bin + + tar.gz + + false + + + ${project.build.outputDirectory} + plugins/${plugin.name} + 0644 + 0755 + + plugin.yml + + + + + + + + com.dimajix.flowman:flowman-spec + org.slf4j:slf4j-ext + + plugins/${plugin.name} + true + true + false + runtime + true + + + diff --git a/flowman-plugins/swagger/src/main/resources/plugin.yml b/flowman-plugins/swagger/src/main/resources/plugin.yml new file mode 100644 index 000000000..3a3c17118 --- /dev/null +++ b/flowman-plugins/swagger/src/main/resources/plugin.yml @@ -0,0 +1,10 @@ +name: ${plugin.name} +description: ${project.name} +version: ${plugin.version} +isolation: false +jars: + - ${plugin.jar} + - swagger-parser-1.0.49.jar + - swagger-annotations-1.6.0.jar + - swagger-core-1.6.0.jar + - swagger-models-1.6.0.jar diff --git a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/schema/SwaggerSchema.scala b/flowman-plugins/swagger/src/main/scala/com/dimajix/flowman/dsl/schema/SwaggerSchema.scala similarity index 90% rename from flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/schema/SwaggerSchema.scala rename to flowman-plugins/swagger/src/main/scala/com/dimajix/flowman/dsl/schema/SwaggerSchema.scala index 5a3ce49da..35e4b7830 100644 --- a/flowman-dsl/src/main/scala/com/dimajix/flowman/dsl/schema/SwaggerSchema.scala +++ b/flowman-plugins/swagger/src/main/scala/com/dimajix/flowman/dsl/schema/SwaggerSchema.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2020 Kaya Kupferschmidt + * Copyright 2018-2021 Kaya Kupferschmidt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -33,8 +33,8 @@ case class SwaggerSchema( entity: Option[String] = None, nullable: Boolean = false ) -extends SchemaGen { - override def instantiate(context:Context) : schema.SwaggerSchema = { + extends SchemaGen { + override def instantiate(context: Context): schema.SwaggerSchema = { schema.SwaggerSchema( Schema.Properties(context), file, diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/SwaggerSchema.scala b/flowman-plugins/swagger/src/main/scala/com/dimajix/flowman/spec/schema/SwaggerSchema.scala similarity index 97% rename from flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/SwaggerSchema.scala rename to flowman-plugins/swagger/src/main/scala/com/dimajix/flowman/spec/schema/SwaggerSchema.scala index 46f170213..49f02ebf4 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/SwaggerSchema.scala +++ b/flowman-plugins/swagger/src/main/scala/com/dimajix/flowman/spec/schema/SwaggerSchema.scala @@ -24,6 +24,7 @@ import org.slf4j.LoggerFactory import com.dimajix.flowman.execution.Context import com.dimajix.flowman.model.Schema +import com.dimajix.flowman.spec.annotation.SchemaType import com.dimajix.flowman.spec.schema.ExternalSchema.CachedSchema @@ -57,7 +58,7 @@ case class SwaggerSchema( } - +@SchemaType(kind="swagger") class SwaggerSchemaSpec extends ExternalSchemaSpec { @JsonProperty(value="entity", required=false) private var entity: Option[String] = None @JsonProperty(value="nullable", required=false) private var nullable: String = "false" diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaUtils.scala b/flowman-plugins/swagger/src/main/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaUtils.scala similarity index 96% rename from flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaUtils.scala rename to flowman-plugins/swagger/src/main/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaUtils.scala index be274ead7..8b8f19aec 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaUtils.scala +++ b/flowman-plugins/swagger/src/main/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaUtils.scala @@ -89,7 +89,9 @@ object SwaggerSchemaUtils { * @return */ def fromSwagger(swagger:Swagger, entity:Option[String], nullable:Boolean) : Seq[Field] = { - val model = entity.filter(_.nonEmpty).map(e => swagger.getDefinitions().get(e)).getOrElse(swagger.getDefinitions().values().asScala.head) + val model = entity.filter(_.nonEmpty) + .map(e => swagger.getDefinitions().get(e)) + .getOrElse(swagger.getDefinitions().values().asScala.head) fromSwagger(model, nullable) } @@ -160,7 +162,8 @@ object SwaggerSchemaUtils { obj.set("type", TextNode.valueOf("object")) obj.withArray("required").addAll(required.asJava) properties.foreach(x => obj.`with`("properties").set(x.getKey, x.getValue): AnyRef) - desc.foreach(d => obj.set("description", d)) + // Use ObjectNode.replace instead of ObjectNode.set since set has changed its signature in newer Jackson versions + desc.foreach(d => obj.replace("description", d)) case obj: ObjectNode if obj.get("items") != null => replaceAllOf(obj.get("items")) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaTest.scala b/flowman-plugins/swagger/src/test/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaTest.scala similarity index 98% rename from flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaTest.scala rename to 
flowman-plugins/swagger/src/test/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaTest.scala index a7a866073..7a9021c72 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaTest.scala +++ b/flowman-plugins/swagger/src/test/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.spec.schema -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.spec.ObjectMapper @@ -31,7 +31,7 @@ import com.dimajix.flowman.types.StringType import com.dimajix.flowman.types.StructType -class SwaggerSchemaTest extends FlatSpec with Matchers { +class SwaggerSchemaTest extends AnyFlatSpec with Matchers { "A Swagger Schema" should "be deserializable" in { val spec = """ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaUtilsTest.scala b/flowman-plugins/swagger/src/test/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaUtilsTest.scala similarity index 98% rename from flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaUtilsTest.scala rename to flowman-plugins/swagger/src/test/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaUtilsTest.scala index 7c8535ea0..e5bd55a9a 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaUtilsTest.scala +++ b/flowman-plugins/swagger/src/test/scala/com/dimajix/flowman/spec/schema/SwaggerSchemaUtilsTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.spec.schema -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.types.ArrayType import com.dimajix.flowman.types.CharType @@ -34,7 +34,7 @@ import com.dimajix.flowman.types.TimestampType import com.dimajix.flowman.types.VarcharType -class SwaggerSchemaUtilsTest extends FlatSpec with Matchers { +class SwaggerSchemaUtilsTest extends AnyFlatSpec with Matchers { "A Swagger Schema" should "be deserializable" in { val spec = """ @@ -232,6 +232,7 @@ class SwaggerSchemaUtilsTest extends FlatSpec with Matchers { | allOf: | - | type: object + | description: Some Pet object primary properties | required: | - name | properties: diff --git a/flowman-scalatest-compat/.gitignore b/flowman-scalatest-compat/.gitignore new file mode 100644 index 000000000..b83d22266 --- /dev/null +++ b/flowman-scalatest-compat/.gitignore @@ -0,0 +1 @@ +/target/ diff --git a/flowman-scalatest-compat/pom.xml b/flowman-scalatest-compat/pom.xml new file mode 100644 index 000000000..a37c0ae3c --- /dev/null +++ b/flowman-scalatest-compat/pom.xml @@ -0,0 +1,65 @@ + + + 4.0.0 + flowman-scalatest-compat + Flowman ScalaTest compatibility library + + + com.dimajix.flowman + flowman-root + 0.15.0 + .. 
+ + + + + + net.alchim31.maven + scala-maven-plugin + + + org.codehaus.mojo + build-helper-maven-plugin + 3.0.0 + + + add-source + generate-sources + + add-source + + + + ${project.basedir}/src/main/scalatest-${scalatest-api.version} + + + + + add-site-source + pre-site + + add-source + + + + ${project.basedir}/src/main/scalatest-${scalatest-api.version} + + + + + + + + + + + org.scalatest + scalatest_${scala.api_version} + provided + + + + + diff --git a/flowman-scalatest-compat/src/main/scalatest-3.0/org/scalatest/flatspec/package.scala b/flowman-scalatest-compat/src/main/scalatest-3.0/org/scalatest/flatspec/package.scala new file mode 100644 index 000000000..c0f2e8ba5 --- /dev/null +++ b/flowman-scalatest-compat/src/main/scalatest-3.0/org/scalatest/flatspec/package.scala @@ -0,0 +1,5 @@ +package org.scalatest + +package object flatspec { + type AnyFlatSpec = org.scalatest.FlatSpec +} diff --git a/flowman-scalatest-compat/src/main/scalatest-3.0/org/scalatest/matchers/should/package.scala b/flowman-scalatest-compat/src/main/scalatest-3.0/org/scalatest/matchers/should/package.scala new file mode 100644 index 000000000..722a3e2ec --- /dev/null +++ b/flowman-scalatest-compat/src/main/scalatest-3.0/org/scalatest/matchers/should/package.scala @@ -0,0 +1,5 @@ +package org.scalatest.matchers + +package object should { + type Matchers = org.scalatest.Matchers +} diff --git a/flowman-server/pom.xml b/flowman-server/pom.xml index 5a74f204a..3208203d1 100644 --- a/flowman-server/pom.xml +++ b/flowman-server/pom.xml @@ -9,7 +9,7 @@ flowman-root com.dimajix.flowman - 0.14.2 + 0.15.0 .. @@ -23,6 +23,75 @@ org.scalatest scalatest-maven-plugin + + + org.apache.maven.plugins + maven-dependency-plugin + + + initialize + + build-classpath + + + runtime + flowman-server.classpath + false + , + $FLOWMAN_HOME/lib + + json,org.everit.json.schema,velocity-engine-core + + + + + + org.apache.maven.plugins + maven-resources-plugin + + + copy-resources + process-resources + + copy-resources + + + ${project.build.directory}/properties + + + src/main/properties + + **/* + + true + + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + + + attach-artifacts + package + + attach-artifact + + + + + ${project.build.directory}/properties/flowman-server.properties + properties + properties + + + + + + @@ -37,6 +106,11 @@ flowman-ui + + com.dimajix.flowman + flowman-scalatest-compat + + org.apache.hadoop hadoop-client @@ -63,19 +137,18 @@ 10.1.8 - - com.github.swagger-akka-http - swagger-akka-http_${scala.api_version} - 1.1.0 - - com.typesafe.akka akka-http-spray-json_${scala.api_version} 10.1.8 - + + com.github.swagger-akka-http + swagger-akka-http_${scala.api_version} + 1.1.0 + + org.webjars swagger-ui diff --git a/flowman-server/src/main/properties/flowman-server.properties b/flowman-server/src/main/properties/flowman-server.properties new file mode 100644 index 000000000..0c6324bc4 --- /dev/null +++ b/flowman-server/src/main/properties/flowman-server.properties @@ -0,0 +1 @@ +flowman-server.classpath=${flowman-server.classpath} diff --git a/flowman-server/src/main/scala/com/dimajix/flowman/server/model/Converter.scala b/flowman-server/src/main/scala/com/dimajix/flowman/server/model/Converter.scala index 2d6cc5154..4f18783e4 100644 --- a/flowman-server/src/main/scala/com/dimajix/flowman/server/model/Converter.scala +++ b/flowman-server/src/main/scala/com/dimajix/flowman/server/model/Converter.scala @@ -16,6 +16,7 @@ package com.dimajix.flowman.server.model +import com.dimajix.flowman.history import 
com.dimajix.flowman.model @@ -40,7 +41,9 @@ object Converter { project.config, project.profiles.keys.toSeq, project.connections.keys.toSeq, - project.basedir.toString + project.basedir.map(_.toString), + project.jobs.keys.toSeq, + project.targets.keys.toSeq ) } @@ -52,4 +55,33 @@ object Converter { job.environment ) } + + def ofSpec(jobState:history.JobState) : JobState = { + JobState( + jobState.id, + jobState.namespace, + jobState.project, + jobState.job, + jobState.phase.toString, + jobState.args, + jobState.status.toString, + jobState.startDateTime, + jobState.endDateTime + ) + } + + def ofSpec(state:history.TargetState) : TargetState = { + TargetState( + state.id, + state.jobId, + state.namespace, + state.project, + state.target, + state.partitions, + state.phase.toString, + state.status.toString, + state.startDateTime, + state.endDateTime + ) + } } diff --git a/flowman-server/src/main/scala/com/dimajix/flowman/server/model/JobState.scala b/flowman-server/src/main/scala/com/dimajix/flowman/server/model/JobState.scala new file mode 100644 index 000000000..247122548 --- /dev/null +++ b/flowman-server/src/main/scala/com/dimajix/flowman/server/model/JobState.scala @@ -0,0 +1,33 @@ +/* + * Copyright 2019-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.server.model + +import java.time.ZonedDateTime + + +case class JobState( + id:String, + namespace:String, + project:String, + job:String, + phase:String, + args:Map[String,String], + status:String, + startDateTime:Option[ZonedDateTime] = None, + endDateTime:Option[ZonedDateTime] = None +) { +} diff --git a/flowman-server/src/main/scala/com/dimajix/flowman/server/model/JsonSupport.scala b/flowman-server/src/main/scala/com/dimajix/flowman/server/model/JsonSupport.scala index 198df7ba4..82266b933 100644 --- a/flowman-server/src/main/scala/com/dimajix/flowman/server/model/JsonSupport.scala +++ b/flowman-server/src/main/scala/com/dimajix/flowman/server/model/JsonSupport.scala @@ -16,15 +16,38 @@ package com.dimajix.flowman.server.model +import java.time.LocalDateTime +import java.time.ZonedDateTime +import java.time.format.DateTimeFormatter + import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport import spray.json.DefaultJsonProtocol +import spray.json.DeserializationException +import spray.json.JsString +import spray.json.JsValue +import spray.json.JsonFormat import spray.json.RootJsonFormat -trait JsonSupport extends DefaultJsonProtocol with SprayJsonSupport{ +trait JsonSupport extends DefaultJsonProtocol with SprayJsonSupport { + implicit object ZonedDateTimeFormat extends JsonFormat[ZonedDateTime] { + final val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss") + def write(value:ZonedDateTime) : JsString = { + JsString(value.format(formatter)) + } + def read(value:JsValue) : ZonedDateTime = { + value match { + case JsString(dt) => LocalDateTime.parse(dt, formatter).atZone(java.time.ZoneId.systemDefault()) + case _ => throw DeserializationException("Not a valid date/time string") + } + } + } + implicit val namespaceFormat: RootJsonFormat[Namespace] = jsonFormat6(Namespace) - implicit val projectFormat: RootJsonFormat[Project] = jsonFormat8(Project) + implicit val projectFormat: RootJsonFormat[Project] = jsonFormat10(Project) implicit val jobFormat: RootJsonFormat[Job] = jsonFormat4(Job) + implicit val jobStateFormat: RootJsonFormat[JobState] = jsonFormat9(JobState) + implicit val targetStateFormat: RootJsonFormat[TargetState] = jsonFormat10(TargetState) } diff --git a/flowman-server/src/main/scala/com/dimajix/flowman/server/model/Project.scala b/flowman-server/src/main/scala/com/dimajix/flowman/server/model/Project.scala index e314b5f1b..1a95b4308 100644 --- a/flowman-server/src/main/scala/com/dimajix/flowman/server/model/Project.scala +++ b/flowman-server/src/main/scala/com/dimajix/flowman/server/model/Project.scala @@ -1,5 +1,22 @@ +/* + * Copyright 2019 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + package com.dimajix.flowman.server.model + case class Project( name:String, version:Option[String], @@ -8,6 +25,8 @@ case class Project( config: Map[String,String], profiles: Seq[String], connections: Seq[String], - basedir: String + basedir: Option[String], + jobs: Seq[String], + targets: Seq[String] ) { } diff --git a/flowman-server/src/main/scala/com/dimajix/flowman/server/model/TargetState.scala b/flowman-server/src/main/scala/com/dimajix/flowman/server/model/TargetState.scala new file mode 100644 index 000000000..6a1525fd1 --- /dev/null +++ b/flowman-server/src/main/scala/com/dimajix/flowman/server/model/TargetState.scala @@ -0,0 +1,34 @@ +/* + * Copyright 2019-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.server.model + +import java.time.ZonedDateTime + + +case class TargetState( + id:String, + jobId:Option[String], + namespace:String, + project:String, + target:String, + partitions:Map[String,String], + phase:String, + status:String, + startDateTime:Option[ZonedDateTime] = None, + endDateTime:Option[ZonedDateTime] = None +) { +} diff --git a/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/HistoryService.scala b/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/HistoryService.scala deleted file mode 100644 index 955ece6f7..000000000 --- a/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/HistoryService.scala +++ /dev/null @@ -1,59 +0,0 @@ -package com.dimajix.flowman.server.rest - -import akka.http.scaladsl.server.Route - -import com.dimajix.flowman.history.StateStore - - -class HistoryService(history:StateStore) { - import akka.http.scaladsl.server.Directives._ - - def routes : Route = pathPrefix("history") {( - pathEndOrSingleSlash { - reject - } - ~ - pathPrefix(Segment) { project => ( - pathPrefix("job") {( - pathEndOrSingleSlash { - parameterMap { params => - reject - } - } - ~ - pathPrefix(Segment) { job => ( - pathEndOrSingleSlash { - parameterMap { params => - reject - } - } - )} - )} - ~ - pathPrefix("target") {( - pathEndOrSingleSlash { - reject - } - ~ - pathPrefix(Segment) { target => ( - pathEndOrSingleSlash { - parameterMap { params => - reject - } - } - )} - )} - )} - )} - - private def parseQuery(params:Map[String,String]) = { - params.get("from") - params.get("to") - params.get("state") - params.get("id") - params.get("name") - params.get("parent_name") - params.get("parent_id") - params.flatMap(kv => "p\\[(.+)\\]".r.unapplySeq(kv._1).flatMap(_.headOption).map(k => (k,kv._2))) - } -} diff --git a/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/JobHistoryService.scala b/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/JobHistoryService.scala new file mode 100644 index 000000000..228922d59 --- /dev/null +++ b/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/JobHistoryService.scala @@ -0,0 +1,108 @@ +package com.dimajix.flowman.server.rest + +import akka.http.scaladsl.server +import akka.http.scaladsl.server.Route 
+import io.swagger.annotations.Api +import io.swagger.annotations.ApiImplicitParam +import io.swagger.annotations.ApiImplicitParams +import io.swagger.annotations.ApiOperation +import io.swagger.annotations.ApiParam +import io.swagger.annotations.ApiResponse +import io.swagger.annotations.ApiResponses +import javax.ws.rs.Path + +import com.dimajix.flowman.history.JobOrder +import com.dimajix.flowman.history.JobQuery +import com.dimajix.flowman.history.StateStore +import com.dimajix.flowman.server.model +import com.dimajix.flowman.server.model.Converter + + +@Api(value = "/job-history", produces = "application/json", consumes = "application/json") +@Path("/job-history") +class JobHistoryService(history:StateStore) { + import akka.http.scaladsl.server.Directives._ + import com.dimajix.flowman.server.model.JsonSupport._ + + def routes : Route = pathPrefix("job-history") {( + pathEndOrSingleSlash { + parameterMap { params => + listJobStates() + } + } + ~ + pathPrefix(Segment) { project => ( + pathEndOrSingleSlash { + parameterMap { params => + listJobStates(project) + } + } + ~ + pathPrefix(Segment) { job => ( + pathEndOrSingleSlash { + parameterMap { params => + listJobStates(project, job) + } + } + )} + )} + )} + + @Path("/") + @ApiOperation(value = "List the most recent job states of all projects", nickname = "getAllJobStates", httpMethod = "GET") + @ApiResponses(Array( + new ApiResponse(code = 200, message = "Job information", response = classOf[model.JobState]) + )) + def listJobStates() : server.Route = { + val query = JobQuery() + val jobs = history.findJobs(query, Seq(JobOrder.BY_DATETIME.desc()), 100, 0) + complete(jobs.map(Converter.ofSpec)) + } + + @Path("/{project}") + @ApiOperation(value = "List the most recent job states of a project", nickname = "getAllProjectJobsStates", httpMethod = "GET") + @ApiImplicitParams(Array( + new ApiImplicitParam(name = "project", value = "Project name", required = true, + dataType = "string", paramType = "path") + )) + @ApiResponses(Array( + new ApiResponse(code = 200, message = "Job information", response = classOf[model.JobState]) + )) + def listJobStates(@ApiParam(hidden = true) project:String) : server.Route = { + val query = JobQuery(project=Some(project)) + val jobs = history.findJobs(query, Seq(JobOrder.BY_DATETIME.desc()), 100, 0) + complete(jobs.map(Converter.ofSpec)) + } + + @Path("/{project}/{job}") + @ApiOperation(value = "List the most recent states of a specific job", nickname = "getProjectJobState", httpMethod = "GET") + @ApiImplicitParams(Array( + new ApiImplicitParam(name = "project", value = "Project name", required = true, + dataType = "string", paramType = "path"), + new ApiImplicitParam(name = "job", value = "Job name", required = true, + dataType = "string", paramType = "path") + )) + @ApiResponses(Array( + new ApiResponse(code = 200, message = "Job information", response = classOf[model.JobState]) + )) + def listJobStates(@ApiParam(hidden = true) project:String, + @ApiParam(hidden = true) job:String) : server.Route = { + val query = JobQuery( + project=Some(project), + name=Some(job) + ) + val jobs = history.findJobs(query, Seq(JobOrder.BY_DATETIME.desc()), 100, 0) + complete(jobs.map(Converter.ofSpec)) + } + + private def parseQuery(params:Map[String,String]) = { + params.get("from") + params.get("to") + params.get("state") + params.get("id") + params.get("name") + params.get("parent_name") + params.get("parent_id") + params.flatMap(kv => "p\\[(.+)\\]".r.unapplySeq(kv._1).flatMap(_.headOption).map(k => (k,kv._2))) + } +} diff --git 
a/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/NamespaceService.scala b/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/NamespaceService.scala index 6eebce91c..5d8dd7bb8 100644 --- a/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/NamespaceService.scala +++ b/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/NamespaceService.scala @@ -23,14 +23,14 @@ import io.swagger.annotations.ApiResponse import io.swagger.annotations.ApiResponses import javax.ws.rs.Path -import com.dimajix.flowman.model.Namespace -import com.dimajix.flowman.server.model +import com.dimajix.flowman.model import com.dimajix.flowman.server.model.Converter +import com.dimajix.flowman.server.model.Namespace @Api(value = "/namespace", produces = "application/json", consumes = "application/json") @Path("/namespace") -class NamespaceService(ns:Namespace) { +class NamespaceService(ns:model.Namespace) { import akka.http.scaladsl.server.Directives._ import com.dimajix.flowman.server.model.JsonSupport._ @@ -42,7 +42,7 @@ class NamespaceService(ns:Namespace) { @ApiOperation(value = "Return information on the current namespace", nickname = "getNamespace", httpMethod = "GET") @ApiResponses(Array( - new ApiResponse(code = 200, message = "Information about namespace", response = classOf[model.Namespace]) + new ApiResponse(code = 200, message = "Information about namespace", response = classOf[Namespace]) )) def info() : server.Route = { get { diff --git a/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/ProjectService.scala b/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/ProjectService.scala index 8b9bc22a3..abce1eaa2 100644 --- a/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/ProjectService.scala +++ b/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/ProjectService.scala @@ -39,6 +39,8 @@ import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.JobIdentifier import com.dimajix.flowman.server.model import com.dimajix.flowman.server.model.Converter +import com.dimajix.flowman.server.model.Job +import com.dimajix.flowman.server.model.Project import com.dimajix.flowman.storage.Store @@ -97,7 +99,7 @@ class ProjectService(store:Store) { @Path("/") @ApiOperation(value = "Retrieve a list of all projects", nickname = "getProjects", httpMethod = "GET") @ApiResponses(Array( - new ApiResponse(code = 200, message = "Project information", response = classOf[Seq[model.Project]]) + new ApiResponse(code = 200, message = "Project information", response = classOf[Seq[String]]) )) def listProjects(): server.Route = { val result = store.listProjects() diff --git a/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/Server.scala b/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/Server.scala index 11f527019..18747fbc3 100644 --- a/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/Server.scala +++ b/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/Server.scala @@ -48,7 +48,8 @@ class Server( val namespaceService = new NamespaceService(session.namespace.get) val projectService = new ProjectService(session.store) - val historyService = new HistoryService(session.history) + val jobHistoryService = new JobHistoryService(session.history) + val targetHistoryService = new TargetHistoryService(session.history) val route = ( pathPrefix("api") {( @@ -58,7 +59,9 @@ class Server( ~ projectService.routes ~ - historyService.routes + jobHistoryService.routes + ~ + targetHistoryService.routes 
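+                // The former HistoryService is split into dedicated job and target history services.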
)} ~ pathPrefix("swagger") {( diff --git a/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/SwaggerDocService.scala b/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/SwaggerDocService.scala index 7af8bcf7c..dc53509a7 100644 --- a/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/SwaggerDocService.scala +++ b/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/SwaggerDocService.scala @@ -9,7 +9,8 @@ object SwaggerDocService extends SwaggerHttpService { override def apiClasses = Set( classOf[NamespaceService], classOf[ProjectService], - classOf[HistoryService] + classOf[JobHistoryService], + classOf[TargetHistoryService] ) override def host = "" override def basePath: String = "/api/" diff --git a/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/TargetHistoryService.scala b/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/TargetHistoryService.scala new file mode 100644 index 000000000..d861f53df --- /dev/null +++ b/flowman-server/src/main/scala/com/dimajix/flowman/server/rest/TargetHistoryService.scala @@ -0,0 +1,109 @@ +package com.dimajix.flowman.server.rest + +import akka.http.scaladsl.server +import akka.http.scaladsl.server.Route +import io.swagger.annotations.Api +import io.swagger.annotations.ApiImplicitParam +import io.swagger.annotations.ApiImplicitParams +import io.swagger.annotations.ApiOperation +import io.swagger.annotations.ApiParam +import io.swagger.annotations.ApiResponse +import io.swagger.annotations.ApiResponses +import javax.ws.rs.Path + +import com.dimajix.flowman.history.JobOrder +import com.dimajix.flowman.history.JobQuery +import com.dimajix.flowman.history.StateStore +import com.dimajix.flowman.history.TargetOrder +import com.dimajix.flowman.history.TargetQuery +import com.dimajix.flowman.server.model +import com.dimajix.flowman.server.model.Converter + + +@Api(value = "/target-history", produces = "application/json", consumes = "application/json") +@Path("/target-history") +class TargetHistoryService(history:StateStore) { + import akka.http.scaladsl.server.Directives._ + + import com.dimajix.flowman.server.model.JsonSupport._ + + def routes : Route = pathPrefix("target-history") {( + pathEndOrSingleSlash { + listTargetStates() + } + ~ + pathPrefix(Segment) { project => ( + pathEndOrSingleSlash { + parameterMap { params => + listTargetStates(project) + } + } + ~ + pathPrefix(Segment) { target => ( + pathEndOrSingleSlash { + parameterMap { params => + listTargetStates(project, target) + } + } + )} + )} + )} + + @Path("/") + @ApiOperation(value = "Retrieve general information about a job", nickname = "getAllTaretStates", httpMethod = "GET") + @ApiResponses(Array( + new ApiResponse(code = 200, message = "Target information", response = classOf[model.JobState]) + )) + def listTargetStates() : server.Route = { + val query = TargetQuery() + val jobs = history.findTargets(query, Seq(TargetOrder.BY_DATETIME.desc()), 100, 0) + complete(jobs.map(Converter.ofSpec)) + } + + @Path("/{project}") + @ApiOperation(value = "Retrieve general information about a job", nickname = "getAllProjectTargetStates", httpMethod = "GET") + @ApiImplicitParams(Array( + new ApiImplicitParam(name = "project", value = "Project name", required = true, + dataType = "string", paramType = "path") + )) + @ApiResponses(Array( + new ApiResponse(code = 200, message = "Target information", response = classOf[model.JobState]) + )) + def listTargetStates(@ApiParam(hidden = true) project:String) : server.Route = { + val query = 
TargetQuery(project=Some(project)) + val jobs = history.findTargets(query, Seq(TargetOrder.BY_DATETIME.desc()), 100, 0) + complete(jobs.map(Converter.ofSpec)) + } + + @Path("/{project}/{target}") + @ApiOperation(value = "Retrieve general information about a job", nickname = "getProjectTargetState", httpMethod = "GET") + @ApiImplicitParams(Array( + new ApiImplicitParam(name = "project", value = "Project name", required = true, + dataType = "string", paramType = "path"), + new ApiImplicitParam(name = "target", value = "Target name", required = true, + dataType = "string", paramType = "path") + )) + @ApiResponses(Array( + new ApiResponse(code = 200, message = "Target information", response = classOf[model.JobState]) + )) + def listTargetStates(@ApiParam(hidden = true) project:String, + @ApiParam(hidden = true) target:String) : server.Route = { + val query = TargetQuery( + project=Some(project), + name=Some(target) + ) + val jobs = history.findTargets(query, Seq(TargetOrder.BY_DATETIME.desc()), 100, 0) + complete(jobs.map(Converter.ofSpec)) + } + + private def parseQuery(params:Map[String,String]) = { + params.get("from") + params.get("to") + params.get("state") + params.get("id") + params.get("name") + params.get("parent_name") + params.get("parent_id") + params.flatMap(kv => "p\\[(.+)\\]".r.unapplySeq(kv._1).flatMap(_.headOption).map(k => (k,kv._2))) + } +} diff --git a/flowman-spark-extensions/pom.xml b/flowman-spark-extensions/pom.xml index 395072810..47cbdf588 100644 --- a/flowman-spark-extensions/pom.xml +++ b/flowman-spark-extensions/pom.xml @@ -9,7 +9,7 @@ com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 .. @@ -40,12 +40,31 @@ + + add-site-source + pre-site + + add-source + + + + ${project.basedir}/src/main/spark-${spark-api.version} + + + + + + + com.dimajix.flowman + flowman-scalatest-compat + + com.dimajix.flowman flowman-spark-testing diff --git a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/features.scala b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/features.scala index 358cd40a0..81c8dbd15 100644 --- a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/features.scala +++ b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/features.scala @@ -1,3 +1,19 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + package com.dimajix.spark import scala.util.Try diff --git a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/package.scala b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/package.scala new file mode 100644 index 000000000..caa13d2c2 --- /dev/null +++ b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/package.scala @@ -0,0 +1,40 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix + + +package object spark { + final val SPARK_VERSION = org.apache.spark.SPARK_VERSION + final val SPARK_VERSION_MAJOR = majorMinor(SPARK_VERSION)._1 + final val SPARK_VERSION_MINOR = majorMinor(SPARK_VERSION)._2 + + + /** + * Given a Spark version string, return the (major version number, minor version number). + * E.g., for 2.0.1-SNAPSHOT, return (2, 0). + */ + private def majorMinor(sparkVersion: String): (Int, Int) = { + val majorMinorRegex = """^(\d+)\.(\d+)(\..*)?$""".r + majorMinorRegex.findFirstMatchIn(sparkVersion) match { + case Some(m) => + (m.group(1).toInt, m.group(2).toInt) + case None => + throw new IllegalArgumentException(s"Spark tried to parse '$sparkVersion' as a Spark" + + s" version string, but it could not find the major and minor version numbers.") + } + } +} diff --git a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/DataFrameUtils.scala b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/DataFrameUtils.scala index 901cd045e..389d09b22 100644 --- a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/DataFrameUtils.scala +++ b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/DataFrameUtils.scala @@ -1,5 +1,5 @@ /* - * Copyright 2019 Kaya Kupferschmidt + * Copyright 2021 Kaya Kupferschmidt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -16,18 +16,29 @@ package com.dimajix.spark.sql +import scala.collection.JavaConverters._ +import scala.util.Failure +import scala.util.Success + import org.apache.spark.sql.DataFrame import org.apache.spark.sql.Dataset import org.apache.spark.sql.Row import org.apache.spark.sql.SparkSession import org.apache.spark.sql.catalyst.encoders.RowEncoder import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan +import org.apache.spark.sql.catalyst.util.BadRecordException import org.apache.spark.sql.types.StructType +import org.apache.spark.storage.StorageLevel import com.dimajix.spark.sql.catalyst.PlanUtils +import com.dimajix.spark.sql.local.csv.CsvOptions +import com.dimajix.spark.sql.local.csv.UnivocityReader object DataFrameUtils { + private val csvOptions = new CsvOptions(Map()) + private val rowParserOptions = RowParser.Options() + def singleRow(sparkSession: SparkSession, schema: StructType): DataFrame = { val logicalPlan = PlanUtils.singleRowPlan(schema) new Dataset[Row](sparkSession, logicalPlan, RowEncoder(schema)) @@ -38,4 +49,203 @@ object DataFrameUtils { qe.assertAnalyzed() new Dataset[Row](sparkSession, logicalPlan, RowEncoder(qe.analyzed.schema)) } + + def ofRows(sparkSession: SparkSession, rows:Seq[Row], schema:StructType): DataFrame = { + sparkSession.createDataFrame(rows.asJava, schema) + } + + /** + * Temporarily caches a set of DataFrames + * @param input + * @param level + * @param fn + * @tparam T + * @return + */ + def withCaches[T](input:Iterable[DataFrame], level:StorageLevel=StorageLevel.MEMORY_AND_DISK)(fn: => T) : T = { + // Cache all DataFrames, and memorize their original storage levels + val originalPersistedState = input.toSeq.map { df => + val originalStorageLevel = df.storageLevel + if (originalStorageLevel == StorageLevel.NONE && level != StorageLevel.NONE) + df.persist(level) + + df -> originalStorageLevel + } + + val result = try { + fn + } + finally { + // Restore previous storage level + originalPersistedState.foreach { case (df, level) => + if (df.storageLevel != level) { + if (level == StorageLevel.NONE) + df.unpersist() + else + df.persist(level) + } + } + } + + result + } + + def withTempViews[T](input:Iterable[(String,DataFrame)])(fn: => T) : T = { + // Register all input DataFrames as temp views + input.foreach(kv => kv._2.createOrReplaceTempView(kv._1)) + + val result = try { + fn + } + finally { + // Call SessionCatalog.dropTempView to avoid unpersisting the possibly cached dataset. + input.foreach(kv => kv._2.sparkSession.sessionState.catalog.dropTempView(kv._1)) + } + + result + } + + /** + * Creates a DataFrame from a sequence of string array records + * @param sparkSession + * @param lines + * @param schema + * @return + */ + def ofStringValues(sparkSession: SparkSession, lines:Seq[Array[String]], schema:StructType) : DataFrame = { + val reader = new RowParser(schema, rowParserOptions) + val rows = lines.map(reader.parse) + sparkSession.createDataFrame(rows.asJava, schema) + } + + /** + * Create an empty [[DataFrame]] from a schema + * @param sparkSession + * @param schema + * @return + */ + def ofSchema(sparkSession: SparkSession, schema:StructType) : DataFrame = { + val rdd = sparkSession.sparkContext.emptyRDD[Row] + sparkSession.createDataFrame(rdd, schema) + } + + def compare(left:DataFrame, right:DataFrame) : Boolean = { + val leftRows = left.collect().toSeq + val rightRows = right.collect().toSeq + compare(leftRows, rightRows) + } + + /** + * Compare two DataFrames. 
They are considered to be equal if their schema and all records match. The order of + * the records may be different, though. + * @param left + * @param right + * @return + */ + def compare(left:Seq[Row], right:Seq[Row]) : Boolean = { + normalizeRows(left) == normalizeRows(right) + } + + /** + * Compares two DataFrames and creates a textual diff if they don't contain the same records + * @param left + * @param right + * @return + */ + def diff(expected:DataFrame, actual:DataFrame) : Option[String] = { + val expectedRows = expected.collect().toSeq + val actualRows = actual.collect().toSeq + + if (!compare(expectedRows, actualRows)) + Some(genError(expectedRows, actualRows)) + else + None + } + def diff(expected:Seq[Row], actual:Seq[Row]) : Option[String] = { + if (!compare(expected, actual)) + Some(genError(expected, actual)) + else + None + } + + def diffToStringValues(expected:Seq[Array[String]], actual:DataFrame) : Option[String] = { + val schema = actual.schema + val actualRows = actual.collect() + + val expectedRows = try { + val parser = new RowParser(schema, RowParser.Options()) + Left(expected.map(parser.parse)) + } + catch { + case _:BadRecordException => + Right(s"Cannot parse expected records with actual schema. Actual schema is:\n${schema.treeString}") + } + + expectedRows match { + case Left(expectedRows) => + DataFrameUtils.diff(expectedRows, actualRows) match { + case Some(diff) => + Some(s"Difference between datasets: \n${diff}") + case None => + None + } + case Right(error) => + Some(error) + } + } + + /** + * Converts data to types that we can do equality comparison using Scala collections. For BigDecimal type, + * the Scala type has a better definition of equality test (similar to Java's java.math.BigDecimal.compareTo). + * For binary arrays, we convert it to Seq to avoid of calling java.util.Arrays.equals for equality test. + * @param rows + * @return + */ + def normalizeRows(rows: Seq[Row]): Seq[Row] = { + def prepareRow(row: Row): Row = { + Row.fromSeq(row.toSeq.map { + case null => null + case d: java.math.BigDecimal => BigDecimal(d) + // Convert array to Seq for easy equality checkJob. 
+ case b: Array[_] => b.toSeq + case r: Row => prepareRow(r) + case o => o + }) + } + rows.map(prepareRow).sortBy(_.toString()) + } + + private def sideBySide(left: Seq[String], right: Seq[String]): Seq[String] = { + val maxLeftSize = left.map(_.length).max + val leftPadded = left ++ Seq.fill(math.max(right.size - left.size, 0))("") + val rightPadded = right ++ Seq.fill(math.max(left.size - right.size, 0))("") + + leftPadded.zip(rightPadded).map { + case (l, r) => (if (l == r) " " else "!") + l + (" " * ((maxLeftSize - l.length) + 3)) + r + } + } + + private def genError(expectedAnswer: Seq[Row], + actualAnswer: Seq[Row]): String = { + val getRowType: Option[Row] => String = row => + row.map(row => + if (row.schema == null) { + "struct<>" + } else { + s"${row.schema.catalogString}" + }).getOrElse("struct<>") + + s""" + |== Results == + |${ + sideBySide( + s"== Expected - ${expectedAnswer.size} ==" +: + getRowType(expectedAnswer.headOption) +: + normalizeRows(expectedAnswer).map(_.toString()), + s"== Actual - ${actualAnswer.size} ==" +: + getRowType(actualAnswer.headOption) +: + normalizeRows(actualAnswer).map(_.toString())).mkString("\n") + } + """.stripMargin + } } diff --git a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/RowParser.scala b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/RowParser.scala new file mode 100644 index 000000000..15cf8d904 --- /dev/null +++ b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/RowParser.scala @@ -0,0 +1,214 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.spark.sql + +import java.math.BigDecimal +import java.sql.Timestamp +import java.text.NumberFormat +import java.util.Locale +import java.util.TimeZone + +import scala.util.Try +import scala.util.control.NonFatal + +import org.apache.commons.lang3.time.FastDateFormat +import org.apache.spark.sql.Row +import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema +import org.apache.spark.sql.catalyst.util.BadRecordException +import org.apache.spark.sql.types.BooleanType +import org.apache.spark.sql.types.ByteType +import org.apache.spark.sql.types.DataType +import org.apache.spark.sql.types.DateType +import org.apache.spark.sql.types.Decimal +import org.apache.spark.sql.types.DecimalType +import org.apache.spark.sql.types.DoubleType +import org.apache.spark.sql.types.FloatType +import org.apache.spark.sql.types.IntegerType +import org.apache.spark.sql.types.LongType +import org.apache.spark.sql.types.ShortType +import org.apache.spark.sql.types.StringType +import org.apache.spark.sql.types.StructType +import org.apache.spark.sql.types.TimestampType +import org.apache.spark.unsafe.types.UTF8String + +import com.dimajix.util.DateTimeUtils + + +object RowParser { + object Options { + def apply(addExtraColumns:Boolean = false, + removeExtraColumns:Boolean = false + ) : Options = { + val timeZone = DateTimeUtils.getTimeZone("UTC") + Options( + nullValue = "", + nanValue = "NaN", + negativeInf = "Inf", + positiveInf = "-Inf", + timeZone = timeZone, + timestampFormat = FastDateFormat.getInstance("yyyy-MM-dd'T'HH:mm:ss.SSSXXX", timeZone, Locale.US), + dateFormat = FastDateFormat.getInstance("yyyy-MM-dd", Locale.US), + addExtraColumns = addExtraColumns, + removeExtraColumns = removeExtraColumns + ) + } + } + case class Options( + nullValue:String, + nanValue:String, + negativeInf:String, + positiveInf:String, + timeZone:TimeZone, + timestampFormat:FastDateFormat, + dateFormat:FastDateFormat, + addExtraColumns:Boolean, + removeExtraColumns:Boolean + ) +} + + +class RowParser(schema: StructType, options:RowParser.Options) { + // A `ValueConverter` is responsible for converting the given value to a desired type. + private type ValueConverter = String => Any + private val valueConverters: Array[ValueConverter] = + schema.map(f => makeConverter(f.name, f.dataType, f.nullable)).toArray + private val tokenIndexArr: Array[Int] = + schema.map(f => schema.indexOf(f)).toArray + + /** + * Create a converter which converts the string value to a value according to a desired type. + * Currently, we do not support complex types (`ArrayType`, `MapType`, `StructType`). + * + * For other nullable types, returns null if it is null or equals to the value specified + * in `nullValue` option. 
+ */ + private def makeConverter( + name: String, + dataType: DataType, + nullable: Boolean = true): ValueConverter = dataType match { + case _: ByteType => (d: String) => + nullSafeDatum(d, name, nullable)(_.toByte) + + case _: ShortType => (d: String) => + nullSafeDatum(d, name, nullable)(_.toShort) + + case _: IntegerType => (d: String) => + nullSafeDatum(d, name, nullable)(_.toInt) + + case _: LongType => (d: String) => + nullSafeDatum(d, name, nullable)(_.toLong) + + case _: FloatType => (d: String) => + nullSafeDatum(d, name, nullable) { + case options.nanValue => Float.NaN + case options.negativeInf => Float.NegativeInfinity + case options.positiveInf => Float.PositiveInfinity + case datum => + Try(datum.toFloat) + .getOrElse(NumberFormat.getInstance(Locale.US).parse(datum).floatValue()) + } + + case _: DoubleType => (d: String) => + nullSafeDatum(d, name, nullable) { + case options.nanValue => Double.NaN + case options.negativeInf => Double.NegativeInfinity + case options.positiveInf => Double.PositiveInfinity + case datum => + Try(datum.toDouble) + .getOrElse(NumberFormat.getInstance(Locale.US).parse(datum).doubleValue()) + } + + case _: BooleanType => (d: String) => + nullSafeDatum(d, name, nullable)(_.toBoolean) + + case dt: DecimalType => (d: String) => + nullSafeDatum(d, name, nullable) { datum => + val value = new BigDecimal(datum.replaceAll(",", "")) + Decimal(value, dt.precision, dt.scale) + } + + case _: TimestampType => (d: String) => + nullSafeDatum(d, name, nullable) { datum => + // This one will lose microseconds parts. + // See https://issues.apache.org/jira/browse/SPARK-10681. + Try(new Timestamp(options.timestampFormat.parse(datum).getTime)) + .getOrElse { + // If it fails to parse, then tries the way used in 2.0 and 1.x for backwards + // compatibility. + new Timestamp(DateTimeUtils.stringToTime(datum).getTime) + } + } + + case _: DateType => (d: String) => + nullSafeDatum(d, name, nullable) { datum => + java.sql.Date.valueOf(datum) + } + + case _: StringType => (d: String) => + nullSafeDatum(d, name, nullable) { datum => + datum + } + + // We don't actually hit this exception though, we keep it for understandability + case _ => throw new RuntimeException(s"Unsupported type: ${dataType.typeName}") + } + + private def nullSafeDatum( + datum: String, + name: String, + nullable: Boolean)(converter: ValueConverter): Any = { + if (datum == options.nullValue || datum == null) { + if (!nullable) { + throw new RuntimeException(s"null value found but field $name is not nullable.") + } + null + } else { + converter.apply(datum) + } + } + + def parse(tokens: Array[String]): Row = { + // If the number of tokens doesn't match the schema, we should treat it as a malformed record. + // However, we still have chance to parse some of the tokens, by adding extra null tokens in + // the tail if the number is smaller, or by dropping extra tokens if the number is larger. 
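+        // With the default Options (addExtraColumns = false, removeExtraColumns = false),
+        // any mismatch between the number of tokens and the schema size is treated as malformed.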
+ if ((tokens.length < schema.length && !options.addExtraColumns) || + (tokens.length > schema.length && !options.removeExtraColumns) + ) { + throw BadRecordException( + () => new UTF8String(), + () => None, + new RuntimeException("Malformed record")) + } + + val checkedTokens = if (schema.length > tokens.length) { + tokens ++ new Array[String](schema.length - tokens.length) + } else { + tokens.take(schema.length) + } + + try { + val values = (0 until schema.length).map { i => + val from = tokenIndexArr(i) + valueConverters(from).apply(checkedTokens(from)) + } + new GenericRowWithSchema(values.toArray, schema) + } catch { + case NonFatal(e) => + throw BadRecordException(() => new UTF8String(), () => None, e) + } + } +} diff --git a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/catalyst/SqlBuilder.scala b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/catalyst/SqlBuilder.scala index 4a1714140..f7a289d95 100644 --- a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/catalyst/SqlBuilder.scala +++ b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/catalyst/SqlBuilder.scala @@ -715,7 +715,7 @@ class SqlBuilder private( case SubqueryAlias(_, a2 @ SubqueryAlias(_,_)) => a2 // Remove subqueries between project and UNION - case p1 @ Project(_, a @ SubqueryAlias(_, u @ Union(_))) => p1.copy(child = u) + case p1 @ Project(_, a @ SubqueryAlias(_, u)) if u.isInstanceOf[Union] => p1.copy(child = u) // Move projections together, they will be merged by CollapseProject case p1 @ Project(_, a @ SubqueryAlias(_, p2 @ Project(_, c))) => p1.copy(child = p2.copy(child = a.copy(child = c))) diff --git a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/local/csv/CsvRelation.scala b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/local/csv/CsvRelation.scala index 9c26456d3..fc44426ea 100644 --- a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/local/csv/CsvRelation.scala +++ b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/local/csv/CsvRelation.scala @@ -77,6 +77,6 @@ class CsvRelation(context: SQLContext, files:Seq[File], options:CsvOptions, msch private def readFile(file:File) : Iterator[Row] = { val lines = Source.fromFile(file, options.encoding).getLines() val parser = new UnivocityReader(schema, options) - UnivocityReader.parseIterator(lines, parser.options.headerFlag, parser, schema) + UnivocityReader.parseIterator(lines, parser.options.headerFlag, parser) } } diff --git a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/local/csv/UnivocityReader.scala b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/local/csv/UnivocityReader.scala index c046de0cb..69f0eb6ab 100644 --- a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/local/csv/UnivocityReader.scala +++ b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/local/csv/UnivocityReader.scala @@ -1,21 +1,5 @@ /* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/* - * Copyright 2018 Kaya Kupferschmidt + * Copyright 2021 Kaya Kupferschmidt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -59,158 +43,34 @@ import org.apache.spark.sql.types.StructType import org.apache.spark.sql.types.TimestampType import org.apache.spark.unsafe.types.UTF8String +import com.dimajix.spark.sql.RowParser import com.dimajix.util.DateTimeUtils class UnivocityReader(schema: StructType, val options:CsvOptions) { - // A `ValueConverter` is responsible for converting the given value to a desired type. - private type ValueConverter = String => Any - private val tokenizer = new CsvParser(options.asParserSettings) private val settings = options.asParserSettings settings.setHeaders(schema.fieldNames: _*) - private val valueConverters: Array[ValueConverter] = - schema.map(f => makeConverter(f.name, f.dataType, f.nullable, options)).toArray - - private val tokenIndexArr: Array[Int] = { - schema.map(f => schema.indexOf(f)).toArray - } - - /** - * Create a converter which converts the string value to a value according to a desired type. - * Currently, we do not support complex types (`ArrayType`, `MapType`, `StructType`). - * - * For other nullable types, returns null if it is null or equals to the value specified - * in `nullValue` option. - */ - def makeConverter( - name: String, - dataType: DataType, - nullable: Boolean = true, - options: CsvOptions): ValueConverter = dataType match { - case _: ByteType => (d: String) => - nullSafeDatum(d, name, nullable, options)(_.toByte) - - case _: ShortType => (d: String) => - nullSafeDatum(d, name, nullable, options)(_.toShort) - - case _: IntegerType => (d: String) => - nullSafeDatum(d, name, nullable, options)(_.toInt) - - case _: LongType => (d: String) => - nullSafeDatum(d, name, nullable, options)(_.toLong) - - case _: FloatType => (d: String) => - nullSafeDatum(d, name, nullable, options) { - case options.nanValue => Float.NaN - case options.negativeInf => Float.NegativeInfinity - case options.positiveInf => Float.PositiveInfinity - case datum => - Try(datum.toFloat) - .getOrElse(NumberFormat.getInstance(Locale.US).parse(datum).floatValue()) - } - - case _: DoubleType => (d: String) => - nullSafeDatum(d, name, nullable, options) { - case options.nanValue => Double.NaN - case options.negativeInf => Double.NegativeInfinity - case options.positiveInf => Double.PositiveInfinity - case datum => - Try(datum.toDouble) - .getOrElse(NumberFormat.getInstance(Locale.US).parse(datum).doubleValue()) - } - - case _: BooleanType => (d: String) => - nullSafeDatum(d, name, nullable, options)(_.toBoolean) - - case dt: DecimalType => (d: String) => - nullSafeDatum(d, name, nullable, options) { datum => - val value = new BigDecimal(datum.replaceAll(",", "")) - Decimal(value, dt.precision, dt.scale) - } - - case _: TimestampType => (d: String) => - nullSafeDatum(d, name, nullable, options) { datum => - // This one will lose microseconds parts. - // See https://issues.apache.org/jira/browse/SPARK-10681. 
- Try(options.timestampFormat.parse(datum).getTime * 1000L) - .getOrElse { - // If it fails to parse, then tries the way used in 2.0 and 1.x for backwards - // compatibility. - DateTimeUtils.stringToTime(datum).getTime * 1000L - } - } - - case _: DateType => (d: String) => - nullSafeDatum(d, name, nullable, options) { datum => - // This one will lose microseconds parts. - // See https://issues.apache.org/jira/browse/SPARK-10681.x - Try(DateTimeUtils.millisToDays(options.dateFormat.parse(datum).getTime)) - .getOrElse { - // If it fails to parse, then tries the way used in 2.0 and 1.x for backwards - // compatibility. - DateTimeUtils.millisToDays(DateTimeUtils.stringToTime(datum).getTime) - } - } - - case _: StringType => (d: String) => - nullSafeDatum(d, name, nullable, options)(UTF8String.fromString) - - // We don't actually hit this exception though, we keep it for understandability - case _ => throw new RuntimeException(s"Unsupported type: ${dataType.typeName}") - } - - private def nullSafeDatum( - datum: String, - name: String, - nullable: Boolean, - options: CsvOptions)(converter: ValueConverter): Any = { - if (datum == options.nullValue || datum == null) { - if (!nullable) { - throw new RuntimeException(s"null value found but field $name is not nullable.") - } - null - } else { - converter.apply(datum) - } - } + private val parser = new RowParser(schema, + RowParser.Options( + nullValue = options.nullValue, + nanValue = options.nanValue, + negativeInf = options.negativeInf, + positiveInf = options.positiveInf, + timeZone = options.timeZone, + timestampFormat = options.timestampFormat, + dateFormat = options.dateFormat, + addExtraColumns = false, + removeExtraColumns = false + )) /** * Parses a single CSV string and turns it into either one resulting row or no row (if the * the record is malformed). */ - def parse(input: String): Row = convert(tokenizer.parseLine(input)) - - private def convert(tokens: Array[String]): Row = { - if (tokens.length != schema.length) { - // If the number of tokens doesn't match the schema, we should treat it as a malformed record. - // However, we still have chance to parse some of the tokens, by adding extra null tokens in - // the tail if the number is smaller, or by dropping extra tokens if the number is larger. 
- val checkedTokens = if (schema.length > tokens.length) { - tokens ++ new Array[String](schema.length - tokens.length) - } else { - tokens.take(schema.length) - } - throw BadRecordException( - () => new UTF8String(), - () => None, - new RuntimeException("Malformed CSV record")) - } - else { - try { - val values = (0 until schema.length).map { i => - val from = tokenIndexArr(i) - valueConverters(from).apply(tokens(from)) - } - Row(values:_*) - } catch { - case NonFatal(e) => - throw BadRecordException(() => new UTF8String(), () => None, e) - } - } - } + def parse(input: String): Row = parser.parse(tokenizer.parseLine(input)) } @@ -256,8 +116,7 @@ object UnivocityReader { def parseIterator( lines: Iterator[String], shouldDropHeader: Boolean, - parser: UnivocityReader, - schema: StructType): Iterator[Row] = { + parser: UnivocityReader): Iterator[Row] = { val options = parser.options val linesWithoutHeader = if (shouldDropHeader) { diff --git a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/local/csv/UnivocityWriter.scala b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/local/csv/UnivocityWriter.scala index 092bd9bbd..f15a6bb7c 100644 --- a/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/local/csv/UnivocityWriter.scala +++ b/flowman-spark-extensions/src/main/scala/com/dimajix/spark/sql/local/csv/UnivocityWriter.scala @@ -76,10 +76,10 @@ class UnivocityWriter(schema: StructType, writer:Writer, options:CsvOptions) ext private def makeConverter(dataType: DataType): ValueConverter = dataType match { case DateType => (row: Row, ordinal: Int) => - options.dateFormat.format(DateTimeUtils.toJavaDate(row.getInt(ordinal))) + options.dateFormat.format(row.getDate(ordinal)) case TimestampType => (row: Row, ordinal: Int) => - options.timestampFormat.format(DateTimeUtils.toJavaTimestamp(row.getLong(ordinal))) + options.timestampFormat.format(row.getTimestamp(ordinal)) case BooleanType => (row: Row, ordinal: Int) => row.getBoolean(ordinal).toString diff --git a/flowman-spark-extensions/src/main/scala/com/dimajix/util/DateTimeUtils.scala b/flowman-spark-extensions/src/main/scala/com/dimajix/util/DateTimeUtils.scala index 09d1fda89..5fa0b5105 100644 --- a/flowman-spark-extensions/src/main/scala/com/dimajix/util/DateTimeUtils.scala +++ b/flowman-spark-extensions/src/main/scala/com/dimajix/util/DateTimeUtils.scala @@ -34,13 +34,35 @@ package com.dimajix.util import java.sql.Date import java.sql.Timestamp +import java.time.ZoneId +import java.util.TimeZone import scala.annotation.tailrec import javax.xml.bind.DatatypeConverter +import org.apache.spark.sql.SparkShim object DateTimeUtils { + final val SECONDS_PER_MINUTE = 60L + final val SECONDS_PER_HOUR = 60 * 60L + final val SECONDS_PER_DAY = SECONDS_PER_HOUR * 24L + + final val MILLIS_PER_SECOND = 1000L + final val MILLIS_PER_MINUTE = 1000L * 60L + final val MILLIS_PER_HOUR = SECONDS_PER_HOUR * 1000L + final val MILLIS_PER_DAY = SECONDS_PER_DAY * 1000L + + final val MICROS_PER_MILLIS = 1000L + final val MICROS_PER_SECOND = MICROS_PER_MILLIS * MILLIS_PER_SECOND + final val MICROS_PER_MINUTE = MICROS_PER_MILLIS * MILLIS_PER_MINUTE + final val MICROS_PER_HOUR = MICROS_PER_MILLIS * MILLIS_PER_HOUR + final val MICROS_PER_DAY = MICROS_PER_SECOND * SECONDS_PER_DAY + + final val NANOS_PER_SECOND = MICROS_PER_SECOND * 1000L + final val NANOS_PER_MICROS = 1000L + final val NANOS_PER_MILLIS = NANOS_PER_MICROS * 1000L + @tailrec def stringToTime(s: String): java.util.Date = { val indexOfGMT = s.indexOf("GMT") @@ -63,6 +85,15 @@ 
object DateTimeUtils { } def millisToDays(millisUtc: Long): Int = { - org.apache.spark.sql.catalyst.util.DateTimeUtils.millisToDays(millisUtc) + millisToDays(millisUtc, TimeZone.getDefault()) + } + + def millisToDays(millisUtc: Long, timeZone: TimeZone): Int = { + SparkShim.millisToDays(millisUtc, timeZone) + } + + def getZoneId(timeZoneId: String): ZoneId = ZoneId.of(timeZoneId, ZoneId.SHORT_IDS) + def getTimeZone(timeZoneId: String): TimeZone = { + TimeZone.getTimeZone(getZoneId(timeZoneId)) } } diff --git a/flowman-spark-extensions/src/main/spark-2.3/org/apache/spark/sql/SparkShim.scala b/flowman-spark-extensions/src/main/spark-2.3/org/apache/spark/sql/SparkShim.scala index 6b7bc4e84..cd93e998f 100644 --- a/flowman-spark-extensions/src/main/spark-2.3/org/apache/spark/sql/SparkShim.scala +++ b/flowman-spark-extensions/src/main/spark-2.3/org/apache/spark/sql/SparkShim.scala @@ -16,6 +16,8 @@ package org.apache.spark.sql +import java.util.TimeZone + import org.apache.spark.SparkConf import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.sql.execution.QueryExecution @@ -28,12 +30,22 @@ import org.apache.spark.sql.sources.RelationProvider import org.apache.spark.sql.sources.SchemaRelationProvider import org.apache.spark.unsafe.types.CalendarInterval +import com.dimajix.util.DateTimeUtils + object SparkShim { def getHadoopConf(sparkConf:SparkConf) :org.apache.hadoop.conf.Configuration = SparkHadoopUtil.get.newConfiguration(sparkConf) def parseCalendarInterval(str:String) : CalendarInterval = CalendarInterval.fromString(str) + def calendarInterval(months:Int, days:Int, microseconds:Long=0L) : CalendarInterval = { + new CalendarInterval(months, microseconds + days*DateTimeUtils.MICROS_PER_DAY) + } + + def millisToDays(millisUtc: Long, timeZone: TimeZone): Int = { + org.apache.spark.sql.catalyst.util.DateTimeUtils.millisToDays(millisUtc, timeZone) + } + def isStaticConf(key:String) : Boolean = { SQLConf.staticConfKeys.contains(key) } diff --git a/flowman-spark-extensions/src/main/spark-2.4/org/apache/spark/sql/SparkShim.scala b/flowman-spark-extensions/src/main/spark-2.4/org/apache/spark/sql/SparkShim.scala index 6b7bc4e84..cd93e998f 100644 --- a/flowman-spark-extensions/src/main/spark-2.4/org/apache/spark/sql/SparkShim.scala +++ b/flowman-spark-extensions/src/main/spark-2.4/org/apache/spark/sql/SparkShim.scala @@ -16,6 +16,8 @@ package org.apache.spark.sql +import java.util.TimeZone + import org.apache.spark.SparkConf import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.sql.execution.QueryExecution @@ -28,12 +30,22 @@ import org.apache.spark.sql.sources.RelationProvider import org.apache.spark.sql.sources.SchemaRelationProvider import org.apache.spark.unsafe.types.CalendarInterval +import com.dimajix.util.DateTimeUtils + object SparkShim { def getHadoopConf(sparkConf:SparkConf) :org.apache.hadoop.conf.Configuration = SparkHadoopUtil.get.newConfiguration(sparkConf) def parseCalendarInterval(str:String) : CalendarInterval = CalendarInterval.fromString(str) + def calendarInterval(months:Int, days:Int, microseconds:Long=0L) : CalendarInterval = { + new CalendarInterval(months, microseconds + days*DateTimeUtils.MICROS_PER_DAY) + } + + def millisToDays(millisUtc: Long, timeZone: TimeZone): Int = { + org.apache.spark.sql.catalyst.util.DateTimeUtils.millisToDays(millisUtc, timeZone) + } + def isStaticConf(key:String) : Boolean = { SQLConf.staticConfKeys.contains(key) } diff --git a/flowman-spark-extensions/src/main/spark-3.0/org/apache/spark/sql/SparkShim.scala 
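To illustrate the timezone-aware conversion that `DateTimeUtils.millisToDays` now delegates to the per-version `SparkShim`, a minimal sketch follows; the epoch value and zone ids are purely illustrative:

```scala
import java.util.TimeZone

import com.dimajix.util.DateTimeUtils

// 1609461000000L is 2021-01-01T00:30:00Z (an arbitrary, illustrative instant)
val millis = 1609461000000L

// Interpreted in UTC the instant falls on 2021-01-01 ...
val utcDays = DateTimeUtils.millisToDays(millis, TimeZone.getTimeZone("UTC"))
// ... while in a UTC-1 zone the same instant is still on 2020-12-31, one day earlier
val gmtMinus1Days = DateTimeUtils.millisToDays(millis, TimeZone.getTimeZone("Etc/GMT+1"))

assert(gmtMinus1Days == utcDays - 1)
```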
b/flowman-spark-extensions/src/main/spark-3.0/org/apache/spark/sql/SparkShim.scala index be4fac3bb..ed6b6aba4 100644 --- a/flowman-spark-extensions/src/main/spark-3.0/org/apache/spark/sql/SparkShim.scala +++ b/flowman-spark-extensions/src/main/spark-3.0/org/apache/spark/sql/SparkShim.scala @@ -16,6 +16,8 @@ package org.apache.spark.sql +import java.util.TimeZone + import org.apache.spark.SparkConf import org.apache.spark.deploy.SparkHadoopUtil import org.apache.spark.internal.config.ConfigEntry @@ -30,12 +32,21 @@ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.sources.RelationProvider import org.apache.spark.sql.sources.SchemaRelationProvider import org.apache.spark.unsafe.types.CalendarInterval +import org.apache.spark.unsafe.types.UTF8String object SparkShim { def getHadoopConf(sparkConf:SparkConf) :org.apache.hadoop.conf.Configuration = SparkHadoopUtil.get.newConfiguration(sparkConf) - def parseCalendarInterval(str:String) : CalendarInterval = IntervalUtils.fromDayTimeString(str) + def parseCalendarInterval(str:String) : CalendarInterval = IntervalUtils.stringToInterval(UTF8String.fromString(str)) + + def calendarInterval(months:Int, days:Int, microseconds:Long=0L) : CalendarInterval = { + new CalendarInterval(months, days, microseconds) + } + + def millisToDays(millisUtc: Long, timeZone: TimeZone): Int = { + org.apache.spark.sql.catalyst.util.DateTimeUtils.millisToDays(millisUtc, timeZone.toZoneId) + } def isStaticConf(key:String) : Boolean = { SQLConf.staticConfKeys.contains(key) || diff --git a/flowman-spark-extensions/src/main/spark-3.1/com/dimajix/spark/expressions/CreateNullableStruct.scala b/flowman-spark-extensions/src/main/spark-3.1/com/dimajix/spark/expressions/CreateNullableStruct.scala new file mode 100644 index 000000000..976618813 --- /dev/null +++ b/flowman-spark-extensions/src/main/spark-3.1/com/dimajix/spark/expressions/CreateNullableStruct.scala @@ -0,0 +1,178 @@ +/* + * Copyright 2019 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.spark.expressions + +import org.apache.spark.sql.catalyst.InternalRow +import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder +import org.apache.spark.sql.catalyst.analysis.TypeCheckResult +import org.apache.spark.sql.catalyst.expressions.Alias +import org.apache.spark.sql.catalyst.expressions.CreateNamedStruct +import org.apache.spark.sql.catalyst.expressions.EmptyRow +import org.apache.spark.sql.catalyst.expressions.Expression +import org.apache.spark.sql.catalyst.expressions.ExpressionDescription +import org.apache.spark.sql.catalyst.expressions.GenericInternalRow +import org.apache.spark.sql.catalyst.expressions.Literal +import org.apache.spark.sql.catalyst.expressions.NamePlaceholder +import org.apache.spark.sql.catalyst.expressions.NamedExpression +import org.apache.spark.sql.catalyst.expressions.codegen.CodegenContext +import org.apache.spark.sql.catalyst.expressions.codegen.ExprCode +import org.apache.spark.sql.types.Metadata +import org.apache.spark.sql.types.StringType +import org.apache.spark.sql.types.StructField +import org.apache.spark.sql.types.StructType + + +/** + * Returns a Row containing the evaluation of all children expressions. + */ +object CreateNullableStruct extends FunctionBuilder { + def apply(children: Seq[Expression]): CreateNullableNamedStruct = { + CreateNullableNamedStruct(children.zipWithIndex.flatMap { + //case (e: NamedExpression, _) if e.resolved => Seq(Literal(e.name), e) + case (e: NamedExpression, _) if e.name.nonEmpty => Seq(Literal(e.name), e) + case (e: NamedExpression, _) => Seq(NamePlaceholder, e) + case (e, index) => Seq(Literal(s"col${index + 1}"), e) + }) + } +} + +/** + * Creates a struct with the given field names and values + * + * @param children Seq(name1, val1, name2, val2, ...) + */ +// scalastyle:off line.size.limit +@ExpressionDescription( + usage = "_FUNC_(name1, val1, name2, val2, ...) 
- Creates a struct with the given field names and values.", + examples = """ + Examples: + > SELECT _FUNC_("a", 1, "b", 2, "c", 3); + {"a":1,"b":2,"c":3} + """) +// scalastyle:on line.size.limit +case class CreateNullableNamedStruct(override val children: Seq[Expression]) extends Expression { + lazy val (nameExprs, valExprs) = children.grouped(2).map { + case Seq(name, value) => (name, value) + }.toList.unzip + + lazy val names = nameExprs.map(_.eval(EmptyRow)) + + override def nullable: Boolean = true + + override def foldable: Boolean = valExprs.forall(_.foldable) + + override lazy val dataType: StructType = { + val fields = names.zip(valExprs).map { + case (name, expr) => + val metadata = expr match { + case ne: NamedExpression => ne.metadata + case _ => Metadata.empty + } + StructField(name.toString, expr.dataType, expr.nullable, metadata) + } + StructType(fields) + } + + override def checkInputDataTypes(): TypeCheckResult = { + if (children.size % 2 != 0) { + TypeCheckResult.TypeCheckFailure(s"$prettyName expects an even number of arguments.") + } else { + val invalidNames = nameExprs.filterNot(e => e.foldable && e.dataType == StringType) + if (invalidNames.nonEmpty) { + TypeCheckResult.TypeCheckFailure( + s"Only foldable ${StringType.catalogString} expressions are allowed to appear at odd" + + s" position, got: ${invalidNames.mkString(",")}") + } else if (!names.contains(null)) { + TypeCheckResult.TypeCheckSuccess + } else { + TypeCheckResult.TypeCheckFailure("Field name should not be null") + } + } + } + + /** + * Returns Aliased [[Expression]]s that could be used to construct a flattened version of this + * StructType. + */ + def flatten: Seq[NamedExpression] = valExprs.zip(names).map { + case (v, n) => Alias(v, n.toString)() + } + + override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { + val rowClass = classOf[GenericInternalRow].getName + val values = ctx.freshName("values") + val nonnull = ctx.freshName("nonnull") + val evals = valExprs.zipWithIndex.map { case (e, i) => + val eval = e.genCode(ctx) + s""" + |${eval.code} + |if (${eval.isNull}) { + | $values[$i] = null; + |} else { + | $values[$i] = ${eval.value}; + | $nonnull = true; + |} + """.stripMargin + } + val codes = ctx.splitExpressionsWithCurrentInputs( + expressions = evals, + funcName = "nullable_struct", + returnType = "boolean", + extraArguments = "Object[]" -> values :: "boolean" -> nonnull :: Nil, + makeSplitFunction = body => + s""" + |do { + | $body + |} while (false); + |return $nonnull; + """.stripMargin, + foldFunctions = _.map { funcCall => + s""" + |$nonnull = $funcCall; + """.stripMargin + }.mkString + ) + + import org.apache.spark.sql.catalyst.expressions.codegen.Block.BlockHelper + val code = + code""" + |Object[] $values = new Object[${valExprs.size}]; + |boolean $nonnull = false; + |do { + | $codes + |} while (false); + | + |boolean ${ev.isNull} = !$nonnull; + |InternalRow ${ev.value} = null; + |if (!${ev.isNull}) { + | ${ev.value} = new $rowClass($values); + |} + |$values = null; + """.stripMargin + ev.copy(code = code) + } + + override def prettyName: String = "named_nullable_struct" + + override def eval(input: InternalRow): Any = { + val values = valExprs.map(_.eval(input)) + if (values.forall(_ == null)) + null + else + InternalRow(values: _*) + } +} diff --git a/flowman-spark-extensions/src/main/spark-3.1/org/apache/spark/sql/SparkShim.scala b/flowman-spark-extensions/src/main/spark-3.1/org/apache/spark/sql/SparkShim.scala new file mode 100644 index 000000000..4467d277f --- 
/dev/null +++ b/flowman-spark-extensions/src/main/spark-3.1/org/apache/spark/sql/SparkShim.scala @@ -0,0 +1,89 @@ +/* + * Copyright 2018-2019 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.sql + +import java.time.Instant +import java.time.LocalDateTime +import java.util.TimeZone + +import org.apache.spark.SparkConf +import org.apache.spark.deploy.SparkHadoopUtil +import org.apache.spark.internal.config.ConfigEntry +import org.apache.spark.sql.catalyst.analysis.ViewType +import org.apache.spark.sql.catalyst.util.DateTimeUtils.localDateToDays +import org.apache.spark.sql.catalyst.util.DateTimeUtils.microsToInstant +import org.apache.spark.sql.catalyst.util.IntervalUtils +import org.apache.spark.sql.execution.QueryExecution +import org.apache.spark.sql.execution.SQLExecution +import org.apache.spark.sql.execution.datasources.DataSource +import org.apache.spark.sql.execution.datasources.FileFormat +import org.apache.spark.sql.execution.datasources.v2.FileDataSourceV2 +import org.apache.spark.sql.internal.SQLConf +import org.apache.spark.sql.sources.RelationProvider +import org.apache.spark.sql.sources.SchemaRelationProvider +import org.apache.spark.unsafe.types.CalendarInterval +import org.apache.spark.unsafe.types.UTF8String + +import com.dimajix.util.DateTimeUtils + + +object SparkShim { + def getHadoopConf(sparkConf:SparkConf) :org.apache.hadoop.conf.Configuration = SparkHadoopUtil.get.newConfiguration(sparkConf) + + def parseCalendarInterval(str:String) : CalendarInterval = IntervalUtils.stringToInterval(UTF8String.fromString(str)) + + def calendarInterval(months:Int, days:Int, microseconds:Long=0L) : CalendarInterval = { + new CalendarInterval(months, days, microseconds) + } + + def millisToDays(millisUtc: Long, timeZone: TimeZone): Int = { + val secs = Math.floorDiv(millisUtc, DateTimeUtils.MILLIS_PER_SECOND) + val mos = Math.floorMod(millisUtc, DateTimeUtils.MILLIS_PER_SECOND) + val instant = Instant.ofEpochSecond(secs, mos * DateTimeUtils.NANOS_PER_MILLIS) + Math.toIntExact(LocalDateTime.ofInstant(instant, timeZone.toZoneId()).toLocalDate.toEpochDay) + } + + def isStaticConf(key:String) : Boolean = { + SQLConf.staticConfKeys.contains(key) || + (ConfigEntry.findEntry(key) != null && !SQLConf.sqlConfEntries.containsKey(key)) + } + + def relationSupportsMultiplePaths(spark:SparkSession, format:String) : Boolean = { + val providingClass = DataSource.lookupDataSource(format, spark.sessionState.conf) + relationSupportsMultiplePaths(providingClass) + } + + def relationSupportsMultiplePaths(providingClass:Class[_]) : Boolean = { + providingClass.newInstance() match { + case _: RelationProvider => false + case _: SchemaRelationProvider => false + case _: FileFormat => true + case _: FileDataSourceV2 => true + case _ => false + } + } + + def withNewExecutionId[T]( + sparkSession: SparkSession, + queryExecution: QueryExecution, + name: Option[String] = None)(body: => T): T = + SQLExecution.withNewExecutionId(queryExecution, name)(body) 
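+    // Re-exported catalyst ViewType constants, so callers do not need to import Spark's internal analysis package directly.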
+ + val LocalTempView : ViewType = org.apache.spark.sql.catalyst.analysis.LocalTempView + val GlobalTempView : ViewType = org.apache.spark.sql.catalyst.analysis.GlobalTempView + val PersistedView : ViewType = org.apache.spark.sql.catalyst.analysis.PersistedView +} diff --git a/flowman-spark-extensions/src/main/spark-3.1/org/apache/spark/sql/catalyst/optimizer/PushDownPredicate.scala b/flowman-spark-extensions/src/main/spark-3.1/org/apache/spark/sql/catalyst/optimizer/PushDownPredicate.scala new file mode 100644 index 000000000..181541104 --- /dev/null +++ b/flowman-spark-extensions/src/main/spark-3.1/org/apache/spark/sql/catalyst/optimizer/PushDownPredicate.scala @@ -0,0 +1,10 @@ +package org.apache.spark.sql.catalyst.optimizer + +import org.apache.spark.sql.catalyst.expressions.PredicateHelper +import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan +import org.apache.spark.sql.catalyst.rules.Rule + + +object PushDownPredicate extends Rule[LogicalPlan] with PredicateHelper { + def apply(plan: LogicalPlan): LogicalPlan = plan transform PushPredicateThroughNonJoin.applyLocally +} diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/NullableStructTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/NullableStructTest.scala index 92bf520ac..4c81e0ac2 100644 --- a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/NullableStructTest.scala +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/NullableStructTest.scala @@ -18,8 +18,8 @@ package com.dimajix.spark import org.apache.spark.sql.Row import org.apache.spark.sql.functions._ -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.sql.functions._ import com.dimajix.spark.testing.LocalSparkSession @@ -29,7 +29,7 @@ object NullableStructTest { case class Person(name:String, age:Option[Int]) } -class NullableStructTest extends FlatSpec with Matchers with LocalSparkSession { +class NullableStructTest extends AnyFlatSpec with Matchers with LocalSparkSession { import NullableStructTest.Person "The nullable_struct function" should "return non-null values" in { diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/accumulator/CounterAccumulatorTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/accumulator/CounterAccumulatorTest.scala index 8e29f1417..c47599e62 100644 --- a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/accumulator/CounterAccumulatorTest.scala +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/accumulator/CounterAccumulatorTest.scala @@ -16,11 +16,11 @@ package com.dimajix.spark.accumulator -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class CounterAccumulatorTest extends FlatSpec with Matchers { +class CounterAccumulatorTest extends AnyFlatSpec with Matchers { "The CounterAccumulator" should "accumulate individual values" in { val accumulator = new CounterAccumulator() accumulator.get("a") should be (None) diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/DataFrameUtilsTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/DataFrameUtilsTest.scala new file mode 100644 index 000000000..65c287b3a --- /dev/null +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/DataFrameUtilsTest.scala @@ -0,0 +1,188 @@ +/* + * Copyright 2021 Kaya 
Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.spark.sql + +import java.sql.Date +import java.sql.Timestamp + +import org.apache.spark.sql.Row +import org.apache.spark.sql.types.DateType +import org.apache.spark.sql.types.DoubleType +import org.apache.spark.sql.types.IntegerType +import org.apache.spark.sql.types.StringType +import org.apache.spark.sql.types.StructField +import org.apache.spark.sql.types.StructType +import org.apache.spark.sql.types.TimestampType +import org.apache.spark.storage.StorageLevel +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.spark.testing.LocalSparkSession + + +class DataFrameUtilsTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "DataFrameUtils.ofStringValues" should "create a DataFrame" in { + val lines = Seq( + Array("1","lala","2.3","2019-02-01","2019-02-01T12:34:00.000"), + Array("2","","3.4","","2019-02-01T12:34:00"), + Array("","","","",""), + Array(null:String,null,null,null,null) + ) + val schema = StructType(Seq( + StructField("c1", IntegerType), + StructField("c2", StringType), + StructField("c3", DoubleType), + StructField("c4", DateType), + StructField("c5", TimestampType) + )) + val df = DataFrameUtils.ofStringValues(spark, lines, schema) + + df.collect() should be (Seq( + Row(1,"lala",2.3, Date.valueOf("2019-02-01"),Timestamp.valueOf("2019-02-01 12:34:00")), + Row(2,null,3.4,null,Timestamp.valueOf("2019-02-01 12:34:00")), + Row(null,null,null,null,null), + Row(null,null,null,null,null) + )) + df.schema should be (schema) + } + + "DataFrameUtils.ofSchema" should "create an empty DataFrame" in { + val schema = StructType(Seq( + StructField("c1", IntegerType), + StructField("c2", StringType), + StructField("c3", DoubleType), + StructField("c4", DateType) + )) + val df = DataFrameUtils.ofSchema(spark, schema) + + df.collect() should be (Seq()) + df.schema should be (schema) + } + + "DataFrameUtils.compare" should "work" in { + val schema = StructType(Seq( + StructField("c1", IntegerType), + StructField("c2", StringType), + StructField("c3", DoubleType), + StructField("c4", DateType) + )) + val lines = Seq( + Array("1","lala","2.3","2019-02-01"), + Array("2","","3.4",""), + Array("",null,"",null) + ) + val df1 = DataFrameUtils.ofStringValues(spark, lines, schema) + val df2 = DataFrameUtils.ofStringValues(spark, lines, schema) + + DataFrameUtils.compare(df1, df2) should be (true) + DataFrameUtils.compare(df1.limit(2), df2) should be (false) + DataFrameUtils.compare(df1, df2.limit(2)) should be (false) + DataFrameUtils.compare(df1.drop("c1"), df2) should be (false) + DataFrameUtils.compare(df1, df2.drop("c1")) should be (false) + } + + "DataFrameUtils.diff" should "work" in { + val schema = StructType(Seq( + StructField("c1", IntegerType), + StructField("c2", StringType), + StructField("c3", DoubleType), + StructField("c4", DateType) + )) + val lines = Seq( + Array("1","lala","2.3","2019-02-01"), + Array("2","","3.4",""), 
+ Array("",null,"",null) + ) + val df1 = DataFrameUtils.ofStringValues(spark, lines, schema) + val df2 = DataFrameUtils.ofStringValues(spark, lines, schema) + + DataFrameUtils.diff(df1, df2) should be (None) + DataFrameUtils.diff(df1.limit(2), df2) should not be (None) + DataFrameUtils.diff(df1, df2.limit(2)) should not be (None) + } + + "DataFrameUtils.diffToStringValues" should "work" in { + val schema = StructType(Seq( + StructField("c1", IntegerType), + StructField("c2", StringType), + StructField("c3", DoubleType), + StructField("c4", DateType) + )) + val lines = Seq( + Array("1","lala","2.3","2019-02-01"), + Array("2","","3.4",""), + Array("",null,"",null) + ) + val df1 = DataFrameUtils.ofStringValues(spark, lines, schema) + + DataFrameUtils.diffToStringValues(lines, df1) should be (None) + DataFrameUtils.diffToStringValues(lines, df1.limit(2)) should not be (None) + DataFrameUtils.diffToStringValues(lines.take(2), df1) should not be (None) + } + + "DataFrameUtils.withCaches" should "cache und uncache DataFrames" in { + val df = spark.emptyDataFrame + + df.storageLevel should be (StorageLevel.NONE) + + DataFrameUtils.withCaches(Seq(df)) { + df.storageLevel should be (StorageLevel.MEMORY_AND_DISK) + } + + df.storageLevel should be (StorageLevel.NONE) + } + + it should "cache und uncache in presence of exceptions" in { + val df = spark.emptyDataFrame + + df.storageLevel should be (StorageLevel.NONE) + + an[IllegalArgumentException] should be thrownBy( + DataFrameUtils.withCaches(Seq(df)) { + df.storageLevel should be (StorageLevel.MEMORY_AND_DISK) + throw new IllegalArgumentException() + }) + + df.storageLevel should be (StorageLevel.NONE) + } + + "DataFrameUtils.withTempViews" should "create and unregister temp views" in { + val df = spark.emptyDataFrame + + spark.sessionState.catalog.getTempView("temp") should be (None) + + DataFrameUtils.withTempViews(Seq("temp" -> df)) { + spark.sessionState.catalog.getTempView("temp") should be (Some(df.queryExecution.logical)) + } + + spark.sessionState.catalog.getTempView("temp") should be (None) + } + + it should "create and unregister temp views in presence of exceptions" in { + val df = spark.emptyDataFrame + + spark.sessionState.catalog.getTempView("temp") should be (None) + + an[IllegalArgumentException] should be thrownBy( + DataFrameUtils.withTempViews(Seq("temp" -> df)) { + spark.sessionState.catalog.getTempView("temp") should be (Some(df.queryExecution.logical)) + throw new IllegalArgumentException() + }) + + spark.sessionState.catalog.getTempView("temp") should be (None) + } +} diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/FunctionsTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/FunctionsTest.scala index 0b4b89549..7f53d304e 100644 --- a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/FunctionsTest.scala +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/FunctionsTest.scala @@ -16,14 +16,14 @@ package com.dimajix.spark.sql -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.sql.execution.ExtraStrategies -import com.dimajix.spark.testing.LocalSparkSession import com.dimajix.spark.sql.functions._ +import com.dimajix.spark.testing.LocalSparkSession -class FunctionsTest extends FlatSpec with Matchers with LocalSparkSession { +class FunctionsTest extends AnyFlatSpec with Matchers with LocalSparkSession { "count_records" should 
"work" in { ExtraStrategies.register(spark) val df = spark.createDataFrame(Seq((1,2), (3,4))) diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/RowParserTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/RowParserTest.scala new file mode 100644 index 000000000..671862ff1 --- /dev/null +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/RowParserTest.scala @@ -0,0 +1,130 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.spark.sql + +import java.sql.Date +import java.sql.Timestamp + +import org.apache.spark.sql.Row +import org.apache.spark.sql.catalyst.util.BadRecordException +import org.apache.spark.sql.types.DateType +import org.apache.spark.sql.types.DoubleType +import org.apache.spark.sql.types.IntegerType +import org.apache.spark.sql.types.StringType +import org.apache.spark.sql.types.StructField +import org.apache.spark.sql.types.StructType +import org.apache.spark.sql.types.TimestampType +import org.apache.spark.unsafe.types.UTF8String +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.util.DateTimeUtils + + +class RowParserTest extends AnyFlatSpec with Matchers { + "The RowParser" should "work different data types" in { + val lines = Seq( + Array("1","lala","2.3","2019-02-01","2019-02-01T12:34:00.000"), + Array("2","lolo","3.4","2019-02-02","2019-02-01T12:34:00"), + Array("","","","",""), + Array(null:String,null:String,null:String,null:String,null:String) + ) + val schema = StructType(Seq( + StructField("c1", IntegerType), + StructField("c2", StringType), + StructField("c3", DoubleType), + StructField("c4", DateType), + StructField("c5", TimestampType) + )) + val parser = new RowParser(schema, RowParser.Options()) + + val result = lines.map(parser.parse) + + result should be (Seq( + Row(1,"lala",2.3, Date.valueOf("2019-02-01"),new Timestamp(DateTimeUtils.stringToTime("2019-02-01T12:34:00").getTime)), + Row(2,"lolo",3.4, Date.valueOf("2019-02-02"),new Timestamp(DateTimeUtils.stringToTime("2019-02-01T12:34:00").getTime)), + Row(null,null,null,null,null), + Row(null,null,null,null,null) + )) + } + + it should "accept fewer columns if told so" in { + val lines = Seq( + Array("1","lala"), + Array("","") + ) + val schema = StructType(Seq( + StructField("c1", IntegerType), + StructField("c2", StringType), + StructField("c3", DoubleType) + )) + val parser = new RowParser(schema, RowParser.Options(addExtraColumns = true)) + + val result = lines.map(parser.parse) + + result should be (Seq( + Row(1,"lala",null), + Row(null,null,null) + )) + } + + it should "throw exceptions on missing columns in strict mode" in { + val lines = Seq( + Array("1","lala") + ) + val schema = StructType(Seq( + StructField("c1", IntegerType), + StructField("c2", StringType), + StructField("c3", DoubleType) + )) + val parser = new RowParser(schema, RowParser.Options(addExtraColumns = false)) + + a[BadRecordException] 
should be thrownBy (lines.map(parser.parse)) + } + + it should "accept extra columns if told so" in { + val lines = Seq( + Array("1","lala","23","y") + ) + val schema = StructType(Seq( + StructField("c1", IntegerType), + StructField("c2", StringType), + StructField("c3", DoubleType) + )) + val parser = new RowParser(schema, RowParser.Options(removeExtraColumns = true)) + + val result = lines.map(parser.parse) + + result should be (Seq( + Row(1,"lala",23.0) + )) + } + + it should "throw exceptions on extra columns in strict mode" in { + val lines = Seq( + Array("1","lala","x","y") + ) + val schema = StructType(Seq( + StructField("c1", IntegerType), + StructField("c2", StringType), + StructField("c3", DoubleType) + )) + val parser = new RowParser(schema, RowParser.Options(removeExtraColumns = false)) + + a[BadRecordException] should be thrownBy (lines.map(parser.parse)) + } +} diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/SqlParserTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/SqlParserTest.scala index 9fbdcd78a..09d4b0e16 100644 --- a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/SqlParserTest.scala +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/SqlParserTest.scala @@ -16,11 +16,11 @@ package com.dimajix.spark.sql -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class SqlParserTest extends FlatSpec with Matchers { +class SqlParserTest extends AnyFlatSpec with Matchers { "The SqlParser" should "detect all dependencies" in { val deps = SqlParser.resolveDependencies( """ diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/catalyst/PlanUtilsTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/catalyst/PlanUtilsTest.scala index 334402413..211b7632c 100644 --- a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/catalyst/PlanUtilsTest.scala +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/catalyst/PlanUtilsTest.scala @@ -21,14 +21,14 @@ import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.sql.SqlParser import com.dimajix.spark.testing.LocalSparkSession -class PlanUtilsTest extends FlatSpec with Matchers with LocalSparkSession { +class PlanUtilsTest extends AnyFlatSpec with Matchers with LocalSparkSession { "Replacing dependencies" should "work" in { val sql = """ diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/catalyst/SqlBuilderTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/catalyst/SqlBuilderTest.scala index d9f688910..cc698e815 100644 --- a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/catalyst/SqlBuilderTest.scala +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/catalyst/SqlBuilderTest.scala @@ -16,13 +16,13 @@ package com.dimajix.spark.sql.catalyst -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.testing.LocalSparkSession -class SqlBuilderTest extends FlatSpec with Matchers with LocalSparkSession { +class 
SqlBuilderTest extends AnyFlatSpec with Matchers with LocalSparkSession { override def beforeAll(): Unit = { super.beforeAll() diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/local/DataFrameReaderTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/local/DataFrameReaderTest.scala index 0af27575b..5f41838c1 100644 --- a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/local/DataFrameReaderTest.scala +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/local/DataFrameReaderTest.scala @@ -16,14 +16,14 @@ package com.dimajix.spark.sql.local -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.sql.local.implicits._ import com.dimajix.spark.testing.LocalSparkSession -class DataFrameReaderTest extends FlatSpec with Matchers with LocalSparkSession { +class DataFrameReaderTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The DataFrameReader" should "be instantiated by a readLocal call" in { spark.readLocal should not be (null) } diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/local/DataFrameWriterTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/local/DataFrameWriterTest.scala index 493e353b7..4b18f02e9 100644 --- a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/local/DataFrameWriterTest.scala +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/local/DataFrameWriterTest.scala @@ -16,14 +16,14 @@ package com.dimajix.spark.sql.local -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -import com.dimajix.spark.testing.LocalSparkSession import com.dimajix.spark.sql.local.implicits._ +import com.dimajix.spark.testing.LocalSparkSession -class DataFrameWriterTest extends FlatSpec with Matchers with LocalSparkSession { +class DataFrameWriterTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The DataFrameWriter" should "be instantiated by a readLocal call" in { val writer = spark.emptyDataFrame.writeLocal writer should not be (null) diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/local/csv/CsvRelationTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/local/csv/CsvRelationTest.scala index 0d3066f7f..dc5ed73b0 100644 --- a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/local/csv/CsvRelationTest.scala +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/local/csv/CsvRelationTest.scala @@ -17,49 +17,57 @@ package com.dimajix.spark.sql.local.csv import java.io.File +import java.sql.Timestamp +import java.sql.Date +import org.apache.spark.sql.Row import org.apache.spark.sql.SaveMode +import org.apache.spark.sql.types.DateType import org.apache.spark.sql.types.DoubleType import org.apache.spark.sql.types.IntegerType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.BeforeAndAfter -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.apache.spark.sql.types.TimestampType +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -import com.dimajix.spark.testing.LocalSparkSession import com.dimajix.spark.sql.local.implicits._ +import 
com.dimajix.spark.testing.LocalSparkSession -class CsvRelationTest extends FlatSpec with Matchers with BeforeAndAfter with LocalSparkSession { - "The csv relation" should "support writing CSV files" in { - val df = spark.createDataFrame(Seq((1,"lala", 1.2),(2,"lolo", 2.3))) +class CsvRelationTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "The local CSV relation" should "support writing CSV files" in { + val df = spark.createDataFrame(Seq( + (1,"lala", 1.2, Timestamp.valueOf("2020-01-02 23:12:31"), Date.valueOf("2020-01-02")), + (2,"lolo", 2.3, Timestamp.valueOf("2021-03-02 21:12:31"), Date.valueOf("2020-02-02")) + )) df.writeLocal .format("csv") .option("encoding", "UTF-8") + .option("header", true) .save(new File(tempDir, "lala.csv"), SaveMode.Overwrite) } it should "support reading CSV files" in { - val df = spark.readLocal + val result = spark.readLocal .format("csv") .schema(StructType( StructField("int_field", IntegerType) :: StructField("str_field", StringType) :: StructField("double_field", DoubleType) :: + StructField("timestamp_field", TimestampType) :: + StructField("date_field", DateType) :: Nil )) .option("encoding", "UTF-8") + .option("header", true) .load(new File(tempDir, "lala.csv")) - val result = df + result.count() should be (2) - val rows = result.collect() - rows(0).getInt(0) should be (1) - rows(0).getString(1) should be ("lala") - rows(0).getDouble(2) should be (1.2) - rows(1).getInt(0) should be (2) - rows(1).getString(1) should be ("lolo") - rows(1).getDouble(2) should be (2.3) + result.collect() should be (Seq( + Row(1,"lala", 1.2, Timestamp.valueOf("2020-01-02 23:12:31"), Date.valueOf("2020-01-02")), + Row(2,"lolo", 2.3, Timestamp.valueOf("2021-03-02 21:12:31"), Date.valueOf("2020-02-02")) + )) } } diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/sources/empty/NullFormatTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/sources/empty/NullFormatTest.scala index 26391e494..5110f7ba1 100644 --- a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/sources/empty/NullFormatTest.scala +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/sources/empty/NullFormatTest.scala @@ -28,13 +28,13 @@ import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType import org.apache.spark.sql.types.TimestampType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.testing.LocalSparkSession -class NullFormatTest extends FlatSpec with Matchers with LocalSparkSession { +class NullFormatTest extends AnyFlatSpec with Matchers with LocalSparkSession { val schema = StructType( StructField("s", StringType, metadata = new MetadataBuilder().putLong("size", 8).build()) :: diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/sources/fixedwidth/FixedWidthFormatTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/sources/fixedwidth/FixedWidthFormatTest.scala index 3dd14f431..228b74ea4 100644 --- a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/sources/fixedwidth/FixedWidthFormatTest.scala +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/sources/fixedwidth/FixedWidthFormatTest.scala @@ -34,13 +34,13 @@ import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import 
org.apache.spark.sql.types.StructType import org.apache.spark.sql.types.TimestampType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.testing.LocalSparkSession -class FixedWidthFormatTest extends FlatSpec with Matchers with LocalSparkSession { +class FixedWidthFormatTest extends AnyFlatSpec with Matchers with LocalSparkSession { private val formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss[.S]").withZone(ZoneOffset.UTC) def parseDateTime(value:String) = new Timestamp(LocalDateTime.parse(value, formatter).toEpochSecond(ZoneOffset.UTC) * 1000l) diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/sources/sequencefile/SequenceFileFormatTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/sources/sequencefile/SequenceFileFormatTest.scala index def80df34..4d435aede 100644 --- a/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/sources/sequencefile/SequenceFileFormatTest.scala +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/spark/sql/sources/sequencefile/SequenceFileFormatTest.scala @@ -24,13 +24,13 @@ import org.apache.spark.sql.types.BinaryType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.spark.testing.LocalSparkSession -class SequenceFileFormatTest extends FlatSpec with Matchers with LocalSparkSession { +class SequenceFileFormatTest extends AnyFlatSpec with Matchers with LocalSparkSession { private val inputRecords = Seq( ("key1", "value1"), ("key2", "value2") diff --git a/flowman-spark-extensions/src/test/scala/com/dimajix/util/DateTimeUtilsTest.scala b/flowman-spark-extensions/src/test/scala/com/dimajix/util/DateTimeUtilsTest.scala new file mode 100644 index 000000000..5edeb732f --- /dev/null +++ b/flowman-spark-extensions/src/test/scala/com/dimajix/util/DateTimeUtilsTest.scala @@ -0,0 +1,53 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.util + +import java.sql.Timestamp +import java.sql.{Date => SqlDate} +import java.time.Instant +import java.time.ZoneId +import java.time.ZonedDateTime +import java.util.Date +import java.util.TimeZone + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + + +class DateTimeUtilsTest extends AnyFlatSpec with Matchers { + "DateTimeUtils" should "correctly parse date and time" in { + DateTimeUtils.stringToTime("2020-02-03") should be (SqlDate.valueOf("2020-02-03")) + DateTimeUtils.stringToTime("2020-02-03 23:11:20") should be (Timestamp.valueOf("2020-02-03 23:11:20")) + //DateTimeUtils.stringToTime("2000-01-01T22:33GMT+01:00") + DateTimeUtils.stringToTime("2020-02-03T22:33:11GMT+05:00") should be (Date.from(Instant.from(ZonedDateTime.of(2020,2,3,22,33,11,0,ZoneId.of("GMT+05:00"))))) + //DateTimeUtils.stringToTime("2000-01-01T22:33") + DateTimeUtils.stringToTime("2020-02-03T22:33:11") should be (Timestamp.valueOf("2020-02-03 22:33:11")) + } + + it should "correctly convert milliseconds to days" in { + val utc = TimeZone.getTimeZone("UTC") + + DateTimeUtils.millisToDays(0, utc) should be (0) + DateTimeUtils.millisToDays(DateTimeUtils.MILLIS_PER_DAY, utc) should be (1) + DateTimeUtils.millisToDays(DateTimeUtils.MILLIS_PER_DAY - 1, utc) should be (0) + DateTimeUtils.millisToDays(DateTimeUtils.MILLIS_PER_DAY + 1, utc) should be (1) + + DateTimeUtils.millisToDays(-DateTimeUtils.MILLIS_PER_DAY, utc) should be (-1) + DateTimeUtils.millisToDays(-DateTimeUtils.MILLIS_PER_DAY - 1, utc) should be (-2) + DateTimeUtils.millisToDays(-DateTimeUtils.MILLIS_PER_DAY + 1, utc) should be (-1) + } +} diff --git a/flowman-spark-extensions/src/test/scala/org/apache/spark/sql/SparkShimTest.scala b/flowman-spark-extensions/src/test/scala/org/apache/spark/sql/SparkShimTest.scala new file mode 100644 index 000000000..d504dd69e --- /dev/null +++ b/flowman-spark-extensions/src/test/scala/org/apache/spark/sql/SparkShimTest.scala @@ -0,0 +1,46 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.spark.sql + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.spark.testing.LocalSparkSession +import com.dimajix.util.DateTimeUtils + + +class SparkShimTest extends AnyFlatSpec with Matchers with LocalSparkSession{ + "The SparkShim" should "return a new Hadoop configuration" in { + val conf = SparkShim.getHadoopConf(spark.sparkContext.getConf) + conf should not be (null) + } + + it should "parse CalendarIntervals" in { + SparkShim.parseCalendarInterval("interval 2 hours") should be(SparkShim.calendarInterval(0, 0, 2 * DateTimeUtils.MICROS_PER_HOUR)) + SparkShim.parseCalendarInterval("interval 1 day") should be(SparkShim.calendarInterval(0, 1)) + } + + it should "support static configs" in { + SparkShim.isStaticConf("spark.sql.warehouse.dir") should be (true) + SparkShim.isStaticConf("spark.sql.autoBroadcastJoinThreshold") should be (false) + } + + it should "find out if a relation supports multiple paths" in { + SparkShim.relationSupportsMultiplePaths(spark, "csv") should be (true) + SparkShim.relationSupportsMultiplePaths(spark, "jdbc") should be (false) + } +} diff --git a/flowman-spark-testing/pom.xml b/flowman-spark-testing/pom.xml index 3516391c8..bb6616464 100644 --- a/flowman-spark-testing/pom.xml +++ b/flowman-spark-testing/pom.xml @@ -9,7 +9,7 @@ com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 .. @@ -43,21 +43,15 @@ - org.mockito - mockito-core - provided + com.dimajix.flowman + flowman-scalatest-compat + compile org.scalatest scalatest_${scala.api_version} - provided - - - - org.scalamock - scalamock_${scala.api_version} - provided + compile diff --git a/flowman-spark-testing/src/main/scala/com/dimajix/spark/testing/MockedSparkSession.scala b/flowman-spark-testing/src/main/scala/com/dimajix/spark/testing/MockedSparkSession.scala deleted file mode 100644 index 378e4873c..000000000 --- a/flowman-spark-testing/src/main/scala/com/dimajix/spark/testing/MockedSparkSession.scala +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2018 Kaya Kupferschmidt - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.dimajix.spark.testing - -import org.apache.spark.sql.SparkSession -import org.mockito.Mockito.when -import org.scalatest.BeforeAndAfterAll -import org.scalatest.Suite -import org.scalatestplus.mockito.MockitoSugar - - -trait MockedSparkSession extends BeforeAndAfterAll with MockitoSugar { this:Suite => - private var session: SparkSession = _ - var spark: SparkSession = _ - - override def beforeAll() : Unit = { - session = SparkSession.builder() - .master("local[2]") - .config("spark.ui.enabled", "false") - .config("spark.sql.warehouse.dir", "file:///tmp/spark-warehouse") - .config("spark.sql.shuffle.partitions", "8") - .getOrCreate() - session.sparkContext.setLogLevel("WARN") - - spark = mock[SparkSession] - when(spark.newSession()).thenReturn(spark) - when(spark.sparkContext).thenReturn(session.sparkContext) - when(spark.conf).thenReturn(session.conf) - } - override def afterAll() : Unit = { - if (session != null) { - session.stop() - session = null - spark = null - } - } -} diff --git a/flowman-spark-testing/src/test/scala/com/dimajix/spark/testing/LocalSparkSessionTest.scala b/flowman-spark-testing/src/test/scala/com/dimajix/spark/testing/LocalSparkSessionTest.scala index bdf8cdcec..a76dc8439 100644 --- a/flowman-spark-testing/src/test/scala/com/dimajix/spark/testing/LocalSparkSessionTest.scala +++ b/flowman-spark-testing/src/test/scala/com/dimajix/spark/testing/LocalSparkSessionTest.scala @@ -16,11 +16,11 @@ package com.dimajix.spark.testing -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class LocalSparkSessionTest extends FlatSpec with Matchers with LocalSparkSession { +class LocalSparkSessionTest extends AnyFlatSpec with Matchers with LocalSparkSession { "A SparkSession" should "have configurable properties" in { spark.conf.set("spark.sql.prop1", "p1") spark.conf.get("spark.sql.prop1") should be ("p1") diff --git a/flowman-spec/pom.xml b/flowman-spec/pom.xml index 55458f20d..7d7295754 100644 --- a/flowman-spec/pom.xml +++ b/flowman-spec/pom.xml @@ -9,7 +9,7 @@ flowman-root com.dimajix.flowman - 0.14.2 + 0.15.0 .. @@ -23,41 +23,6 @@ org.scalatest scalatest-maven-plugin - - org.apache.maven.plugins - maven-shade-plugin - - - - org.json:json - com.github.everit-org.json-schema:org.everit.json.schema - - - - - *:* - - META-INF/* - - - - - - org.json - com.dimajix.shaded.json - - - org.everit.json - com.dimajix.shaded.everit - - - - - - - org.codehaus.mojo - flatten-maven-plugin - @@ -99,23 +64,9 @@ - org.json - json - 20190722 - compile - - - - com.github.everit-org.json-schema - org.everit.json.schema - 1.12.1 - compile - - - - io.swagger - swagger-parser - 1.0.49 + com.damnhandy + handy-uri-templates + 2.1.8 compile @@ -138,10 +89,5 @@ org.scalamock scalamock_${scala.api_version} - - - org.mockito - mockito-core - diff --git a/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/AssertionType.java b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/AssertionType.java new file mode 100644 index 000000000..bb29550ba --- /dev/null +++ b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/AssertionType.java @@ -0,0 +1,37 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.annotation; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + + +/** + * This annotation marks a specific class as an [[Assertion]] to be used in a data flow spec. The specific Assertion itself has + * to derive from the Assertion class + */ +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.TYPE}) +public @interface AssertionType { + /** + * Specifies the kind of the assertion which is used in data flow specifications. + * @return + */ + String kind(); +} diff --git a/flowman-core/src/main/java/com/dimajix/flowman/annotation/CatalogType.java b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/CatalogType.java similarity index 95% rename from flowman-core/src/main/java/com/dimajix/flowman/annotation/CatalogType.java rename to flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/CatalogType.java index 9a4dee102..2e3938c5a 100644 --- a/flowman-core/src/main/java/com/dimajix/flowman/annotation/CatalogType.java +++ b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/CatalogType.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.dimajix.flowman.annotation; +package com.dimajix.flowman.spec.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/flowman-core/src/main/java/com/dimajix/flowman/annotation/ConnectionType.java b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/ConnectionType.java similarity index 96% rename from flowman-core/src/main/java/com/dimajix/flowman/annotation/ConnectionType.java rename to flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/ConnectionType.java index 20d442ebe..f94436476 100644 --- a/flowman-core/src/main/java/com/dimajix/flowman/annotation/ConnectionType.java +++ b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/ConnectionType.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.dimajix.flowman.annotation; +package com.dimajix.flowman.spec.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/flowman-core/src/main/java/com/dimajix/flowman/annotation/DatasetType.java b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/DatasetType.java similarity index 96% rename from flowman-core/src/main/java/com/dimajix/flowman/annotation/DatasetType.java rename to flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/DatasetType.java index e9de99da3..a52cae760 100644 --- a/flowman-core/src/main/java/com/dimajix/flowman/annotation/DatasetType.java +++ b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/DatasetType.java @@ -14,7 +14,7 @@ * limitations under the License.
*/ -package com.dimajix.flowman.annotation; +package com.dimajix.flowman.spec.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/flowman-core/src/main/java/com/dimajix/flowman/annotation/HistoryType.java b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/HistoryType.java similarity index 96% rename from flowman-core/src/main/java/com/dimajix/flowman/annotation/HistoryType.java rename to flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/HistoryType.java index dc2483a25..29c0ab400 100644 --- a/flowman-core/src/main/java/com/dimajix/flowman/annotation/HistoryType.java +++ b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/HistoryType.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.dimajix.flowman.annotation; +package com.dimajix.flowman.spec.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/flowman-core/src/main/java/com/dimajix/flowman/annotation/HookType.java b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/HookType.java similarity index 96% rename from flowman-core/src/main/java/com/dimajix/flowman/annotation/HookType.java rename to flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/HookType.java index b5bb7e864..e705a1ea6 100644 --- a/flowman-core/src/main/java/com/dimajix/flowman/annotation/HookType.java +++ b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/HookType.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.dimajix.flowman.annotation; +package com.dimajix.flowman.spec.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/flowman-core/src/main/java/com/dimajix/flowman/annotation/MappingType.java b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/MappingType.java similarity index 96% rename from flowman-core/src/main/java/com/dimajix/flowman/annotation/MappingType.java rename to flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/MappingType.java index 291d19464..9640f6dc3 100644 --- a/flowman-core/src/main/java/com/dimajix/flowman/annotation/MappingType.java +++ b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/MappingType.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.dimajix.flowman.annotation; +package com.dimajix.flowman.spec.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/flowman-core/src/main/java/com/dimajix/flowman/annotation/MetricSinkType.java b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/MetricSinkType.java similarity index 95% rename from flowman-core/src/main/java/com/dimajix/flowman/annotation/MetricSinkType.java rename to flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/MetricSinkType.java index 6faf9f0f5..1838e92b7 100644 --- a/flowman-core/src/main/java/com/dimajix/flowman/annotation/MetricSinkType.java +++ b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/MetricSinkType.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package com.dimajix.flowman.annotation; +package com.dimajix.flowman.spec.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/flowman-core/src/main/java/com/dimajix/flowman/annotation/RelationType.java b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/RelationType.java similarity index 96% rename from flowman-core/src/main/java/com/dimajix/flowman/annotation/RelationType.java rename to flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/RelationType.java index 34e871b4b..7909d808d 100644 --- a/flowman-core/src/main/java/com/dimajix/flowman/annotation/RelationType.java +++ b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/RelationType.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.dimajix.flowman.annotation; +package com.dimajix.flowman.spec.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/flowman-core/src/main/java/com/dimajix/flowman/annotation/SchemaType.java b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/SchemaType.java similarity index 96% rename from flowman-core/src/main/java/com/dimajix/flowman/annotation/SchemaType.java rename to flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/SchemaType.java index 2e61fa640..8f18520e6 100644 --- a/flowman-core/src/main/java/com/dimajix/flowman/annotation/SchemaType.java +++ b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/SchemaType.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.dimajix.flowman.annotation; +package com.dimajix.flowman.spec.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/flowman-core/src/main/java/com/dimajix/flowman/annotation/StoreType.java b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/StoreType.java similarity index 96% rename from flowman-core/src/main/java/com/dimajix/flowman/annotation/StoreType.java rename to flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/StoreType.java index 9f13d0b92..ac3b0e38b 100644 --- a/flowman-core/src/main/java/com/dimajix/flowman/annotation/StoreType.java +++ b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/StoreType.java @@ -14,7 +14,7 @@ * limitations under the License. */ -package com.dimajix.flowman.annotation; +package com.dimajix.flowman.spec.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/flowman-core/src/main/java/com/dimajix/flowman/annotation/TargetType.java b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/TargetType.java similarity index 96% rename from flowman-core/src/main/java/com/dimajix/flowman/annotation/TargetType.java rename to flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/TargetType.java index c6eddee9c..55b1d1da4 100644 --- a/flowman-core/src/main/java/com/dimajix/flowman/annotation/TargetType.java +++ b/flowman-spec/src/main/java/com/dimajix/flowman/spec/annotation/TargetType.java @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package com.dimajix.flowman.annotation; +package com.dimajix.flowman.spec.annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; diff --git a/flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.spi.ClassAnnotationHandler b/flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.spi.ClassAnnotationHandler index a7a895e70..565498e06 100644 --- a/flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.spi.ClassAnnotationHandler +++ b/flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.spi.ClassAnnotationHandler @@ -9,3 +9,4 @@ com.dimajix.flowman.spec.relation.RelationSpecAnnotationHandler com.dimajix.flowman.spec.schema.SchemaSpecAnnotationHandler com.dimajix.flowman.spec.storage.StorageSpecAnnotationHandler com.dimajix.flowman.spec.target.TargetSpecAnnotationHandler +com.dimajix.flowman.spec.assertion.AssertionSpecAnnotationHandler diff --git a/flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.model.ModuleReader b/flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.spi.ModuleReader similarity index 100% rename from flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.model.ModuleReader rename to flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.spi.ModuleReader diff --git a/flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.model.NamespaceReader b/flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.spi.NamespaceReader similarity index 100% rename from flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.model.NamespaceReader rename to flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.spi.NamespaceReader diff --git a/flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.model.ProjectReader b/flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.spi.ProjectReader similarity index 100% rename from flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.model.ProjectReader rename to flowman-spec/src/main/resources/META-INF/services/com.dimajix.flowman.spi.ProjectReader diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/Module.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/Module.scala index 8cd3effd0..6b13865fa 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/Module.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/Module.scala @@ -25,6 +25,7 @@ import com.dimajix.flowman.spec.mapping.MappingSpec import com.dimajix.flowman.spec.job.JobSpec import com.dimajix.flowman.spec.relation.RelationSpec import com.dimajix.flowman.spec.target.TargetSpec +import com.dimajix.flowman.spec.test.TestSpec class ModuleSpec { @@ -42,6 +43,8 @@ class ModuleSpec { @JsonProperty(value="targets") private var targets: Map[String,TargetSpec] = Map() @JsonDeserialize(converter=classOf[JobSpec.NameResolver]) @JsonProperty(value="jobs") private var jobs: Map[String,JobSpec] = Map() + @JsonDeserialize(converter=classOf[TestSpec.NameResolver]) + @JsonProperty(value="tests") private var tests: Map[String,TestSpec] = Map() def instantiate() : Module = { Module( @@ -52,7 +55,8 @@ class ModuleSpec { relations = relations, mappings = mappings, targets = targets, - jobs = jobs + jobs = jobs, + tests = tests ) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/ObjectMapper.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/ObjectMapper.scala index 
a4db658f4..4f06f9ef2 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/ObjectMapper.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/ObjectMapper.scala @@ -19,8 +19,10 @@ package com.dimajix.flowman.spec import com.fasterxml.jackson.databind.jsontype.NamedType import com.fasterxml.jackson.databind.{ObjectMapper => JacksonMapper} +import com.dimajix.flowman.spec.assertion.AssertionSpec import com.dimajix.flowman.spec.catalog.CatalogSpec import com.dimajix.flowman.spec.connection.ConnectionSpec +import com.dimajix.flowman.spec.dataset.DatasetSpec import com.dimajix.flowman.spec.history.HistorySpec import com.dimajix.flowman.spec.mapping.MappingSpec import com.dimajix.flowman.spec.relation.RelationSpec @@ -51,6 +53,8 @@ object ObjectMapper extends CoreObjectMapper { val targetTypes = TargetSpec.subtypes.map(kv => new NamedType(kv._2, kv._1)) val schemaTypes = SchemaSpec.subtypes.map(kv => new NamedType(kv._2, kv._1)) val connectionTypes = ConnectionSpec.subtypes.map(kv => new NamedType(kv._2, kv._1)) + val assertionTypes = AssertionSpec.subtypes.map(kv => new NamedType(kv._2, kv._1)) + val datasetTypes = DatasetSpec.subtypes.map(kv => new NamedType(kv._2, kv._1)) val mapper = super.mapper mapper.registerSubtypes(stateStoreTypes:_*) mapper.registerSubtypes(catalogTypes:_*) @@ -60,6 +64,8 @@ object ObjectMapper extends CoreObjectMapper { mapper.registerSubtypes(targetTypes:_*) mapper.registerSubtypes(schemaTypes:_*) mapper.registerSubtypes(connectionTypes:_*) + mapper.registerSubtypes(assertionTypes:_*) + mapper.registerSubtypes(datasetTypes:_*) mapper } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/YamlModuleReader.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/YamlModuleReader.scala index 1d680a872..a11289d30 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/YamlModuleReader.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/YamlModuleReader.scala @@ -20,7 +20,7 @@ import java.io.InputStream import com.dimajix.flowman.hadoop.File import com.dimajix.flowman.model.Module -import com.dimajix.flowman.model.ModuleReader +import com.dimajix.flowman.spi.ModuleReader class YamlModuleReader extends ModuleReader { diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/YamlNamespaceReader.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/YamlNamespaceReader.scala index 9e1054a8e..4e8733f31 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/YamlNamespaceReader.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/YamlNamespaceReader.scala @@ -20,7 +20,7 @@ import java.io.File import java.io.InputStream import com.dimajix.flowman.model.Namespace -import com.dimajix.flowman.model.NamespaceReader +import com.dimajix.flowman.spi.NamespaceReader class YamlNamespaceReader extends NamespaceReader { diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/YamlProjectReader.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/YamlProjectReader.scala index 00e592516..007ab7304 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/YamlProjectReader.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/YamlProjectReader.scala @@ -18,7 +18,7 @@ package com.dimajix.flowman.spec import com.dimajix.flowman.hadoop.File import com.dimajix.flowman.model.Project -import com.dimajix.flowman.model.ProjectReader +import com.dimajix.flowman.spi.ProjectReader class YamlProjectReader extends ProjectReader { diff --git 
a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/assertion/AssertionSpec.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/assertion/AssertionSpec.scala new file mode 100644 index 000000000..2411dabc4 --- /dev/null +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/assertion/AssertionSpec.scala @@ -0,0 +1,71 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.assertion + +import com.fasterxml.jackson.annotation.JsonProperty +import com.fasterxml.jackson.annotation.JsonSubTypes +import com.fasterxml.jackson.annotation.JsonTypeInfo +import com.fasterxml.jackson.databind.util.StdConverter + +import com.dimajix.common.TypeRegistry +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.model.Assertion +import com.dimajix.flowman.spec.NamedSpec +import com.dimajix.flowman.spec.annotation.AssertionType +import com.dimajix.flowman.spi.ClassAnnotationHandler + + +object AssertionSpec extends TypeRegistry[AssertionSpec] { + class NameResolver extends StdConverter[Map[String, AssertionSpec], Map[String, AssertionSpec]] { + override def convert(value: Map[String, AssertionSpec]): Map[String, AssertionSpec] = { + value.foreach(kv => kv._2.name = kv._1) + value + } + } +} + + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "kind", visible = true) +@JsonSubTypes(value = Array( + new JsonSubTypes.Type(name = "sql", value = classOf[SqlAssertionSpec]) +)) +abstract class AssertionSpec extends NamedSpec[Assertion] { + @JsonProperty(value="description", required = false) private var description: Option[String] = None + + override def instantiate(context: Context): Assertion + + override protected def instanceProperties(context:Context) : Assertion.Properties = { + require(context != null) + Assertion.Properties( + context, + context.namespace, + context.project, + name, + kind, + context.evaluate(labels), + context.evaluate(description) + ) + } +} + + +class AssertionSpecAnnotationHandler extends ClassAnnotationHandler { + override def annotation: Class[_] = classOf[AssertionType] + + override def register(clazz: Class[_]): Unit = + AssertionSpec.register(clazz.getAnnotation(classOf[AssertionType]).kind(), clazz.asInstanceOf[Class[_ <: AssertionSpec]]) +} diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/assertion/SqlAssertion.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/assertion/SqlAssertion.scala new file mode 100644 index 000000000..00a7d11cc --- /dev/null +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/assertion/SqlAssertion.scala @@ -0,0 +1,148 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.assertion + +import com.fasterxml.jackson.annotation.JsonProperty +import org.apache.spark.sql.DataFrame +import org.slf4j.LoggerFactory + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.model.Assertion +import com.dimajix.flowman.model.AssertionResult +import com.dimajix.flowman.model.BaseAssertion +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.ResourceIdentifier +import com.dimajix.spark.sql.DataFrameUtils +import com.dimajix.spark.sql.SqlParser + + +object SqlAssertion { + case class Case( + query:String, + expected:Seq[Array[String]] = Seq() + ) { + def sql : String = query + + override def hashCode(): Int = { + (query, expected.map(_.toSeq)).hashCode() + } + + override def equals(obj: Any): Boolean = { + if (obj == null) { + false + } + else if (super.equals(obj)) { + true + } + else if (!obj.isInstanceOf[Case]) { + false + } + else { + val otherCase = obj.asInstanceOf[Case] + val l = (query, expected.map(_.toSeq)) + val r = (otherCase.query, otherCase.expected.map(_.toSeq)) + l == r + } + } + } +} +case class SqlAssertion( + override val instanceProperties:Assertion.Properties, + tests: Seq[SqlAssertion.Case] +) extends BaseAssertion { + private val logger = LoggerFactory.getLogger(classOf[SqlAssertion]) + + /** + * Returns a list of physical resources required by this assertion. This list will only be non-empty for assertions + * which actually read from physical data. + * + * @return + */ + override def requires: Set[ResourceIdentifier] = Set() + + /** + * Returns the dependencies (i.e. 
names of tables in the Dataflow model) + * + * @return + */ + override def inputs: Seq[MappingOutputIdentifier] = { + tests.flatMap(test => SqlParser.resolveDependencies(test.sql)) + .map(MappingOutputIdentifier.parse) + .distinct + } + + /** + * Executes this [[Assertion]] and returns a corresponding DataFrame + * + * @param execution + * @param input + * @return + */ + override def execute(execution: Execution, input: Map[MappingOutputIdentifier, DataFrame]): Seq[AssertionResult] = { + require(execution != null) + require(input != null) + + DataFrameUtils.withTempViews(input.map(kv => kv._1.name -> kv._2)) { + tests.map { test => + // Execute query + val sql = test.sql + val actual = execution.spark.sql(sql) + + val result = DataFrameUtils.diffToStringValues(test.expected, actual) + result match { + case Some(diff) => + logger.error(s"query: $sql\n$diff") + AssertionResult(sql, false) + case None => + AssertionResult(sql, true) + } + } + } + } +} + + +object SqlAssertionSpec { + class Case { + @JsonProperty(value="query", required=true) private var query:String = "" + @JsonProperty(value="expected", required=true) private var expected:Seq[Array[String]] = Seq() + + def instantiate(context:Context) : SqlAssertion.Case = { + SqlAssertion.Case( + context.evaluate(query), + expected.map(_.map(context.evaluate)) + ) + } + } +} +class SqlAssertionSpec extends AssertionSpec { + @JsonProperty(value="tests", required=false) private var tests:Seq[SqlAssertionSpec.Case] = Seq() + @JsonProperty(value="query", required=false) private var query:String = "" + @JsonProperty(value="expected", required=false) private var expected:Seq[Array[String]] = Seq() + + override def instantiate(context: Context): SqlAssertion = { + val embeddedQuery = context.evaluate(query) + val embeddedExpectation = expected.map(_.map(context.evaluate)) + val embeddedCase = if (embeddedQuery.nonEmpty) Some(SqlAssertion.Case(embeddedQuery, embeddedExpectation)) else None + + SqlAssertion( + instanceProperties(context), + embeddedCase.toSeq ++ tests.map(_.instantiate(context)) + ) + } +} diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/catalog/CatalogSpec.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/catalog/CatalogSpec.scala index 14489f1a8..1ed0ae3d5 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/catalog/CatalogSpec.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/catalog/CatalogSpec.scala @@ -20,11 +20,11 @@ import com.fasterxml.jackson.annotation.JsonSubTypes import com.fasterxml.jackson.annotation.JsonTypeInfo import com.dimajix.common.TypeRegistry -import com.dimajix.flowman.annotation.CatalogType import com.dimajix.flowman.annotation.TemplateObject import com.dimajix.flowman.catalog.ExternalCatalog import com.dimajix.flowman.execution.Context import com.dimajix.flowman.spec.Spec +import com.dimajix.flowman.spec.annotation.CatalogType import com.dimajix.flowman.spi.ClassAnnotationHandler import com.dimajix.flowman.templating.Velocity diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/connection/ConnectionSpec.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/connection/ConnectionSpec.scala index e50a4cbda..fb28d2e11 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/connection/ConnectionSpec.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/connection/ConnectionSpec.scala @@ -21,10 +21,10 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo import com.fasterxml.jackson.databind.util.StdConverter 
import com.dimajix.common.TypeRegistry -import com.dimajix.flowman.annotation.ConnectionType import com.dimajix.flowman.execution.Context import com.dimajix.flowman.model.Connection import com.dimajix.flowman.spec.NamedSpec +import com.dimajix.flowman.spec.annotation.ConnectionType import com.dimajix.flowman.spi.ClassAnnotationHandler diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/DatasetSpec.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/DatasetSpec.scala index f07296f46..bdab6f0af 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/DatasetSpec.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/DatasetSpec.scala @@ -21,10 +21,10 @@ import com.fasterxml.jackson.annotation.JsonSubTypes import com.fasterxml.jackson.annotation.JsonTypeInfo import com.dimajix.common.TypeRegistry -import com.dimajix.flowman.annotation.DatasetType import com.dimajix.flowman.execution.Context import com.dimajix.flowman.model.Dataset import com.dimajix.flowman.spec.Spec +import com.dimajix.flowman.spec.annotation.DatasetType import com.dimajix.flowman.spi.ClassAnnotationHandler @@ -35,7 +35,8 @@ object DatasetSpec extends TypeRegistry[DatasetSpec] { @JsonSubTypes(value = Array( new JsonSubTypes.Type(name = "file", value = classOf[FileDatasetSpec]), new JsonSubTypes.Type(name = "mapping", value = classOf[MappingDatasetSpec]), - new JsonSubTypes.Type(name = "relation", value = classOf[RelationDatasetSpec]) + new JsonSubTypes.Type(name = "relation", value = classOf[RelationDatasetSpec]), + new JsonSubTypes.Type(name = "values", value = classOf[ValuesDatasetSpec]) )) abstract class DatasetSpec extends Spec[Dataset] { @JsonProperty(value="kind", required = true) protected var kind: String = _ diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/FileDataset.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/FileDataset.scala index 49d7bde6b..fee0f267a 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/FileDataset.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/FileDataset.scala @@ -27,7 +27,7 @@ import org.slf4j.LoggerFactory import com.dimajix.common.Trilean import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.model.AbstractInstance import com.dimajix.flowman.model.Dataset @@ -66,23 +66,23 @@ case class FileDataset( /** * Returns true if the data represented by this Dataset actually exists * - * @param executor + * @param execution * @return */ - override def exists(executor: Executor): Trilean = { - val file = executor.fs.file(location) + override def exists(execution: Execution): Trilean = { + val file = execution.fs.file(location) file.exists() } /** * Removes the data represented by this dataset, but leaves the underlying relation present * - * @param executor + * @param execution */ - override def clean(executor: Executor): Unit = { - require(executor != null) + override def clean(execution: Execution): Unit = { + require(execution != null) - val file = executor.fs.file(location) + val file = execution.fs.file(location) if (file.exists()) { logger.info(s"Deleting directory '$location' of dataset '$name") file.delete( true) @@ -92,14 +92,14 @@ case class FileDataset( /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * 
@param schema - the schema to read. If none is specified, all available columns will be read * @return */ - override def read(executor: Executor, schema: Option[org.apache.spark.sql.types.StructType]): DataFrame = { - require(executor != null) + override def read(execution: Execution, schema: Option[org.apache.spark.sql.types.StructType]): DataFrame = { + require(execution != null) - val baseReader = executor.spark.read + val baseReader = execution.spark.read .options(options) .format(format) @@ -108,7 +108,7 @@ case class FileDataset( // Use either load(files) or load(single_file) - this actually results in different code paths in Spark // load(single_file) will set the "path" option, while load(multiple_files) needs direct support from the // underlying format implementation - val providingClass = DataSource.lookupDataSource(format, executor.spark.sessionState.conf) + val providingClass = DataSource.lookupDataSource(format, execution.spark.sessionState.conf) val df = providingClass.newInstance() match { case _: RelationProvider => reader.load(location.toString) case _: SchemaRelationProvider => reader.load(location.toString) @@ -122,10 +122,10 @@ case class FileDataset( /** * Writes data into the relation, possibly into a specific partition * - * @param executor + * @param execution * @param df - dataframe to write */ - override def write(executor: Executor, df: DataFrame, mode: OutputMode) : Unit = { + override def write(execution: Execution, df: DataFrame, mode: OutputMode) : Unit = { val outputDf = SchemaUtils.applySchema(df, schema.map(_.sparkSchema)) outputDf.write @@ -140,7 +140,7 @@ case class FileDataset( * * @return */ - override def describe(executor:Executor) : Option[StructType] = { + override def describe(execution:Execution) : Option[StructType] = { schema.map(s => StructType(s.fields)) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/MappingDataset.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/MappingDataset.scala index 566543c65..cf406fd2d 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/MappingDataset.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/MappingDataset.scala @@ -22,7 +22,7 @@ import org.apache.spark.sql.DataFrame import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.MappingUtils import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.model.AbstractInstance @@ -60,40 +60,40 @@ case class MappingDataset( /** * Returns true if the data represented by this Dataset actually exists * - * @param executor + * @param execution * @return */ - override def exists(executor: Executor): Trilean = Yes + override def exists(execution: Execution): Trilean = Yes /** * Removes the data represented by this dataset, but leaves the underlying relation present * - * @param executor + * @param execution */ - override def clean(executor: Executor): Unit = { + override def clean(execution: Execution): Unit = { throw new UnsupportedOperationException } /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * @param schema - the schema to read. 
If none is specified, all available columns will be read * @return */ - override def read(executor: Executor, schema: Option[org.apache.spark.sql.types.StructType]): DataFrame = { + override def read(execution: Execution, schema: Option[org.apache.spark.sql.types.StructType]): DataFrame = { val instance = context.getMapping(mapping.mapping) - val df = executor.instantiate(instance, mapping.output) + val df = execution.instantiate(instance, mapping.output) SchemaUtils.applySchema(df, schema) } /** * Writes data into the relation, possibly into a specific partition * - * @param executor + * @param execution * @param df - dataframe to write */ - override def write(executor: Executor, df: DataFrame, mode: OutputMode): Unit = { + override def write(execution: Execution, df: DataFrame, mode: OutputMode): Unit = { throw new UnsupportedOperationException } @@ -102,9 +102,9 @@ case class MappingDataset( * * @return */ - override def describe(executor:Executor) : Option[StructType] = { + override def describe(execution:Execution) : Option[StructType] = { val instance = context.getMapping(mapping.mapping) - Some(executor.describe(instance, mapping.output)) + Some(execution.describe(instance, mapping.output)) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/RelationDataset.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/RelationDataset.scala index 2e83fda0b..aa6a90b75 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/RelationDataset.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/RelationDataset.scala @@ -21,7 +21,7 @@ import org.apache.spark.sql.DataFrame import com.dimajix.common.Trilean import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.model.AbstractInstance import com.dimajix.flowman.model.Dataset @@ -66,55 +66,57 @@ case class RelationDataset( /** * Returns true if the data represented by this Dataset actually exists * - * @param executor + * @param execution * @return */ - override def exists(executor: Executor): Trilean = { + override def exists(execution: Execution): Trilean = { val instance = context.getRelation(relation) - instance.loaded(executor, partition) + instance.loaded(execution, partition) } /** * Removes the data represented by this dataset, but leaves the underlying relation present * - * @param executor + * @param execution */ - override def clean(executor: Executor): Unit = { + override def clean(execution: Execution): Unit = { val instance = context.getRelation(relation) - instance.truncate(executor, partition) + instance.truncate(execution, partition) } /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * @param schema - the schema to read. 
If none is specified, all available columns will be read * @return */ - override def read(executor: Executor, schema: Option[org.apache.spark.sql.types.StructType]): DataFrame = { + override def read(execution: Execution, schema: Option[org.apache.spark.sql.types.StructType]): DataFrame = { val instance = context.getRelation(relation) - instance.read(executor, schema, partition) + instance.read(execution, schema, partition) } /** * Writes data into the relation, possibly into a specific partition * - * @param executor + * @param execution * @param df - dataframe to write */ - override def write(executor: Executor, df: DataFrame, mode: OutputMode): Unit = { + override def write(execution: Execution, df: DataFrame, mode: OutputMode): Unit = { val instance = context.getRelation(relation) - instance.write(executor, df, partition, mode) + // Remove partition columns + val outDf = partition.keys.foldLeft(df)((df,col) => df.drop(col)) + instance.write(execution, outDf, partition, mode) } /** - * Returns the schema as produced by this dataset, relative to the given input schema + * Returns the schema as produced by this dataset. The schema will not include any partition columns * * @return */ - override def describe(executor:Executor) : Option[StructType] = { + override def describe(execution:Execution) : Option[StructType] = { val instance = context.getRelation(relation) - instance.schema.map(s => StructType(s.fields)) + Some(instance.describe(execution)) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/ValuesDataset.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/ValuesDataset.scala new file mode 100644 index 000000000..0d745ac2e --- /dev/null +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/dataset/ValuesDataset.scala @@ -0,0 +1,130 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.dataset + +import com.fasterxml.jackson.annotation.JsonProperty +import org.apache.spark.sql.DataFrame + +import com.dimajix.common.Trilean +import com.dimajix.common.Yes +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.execution.OutputMode +import com.dimajix.flowman.model.AbstractInstance +import com.dimajix.flowman.model.Dataset +import com.dimajix.flowman.model.ResourceIdentifier +import com.dimajix.flowman.model.Schema +import com.dimajix.flowman.spec.schema.SchemaSpec +import com.dimajix.flowman.types.ArrayRecord +import com.dimajix.flowman.types.Field +import com.dimajix.flowman.types.FieldType +import com.dimajix.flowman.types.MapRecord +import com.dimajix.flowman.types.Record +import com.dimajix.flowman.types.StructType +import com.dimajix.flowman.types.ValueRecord +import com.dimajix.flowman.util.SchemaUtils +import com.dimajix.spark.sql.DataFrameUtils + + +case class ValuesDataset( + instanceProperties: Dataset.Properties, + columns:Seq[Field] = Seq(), + schema:Option[Schema] = None, + records:Seq[Record] = Seq() +) extends AbstractInstance with Dataset { + override def provides: Set[ResourceIdentifier] = Set() + + /** + * Returns a list of physical resources required for reading from this dataset + * + * @return + */ + override def requires: Set[ResourceIdentifier] = Set() + + /** + * Returns true if the data represented by this Dataset actually exists + * + * @param execution + * @return + */ + override def exists(execution: Execution): Trilean = Yes + + /** + * Removes the data represented by this dataset, but leaves the underlying relation present + * + * @param execution + */ + override def clean(execution: Execution): Unit = { + throw new UnsupportedOperationException + } + + /** + * Reads data from the relation, possibly from specific partitions + * + * @param execution + * @param schema - the schema to read. 
If none is specified, all available columns will be read + * @return + */ + override def read(execution: Execution, schema: Option[org.apache.spark.sql.types.StructType]): DataFrame = { + val recordsSchema = StructType(this.schema.map(_.fields).getOrElse(columns)) + val sparkSchema = recordsSchema.sparkType + + val values = records.map(_.toArray(recordsSchema)) + val df = DataFrameUtils.ofStringValues(execution.spark, values, sparkSchema) + SchemaUtils.applySchema(df, schema) + } + + /** + * Writes data into the relation, possibly into a specific partition + * + * @param execution + * @param df - dataframe to write + */ + override def write(execution: Execution, df: DataFrame, mode: OutputMode): Unit = { + throw new UnsupportedOperationException + } + + /** + * Returns the schema as produced by this dataset, relative to the given input schema + * + * @return + */ + override def describe(execution: Execution): Option[StructType] = { + Some(StructType(schema.map(_.fields).getOrElse(columns))) + } +} + + +class ValuesDatasetSpec extends DatasetSpec { + @JsonProperty(value = "schema", required=false) private var schema:Option[SchemaSpec] = None + @JsonProperty(value = "columns", required = false) private var columns:Map[String,String] = Map() + @JsonProperty(value = "records", required=false) private var records:Seq[Record] = Seq() + + /** + * Creates the instance of the specified Mapping with all variable interpolation being performed + * @param context + * @return + */ + override def instantiate(context: Context): ValuesDataset = { + ValuesDataset( + instanceProperties(context, "values"), + context.evaluate(columns).toSeq.map(kv => Field(kv._1, FieldType.of(kv._2))), + schema.map(_.instantiate(context)), + records.map(_.map(context.evaluate)) + ) + } +} diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/history/HistorySpec.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/history/HistorySpec.scala index 2e2b5bcb8..0f70f671f 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/history/HistorySpec.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/history/HistorySpec.scala @@ -20,10 +20,10 @@ import com.fasterxml.jackson.annotation.JsonSubTypes import com.fasterxml.jackson.annotation.JsonTypeInfo import com.dimajix.common.TypeRegistry -import com.dimajix.flowman.annotation.HistoryType import com.dimajix.flowman.execution.Context import com.dimajix.flowman.history.StateStore import com.dimajix.flowman.spec.Spec +import com.dimajix.flowman.spec.annotation.HistoryType import com.dimajix.flowman.spi.ClassAnnotationHandler diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/hook/HookSpec.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/hook/HookSpec.scala index 20ebc2871..991b6fc4d 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/hook/HookSpec.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/hook/HookSpec.scala @@ -21,10 +21,10 @@ import com.fasterxml.jackson.annotation.JsonSubTypes import com.fasterxml.jackson.annotation.JsonTypeInfo import com.dimajix.common.TypeRegistry -import com.dimajix.flowman.annotation.HookType import com.dimajix.flowman.execution.Context import com.dimajix.flowman.model.Hook import com.dimajix.flowman.spec.Spec +import com.dimajix.flowman.spec.annotation.HookType import com.dimajix.flowman.spi.ClassAnnotationHandler diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/AggregateMapping.scala 
b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/AggregateMapping.scala index 478b276f6..2fc685d8a 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/AggregateMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/AggregateMapping.scala @@ -22,7 +22,7 @@ import org.apache.spark.sql.functions.col import org.apache.spark.sql.functions.expr import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -39,11 +39,11 @@ case class AggregateMapping( /** * Creates an instance of the aggregated table. * - * @param executor + * @param execution * @param tables * @return */ - override def execute(executor:Executor, tables:Map[MappingOutputIdentifier,DataFrame]): Map[String,DataFrame] = { + override def execute(execution:Execution, tables:Map[MappingOutputIdentifier,DataFrame]): Map[String,DataFrame] = { val df = tables(input) val dims = dimensions.map(col) val expressions = aggregations.map(kv => expr(kv._2).as(kv._1)) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/AliasMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/AliasMapping.scala index f0a659139..ad62e024b 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/AliasMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/AliasMapping.scala @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -42,11 +42,11 @@ case class AliasMapping( /** * Executes this mapping by returning a DataFrame which corresponds to the specified input - * @param executor + * @param execution * @param input * @return */ - override def execute(executor:Executor, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + override def execute(execution:Execution, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { val result = input(this.input) Map("main" -> result) } @@ -56,8 +56,8 @@ case class AliasMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val result = input(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/AssembleMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/AssembleMapping.scala index f518a2ea2..edc422b33 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/AssembleMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/AssembleMapping.scala @@ -22,7 +22,7 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import 
com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -133,12 +133,12 @@ case class AssembleMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param deps * @return */ - override def execute(executor: Executor, deps: Map[MappingOutputIdentifier, DataFrame]): Map[String,DataFrame] = { - require(executor != null) + override def execute(execution: Execution, deps: Map[MappingOutputIdentifier, DataFrame]): Map[String,DataFrame] = { + require(execution != null) require(deps != null) val df = deps(input) @@ -156,8 +156,8 @@ case class AssembleMapping( * @param deps * @return */ - override def describe(executor:Executor, deps:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, deps:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(deps != null) val schema = deps(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/CaseMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/CaseMapping.scala new file mode 100644 index 000000000..d2aa4f0ef --- /dev/null +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/CaseMapping.scala @@ -0,0 +1,53 @@ +/* + * Copyright 2018 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.mapping + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.fasterxml.jackson.annotation.JsonProperty + + +object CaseMappingSpec { + class Case { + @JsonProperty(value = "condition", required = true) private[CaseMappingSpec] var condition:String = "" + @JsonProperty(value = "input", required = true) private[CaseMappingSpec] var input:String = "" + } +} + +class CaseMappingSpec extends MappingSpec { + @JsonProperty(value = "cases", required = true) private var cases: Seq[CaseMappingSpec.Case] = Seq() + + /** + * Creates the instance of the specified Mapping with all variable interpolation being performed + * @param context + * @return + */ + override def instantiate(context: Context): AliasMapping = { + def eval(cond:String) : Boolean = { + context.evaluate(s"#if (${cond}) true #else false #end").trim.toBoolean + } + val props = instanceProperties(context) + val input = cases.find(c => eval(c.condition)) + if (input.isEmpty) + throw new IllegalArgumentException(s"No valid case found in case mapping '${props.identifier}'") + + AliasMapping( + props, + MappingOutputIdentifier(context.evaluate(input.get.input)) + ) + } +} diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/CoalesceMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/CoalesceMapping.scala index 232b03c55..a4dcfa448 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/CoalesceMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/CoalesceMapping.scala @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -35,12 +35,12 @@ case class CoalesceMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param input * @return */ - override def execute(executor:Executor, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(input != null) val df = input(this.input) @@ -63,8 +63,8 @@ case class CoalesceMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val result = input(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ConformMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ConformMapping.scala index 9fd03bfdf..81b57a57f 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ConformMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ConformMapping.scala @@ -22,7 +22,7 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import 
com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -56,12 +56,12 @@ extends BaseMapping { /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param input * @return */ - override def execute(executor: Executor, input: Map[MappingOutputIdentifier, DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution: Execution, input: Map[MappingOutputIdentifier, DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(input != null) val df = input(this.input) @@ -81,8 +81,8 @@ extends BaseMapping { * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val schema = input(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/DeduplicateMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/DeduplicateMapping.scala index 0c2fbb862..89f711efe 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/DeduplicateMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/DeduplicateMapping.scala @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -45,12 +45,12 @@ case class DeduplicateMapping( /** * Creates an instance of the deduplication table. 
* - * @param executor + * @param execution * @param tables * @return */ - override def execute(executor:Executor, tables:Map[MappingOutputIdentifier,DataFrame]): Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, tables:Map[MappingOutputIdentifier,DataFrame]): Map[String,DataFrame] = { + require(execution != null) require(input != null) val df = tables(input) @@ -76,8 +76,8 @@ case class DeduplicateMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val result = input(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/DistinctMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/DistinctMapping.scala index a910674ec..6dbed2076 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/DistinctMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/DistinctMapping.scala @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -44,12 +44,12 @@ case class DistinctMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param tables * @return */ - override def execute(executor: Executor, tables: Map[MappingOutputIdentifier, DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution: Execution, tables: Map[MappingOutputIdentifier, DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(input != null) val df = tables(input) @@ -66,8 +66,8 @@ case class DistinctMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val result = input(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/DropMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/DropMapping.scala index ff6484a60..f2f505a6d 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/DropMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/DropMapping.scala @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -47,12 +47,12 @@ case class DropMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param deps * @return */ - override def 
execute(executor: Executor, deps: Map[MappingOutputIdentifier, DataFrame]): Map[String,DataFrame] = { - require(executor != null) + override def execute(execution: Execution, deps: Map[MappingOutputIdentifier, DataFrame]): Map[String,DataFrame] = { + require(execution != null) require(deps != null) val df = deps(input) @@ -70,8 +70,8 @@ case class DropMapping( * @param deps * @return */ - override def describe(executor:Executor, deps:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, deps:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(deps != null) val schema = deps(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ExplodeMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ExplodeMapping.scala index 06c5b30e3..6811053ad 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ExplodeMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ExplodeMapping.scala @@ -23,7 +23,7 @@ import org.apache.spark.sql.DataFrame import org.apache.spark.sql.{types => st} import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -67,12 +67,12 @@ case class ExplodeMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param deps * @return */ - override def execute(executor: Executor, deps: Map[MappingOutputIdentifier, DataFrame]): Map[String, DataFrame] = { - require(executor != null) + override def execute(execution: Execution, deps: Map[MappingOutputIdentifier, DataFrame]): Map[String, DataFrame] = { + require(execution != null) require(deps != null) def isSimpleArray(df:DataFrame) : Boolean = { @@ -100,8 +100,8 @@ case class ExplodeMapping( * @param deps * @return */ - override def describe(executor:Executor, deps:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, deps:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(deps != null) def isSimpleArray(dt:StructType) : Boolean = { diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ExtendMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ExtendMapping.scala index 3da15e589..67147dc5c 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ExtendMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ExtendMapping.scala @@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.parser.CatalystSqlParser import org.apache.spark.sql.functions.expr import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -46,12 +46,12 @@ case class ExtendMapping( /** * Executes this Transform by reading from the specified source and returns a corresponding DataFrame * - * @param executor + * @param execution * @param deps * @return */ - override def execute(executor:Executor, 
deps:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, deps:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(deps != null) val allColumns = this.columns diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ExtractJsonMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ExtractJsonMapping.scala index 70fc7d4e4..2fd3ff757 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ExtractJsonMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ExtractJsonMapping.scala @@ -24,7 +24,7 @@ import org.apache.spark.sql.functions.lit import org.apache.spark.sql.types.StringType import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -38,7 +38,7 @@ case class ExtractJsonMapping( instanceProperties:Mapping.Properties, input:MappingOutputIdentifier, column: String, - schema: Schema, + schema: Option[Schema], parseMode: String = "PERMISSIVE", allowComments: Boolean = false, allowUnquotedFieldNames: Boolean = false, @@ -68,20 +68,18 @@ case class ExtractJsonMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param deps * @return */ - override def execute(executor: Executor, deps: Map[MappingOutputIdentifier, DataFrame]): Map[String,DataFrame] = { - require(executor != null) + override def execute(execution: Execution, deps: Map[MappingOutputIdentifier, DataFrame]): Map[String,DataFrame] = { + require(execution != null) require(deps != null) - val spark = executor.spark + val spark = execution.spark val corruptedColumn = "_flowman_corrupted_column" - val sparkSchema = Option(schema).map(schema => schema.sparkSchema.add(corruptedColumn, StringType)).orNull val table = deps(this.input) - val result = spark.read - .schema(sparkSchema) + val reader = spark.read .option("mode", parseMode) .option("columnNameOfCorruptRecord", corruptedColumn) .option("allowComments", allowComments) @@ -91,7 +89,12 @@ case class ExtractJsonMapping( .option("allowNonNumericNumbers", allowNonNumericNumbers) .option("allowBackslashEscapingAnyCharacter", allowBackslashEscapingAnyCharacter) .option("allowUnquotedControlChars", allowUnquotedControlChars) - .json(table.select(table(column).cast(StringType)).as[String](Encoders.STRING)) + schema.foreach { schema => + val sparkSchema = schema.sparkSchema.add(corruptedColumn, StringType) + reader.schema(sparkSchema) + } + + val result = reader.json(table.select(table(column).cast(StringType)).as[String](Encoders.STRING)) // If no schema is specified, Spark will only add the error column if an error actually occurred val (mainResult, errorResult) = @@ -110,7 +113,8 @@ case class ExtractJsonMapping( Map( "main" -> mainResult, - "error" -> errorResult + "error" -> errorResult, + "cache" -> result ) } @@ -119,11 +123,11 @@ case class ExtractJsonMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,ftypes.StructType]) : Map[String,ftypes.StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,ftypes.StructType]) : 
Map[String,ftypes.StructType] = { + require(execution != null) require(input != null) - val mainSchema = ftypes.StructType(if (schema != null) schema.fields else Seq()) + val mainSchema = ftypes.StructType(schema.map(_.fields).getOrElse(Seq())) val errorSchema = ftypes.StructType(Seq(Field("record", ftypes.StringType, false))) Map( "main" -> mainSchema, @@ -137,7 +141,7 @@ case class ExtractJsonMapping( class ExtractJsonMappingSpec extends MappingSpec { @JsonProperty(value = "input", required = true) private var input: String = _ @JsonProperty(value = "column", required = true) private var column: String = _ - @JsonProperty(value = "schema", required = false) private var schema: SchemaSpec = _ + @JsonProperty(value = "schema", required = false) private var schema: Option[SchemaSpec] = None @JsonProperty(value = "parseMode", required = false) private var parseMode: String = "PERMISSIVE" @JsonProperty(value = "allowComments", required = false) private var allowComments: String = "false" @JsonProperty(value = "allowUnquotedFieldNames", required = false) private var allowUnquotedFieldNames: String = "false" @@ -157,7 +161,7 @@ class ExtractJsonMappingSpec extends MappingSpec { instanceProperties(context), MappingOutputIdentifier(context.evaluate(input)), context.evaluate(column), - if (schema != null) schema.instantiate(context) else null, + schema.map(_.instantiate(context)), context.evaluate(parseMode), context.evaluate(allowComments).toBoolean, context.evaluate(allowUnquotedFieldNames).toBoolean, diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/FilterMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/FilterMapping.scala index fe95fb38f..d48124927 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/FilterMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/FilterMapping.scala @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -44,12 +44,12 @@ case class FilterMapping( /** * Executes this Transform by reading from the specified source and returns a corresponding DataFrame * - * @param executor + * @param execution * @param tables * @return */ - override def execute(executor:Executor, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(tables != null) val df = tables(input) @@ -63,8 +63,8 @@ case class FilterMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val result = input(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/FlattenMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/FlattenMapping.scala index e33e94e21..bf24e541c 100644 --- 
a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/FlattenMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/FlattenMapping.scala @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -47,12 +47,12 @@ case class FlattenMapping( /** * Executes the mapping operation and returns a corresponding DataFrame * - * @param executor + * @param execution * @param input * @return */ - override def execute(executor: Executor, input: Map[MappingOutputIdentifier, DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution: Execution, input: Map[MappingOutputIdentifier, DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(input != null) val df = input(this.input) @@ -71,8 +71,8 @@ case class FlattenMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val mappingId = this.input diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/HistorizeMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/HistorizeMapping.scala index de7703156..766826e50 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/HistorizeMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/HistorizeMapping.scala @@ -24,7 +24,7 @@ import org.apache.spark.sql.functions.lead import com.dimajix.common.MapIgnoreCase import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -54,12 +54,12 @@ case class HistorizeMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param tables * @return */ - override def execute(executor:Executor, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(tables != null) val df = tables(input) @@ -101,8 +101,8 @@ case class HistorizeMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val fields = input(this.input).fields diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/JoinMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/JoinMapping.scala index e77fe9710..240b77b93 100644 --- 
a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/JoinMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/JoinMapping.scala @@ -21,7 +21,7 @@ import org.apache.spark.sql.DataFrame import org.apache.spark.sql.functions.expr import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -47,12 +47,12 @@ case class JoinMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param tables * @return */ - override def execute(executor: Executor, tables: Map[MappingOutputIdentifier, DataFrame]): Map[String,DataFrame] = { - require(executor != null) + override def execute(execution: Execution, tables: Map[MappingOutputIdentifier, DataFrame]): Map[String,DataFrame] = { + require(execution != null) require(tables != null) val result = if (condition.nonEmpty) { diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/MappingSpec.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/MappingSpec.scala index 3d6242c34..9284310e2 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/MappingSpec.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/MappingSpec.scala @@ -23,10 +23,10 @@ import com.fasterxml.jackson.databind.util.StdConverter import org.apache.spark.storage.StorageLevel import com.dimajix.common.TypeRegistry -import com.dimajix.flowman.annotation.MappingType import com.dimajix.flowman.execution.Context import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.spec.NamedSpec +import com.dimajix.flowman.spec.annotation.MappingType import com.dimajix.flowman.spi.ClassAnnotationHandler @@ -47,12 +47,15 @@ object MappingSpec extends TypeRegistry[MappingSpec] { new JsonSubTypes.Type(name = "aggregate", value = classOf[AggregateMappingSpec]), new JsonSubTypes.Type(name = "alias", value = classOf[AliasMappingSpec]), new JsonSubTypes.Type(name = "assemble", value = classOf[AssembleMappingSpec]), + new JsonSubTypes.Type(name = "case", value = classOf[CaseMappingSpec]), new JsonSubTypes.Type(name = "coalesce", value = classOf[CoalesceMappingSpec]), new JsonSubTypes.Type(name = "conform", value = classOf[ConformMappingSpec]), + new JsonSubTypes.Type(name = "const", value = classOf[ValuesMappingSpec]), new JsonSubTypes.Type(name = "deduplicate", value = classOf[DeduplicateMappingSpec]), new JsonSubTypes.Type(name = "distinct", value = classOf[DistinctMappingSpec]), new JsonSubTypes.Type(name = "drop", value = classOf[DropMappingSpec]), new JsonSubTypes.Type(name = "earliest", value = classOf[EarliestMappingSpec]), + new JsonSubTypes.Type(name = "empty", value = classOf[NullMappingSpec]), new JsonSubTypes.Type(name = "explode", value = classOf[ExplodeMappingSpec]), new JsonSubTypes.Type(name = "extend", value = classOf[ExtendMappingSpec]), new JsonSubTypes.Type(name = "extractJson", value = classOf[ExtractJsonMappingSpec]), @@ -61,6 +64,8 @@ object MappingSpec extends TypeRegistry[MappingSpec] { new JsonSubTypes.Type(name = "historize", value = classOf[HistorizeMappingSpec]), new JsonSubTypes.Type(name = "join", value = classOf[JoinMappingSpec]), new JsonSubTypes.Type(name = "latest", value = classOf[LatestMappingSpec]), + new JsonSubTypes.Type(name = "mock", value = classOf[MockMappingSpec]), + 
new JsonSubTypes.Type(name = "null", value = classOf[NullMappingSpec]), new JsonSubTypes.Type(name = "project", value = classOf[ProjectMappingSpec]), new JsonSubTypes.Type(name = "provided", value = classOf[ProvidedMappingSpec]), new JsonSubTypes.Type(name = "read", value = classOf[ReadRelationMappingSpec]), @@ -78,7 +83,8 @@ object MappingSpec extends TypeRegistry[MappingSpec] { new JsonSubTypes.Type(name = "union", value = classOf[UnionMappingSpec]), new JsonSubTypes.Type(name = "unit", value = classOf[UnitMappingSpec]), new JsonSubTypes.Type(name = "unpackJson", value = classOf[UnpackJsonMappingSpec]), - new JsonSubTypes.Type(name = "update", value = classOf[UpdateMappingSpec]) + new JsonSubTypes.Type(name = "upsert", value = classOf[UpsertMappingSpec]), + new JsonSubTypes.Type(name = "values", value = classOf[ValuesMappingSpec]) )) abstract class MappingSpec extends NamedSpec[Mapping] { @JsonProperty("broadcast") protected var broadcast:String = "false" diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/MockMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/MockMapping.scala new file mode 100644 index 000000000..058d6e1a6 --- /dev/null +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/MockMapping.scala @@ -0,0 +1,151 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.mapping + +import com.fasterxml.jackson.annotation.JsonProperty +import org.apache.spark.sql.DataFrame + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.model.BaseMapping +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.types.ArrayRecord +import com.dimajix.flowman.types.MapRecord +import com.dimajix.flowman.types.Record +import com.dimajix.flowman.types.StructType +import com.dimajix.flowman.types.ValueRecord +import com.dimajix.spark.sql.DataFrameUtils + + +case class MockMapping( + instanceProperties:Mapping.Properties, + mapping:MappingIdentifier, + records:Seq[Record] = Seq() +) extends BaseMapping { + private lazy val mocked = context.getMapping(mapping, false) + + /** + * Returns the dependencies (i.e. 
names of tables in the Dataflow model) + * + * @return + */ + override def inputs: Seq[MappingOutputIdentifier] = Seq() + + /** + * Executes this Mapping and returns a corresponding map of DataFrames per output + * + * @param execution + * @param input + * @return + */ + override def execute(execution: Execution, input: Map[MappingOutputIdentifier, DataFrame]): Map[String, DataFrame] = { + val schemas = describe(execution, Map()) + if (records.nonEmpty) { + if (schemas.size != 1) + throw new UnsupportedOperationException("MockMapping only supports a single output with specified records") + val (name,schema) = schemas.head + + val values = records.map(_.toArray(schema)) + val df = DataFrameUtils.ofStringValues(execution.spark, values, schema.sparkType) + Map(name -> df) + } + else { + schemas.map { case (name, schema) => + val df = DataFrameUtils.ofSchema(execution.spark, schema.sparkType) + (name, df) + } + } + } + + + /** + * Creates an output identifier for the primary output + * + * @return + */ + override def output: MappingOutputIdentifier = { + MappingOutputIdentifier(identifier, mocked.output.output) + } + + /** + * Lists all outputs of this mapping. Every mapping should have one "main" output + * + * @return + */ + override def outputs: Seq[String] = mocked.outputs + + /** + * Returns the schema as produced by this mapping, relative to the given input schema. The map might not contain + * schema information for all outputs, if the schema cannot be inferred. + * + * @param input + * @return + */ + override def describe(execution: Execution, input: Map[MappingOutputIdentifier, StructType]): Map[String, StructType] = { + mocked.outputs.map(out => out -> describe(execution, Map(), out)).toMap + } + + /** + * Returns the schema as produced by this mapping, relative to the given input schema. 
If the schema cannot + * be inferred, None will be returned + * + * @param input + * @return + */ + override def describe(execution: Execution, input: Map[MappingOutputIdentifier, StructType], output: String): StructType = { + require(execution != null) + require(input != null) + require(output != null && output.nonEmpty) + + def describe(mapping:Mapping, output:String) : StructType = { + val deps = dependencies(mapping) + mapping.describe(execution, deps, output) + } + def describe2(context:Context, id:MappingOutputIdentifier) : StructType = { + val mapping = context.getMapping(id.mapping) + describe(mapping, id.output) + } + def dependencies(mapping:Mapping) ={ + mapping.inputs + .map(dep => dep -> describe2(mapping.context, dep)) + .toMap + } + + describe(mocked, output) + } +} + + +class MockMappingSpec extends MappingSpec { + @JsonProperty(value = "mapping", required=false) private var mapping:Option[String] = None + @JsonProperty(value = "records", required=false) private var records:Seq[Record] = Seq() + + /** + * Creates the instance of the specified Mapping with all variable interpolation being performed + * @param context + * @return + */ + override def instantiate(context: Context): MockMapping = { + MockMapping( + instanceProperties(context), + MappingIdentifier(context.evaluate(mapping).getOrElse(name)), + records.map(_.map(context.evaluate)) + ) + } +} diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/NullMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/NullMapping.scala new file mode 100644 index 000000000..0b298f18e --- /dev/null +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/NullMapping.scala @@ -0,0 +1,101 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.mapping + +import com.fasterxml.jackson.annotation.JsonProperty +import org.apache.spark.sql.DataFrame +import org.apache.spark.sql.Row + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.model.BaseMapping +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Schema +import com.dimajix.flowman.spec.schema.SchemaSpec +import com.dimajix.flowman.types.Field +import com.dimajix.flowman.types.FieldType +import com.dimajix.flowman.types.StructType + + +case class NullMapping( + instanceProperties:Mapping.Properties, + fields:Seq[Field] = Seq(), + schema:Option[Schema] +) extends BaseMapping { + if (fields.nonEmpty && schema.nonEmpty) + throw new IllegalArgumentException("Cannot specify both fields and schema in NullMapping") + if (fields.isEmpty && schema.isEmpty) + throw new IllegalArgumentException("Need either fields or schema in NullMapping") + + private lazy val effectiveSchema = { + new StructType(schema.map(_.fields).getOrElse(fields)) + } + private lazy val sparkSchema = effectiveSchema.sparkType + + /** + * Returns the dependencies (i.e. names of tables in the Dataflow model) + * + * @return + */ + override def inputs: Seq[MappingOutputIdentifier] = Seq() + + /** + * Executes this Mapping and returns a corresponding map of DataFrames per output + * + * @param execution + * @param input + * @return + */ + override def execute(execution: Execution, input: Map[MappingOutputIdentifier, DataFrame]): Map[String, DataFrame] = { + val rdd = execution.spark.sparkContext.emptyRDD[Row] + val df = execution.spark.createDataFrame(rdd, sparkSchema) + + Map("main" -> df) + } + + /** + * Returns the schema as produced by this mapping, relative to the given input schema. The map might not contain + * schema information for all outputs, if the schema cannot be inferred. 
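The noteworthy detail in NullMapping.execute is how an empty yet correctly typed DataFrame is produced: an empty RDD[Row] combined with the explicit Spark schema. The same pattern in plain Spark, independent of Flowman's model classes (a minimal sketch with a made-up two-column schema):

```scala
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

val spark = SparkSession.builder().master("local[1]").appName("null-mapping-sketch").getOrCreate()

// The Spark schema that NullMapping derives from its `fields` or `schema` argument
val sparkSchema = StructType(Seq(
  StructField("id", IntegerType),
  StructField("name", StringType)
))

// Same pattern as NullMapping.execute: an empty RDD[Row] paired with an explicit schema
val rdd = spark.sparkContext.emptyRDD[Row]
val df  = spark.createDataFrame(rdd, sparkSchema)

df.printSchema()      // columns and types are present
println(df.count())   // 0 rows
```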
+ * + * @param input + * @return + */ + override def describe(execution: Execution, input: Map[MappingOutputIdentifier, StructType]): Map[String, StructType] = { + Map("main" -> effectiveSchema) + } +} + + + +class NullMappingSpec extends MappingSpec { + @JsonProperty(value = "fields", required=false) private var fields:Map[String,String] = Map() + @JsonProperty(value = "schema", required = false) protected var schema: Option[SchemaSpec] = None + + /** + * Creates the instance of the specified Mapping with all variable interpolation being performed + * @param context + * @return + */ + override def instantiate(context: Context): NullMapping = { + NullMapping( + instanceProperties(context), + context.evaluate(fields).map { case(name,typ) => Field(name, FieldType.of(typ))}.toSeq, + schema.map(_.instantiate(context)) + ) + } +} diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ProjectMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ProjectMapping.scala index 8781ceacf..32a76cd8b 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ProjectMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ProjectMapping.scala @@ -28,7 +28,7 @@ import com.fasterxml.jackson.databind.node.JsonNodeType import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -57,12 +57,12 @@ extends BaseMapping { /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param tables * @return */ - override def execute(executor:Executor, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(tables != null) val df = tables(input) @@ -79,8 +79,8 @@ extends BaseMapping { * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val schema = input(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ProvidedMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ProvidedMapping.scala index 6c234aa48..c1c755291 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ProvidedMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ProvidedMapping.scala @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -43,15 +43,15 @@ extends BaseMapping { /** * Instantiates the specified table, which must be available in the Spark session * - * @param executor + * @param execution * @param input * @return */ - 
override def execute(executor:Executor, input:Map[MappingOutputIdentifier,DataFrame]): Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, input:Map[MappingOutputIdentifier,DataFrame]): Map[String,DataFrame] = { + require(execution != null) require(input != null) - val result = executor.spark.table(table) + val result = execution.spark.table(table) Map("main" -> result) } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RankMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RankMapping.scala index e6af5cfbd..b7e33a0cb 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RankMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RankMapping.scala @@ -25,7 +25,7 @@ import org.apache.spark.sql.functions.col import org.apache.spark.sql.functions.row_number import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -68,12 +68,12 @@ case class RankMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param tables * @return */ - override def execute(executor:Executor, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(tables != null) val df = tables(input) @@ -98,8 +98,8 @@ case class RankMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val result = input(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ReadRelationMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ReadRelationMapping.scala index eb6ea1079..f4644bf5b 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ReadRelationMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ReadRelationMapping.scala @@ -22,7 +22,8 @@ import org.apache.spark.sql.DataFrame import org.slf4j.LoggerFactory import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.graph.Linker import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -68,12 +69,12 @@ case class ReadRelationMapping( /** * Executes this Transform by reading from the specified source and returns a corresponding DataFrame * - * @param executor + * @param execution * @param input * @return */ - override def execute(executor:Executor, input:Map[MappingOutputIdentifier,DataFrame]): Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, input:Map[MappingOutputIdentifier,DataFrame]): Map[String,DataFrame] = { + require(execution != null) require(input != null) val schema = if (columns.nonEmpty) 
Some(spark.sql.types.StructType(columns.map(_.sparkField))) else None @@ -81,7 +82,7 @@ case class ReadRelationMapping( // Read relation val rel = context.getRelation(relation) - val df = rel.read(executor, schema, partitions) + val df = rel.read(execution, schema, partitions) // Apply optional filter val result = filter.map(df.filter).getOrElse(df) @@ -94,20 +95,29 @@ case class ReadRelationMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val schema = if (columns.nonEmpty) { + // Use user specified schema StructType(columns) } else { val relation = context.getRelation(this.relation) - StructType(relation.fields) + relation.describe(execution) } Map("main" -> schema) } + + /** + * Creates all known links for building a descriptive graph of the whole data flow + * Params: linker - The linker object to use for creating new edges + */ + override def link(linker: Linker): Unit = { + linker.read(relation, partitions) + } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ReadStreamMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ReadStreamMapping.scala index 9d55049cd..c61bb6b6e 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ReadStreamMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ReadStreamMapping.scala @@ -21,7 +21,7 @@ import org.apache.spark.sql.DataFrame import org.slf4j.LoggerFactory import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -49,19 +49,19 @@ case class ReadStreamMapping ( /** * Executes this Transform by reading from the specified source and returns a corresponding DataFrame * - * @param executor + * @param execution * @param input * @return */ - override def execute(executor:Executor, input:Map[MappingOutputIdentifier,DataFrame]): Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, input:Map[MappingOutputIdentifier,DataFrame]): Map[String,DataFrame] = { + require(execution != null) require(input != null) val schema = if (columns.nonEmpty) Some(SchemaUtils.createSchema(columns.toSeq)) else None logger.info(s"Reading from streaming relation '$relation'") val rel = context.getRelation(relation) - val result = rel.readStream(executor, schema) + val result = rel.readStream(execution, schema) Map("main" -> result) } @@ -71,8 +71,8 @@ case class ReadStreamMapping ( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) if (columns.nonEmpty) { diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RebalanceMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RebalanceMapping.scala index 89fe2bc4a..64fd789cf 100644 --- 
a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RebalanceMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RebalanceMapping.scala @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -44,12 +44,12 @@ case class RebalanceMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param input * @return */ - override def execute(executor:Executor, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(input != null) val df = input(this.input) @@ -63,8 +63,8 @@ case class RebalanceMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val result = input(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RecursiveSqlMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RecursiveSqlMapping.scala index b322cbbe2..2daf8562d 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RecursiveSqlMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RecursiveSqlMapping.scala @@ -36,7 +36,7 @@ import org.apache.spark.sql.catalyst.plans.logical.UnaryNode import org.apache.spark.sql.catalyst.plans.logical.Union import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -66,12 +66,12 @@ extends BaseMapping { /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param input * @return */ - override def execute(executor:Executor, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(input != null) val statement = this.statement @@ -89,10 +89,10 @@ extends BaseMapping { // Register all input DataFrames as temp views input.foreach(kv => kv._2.createOrReplaceTempView(kv._1.name)) // Execute query - val first = firstDf(executor.spark, statement) + val first = firstDf(execution.spark, statement) val result = fix(first, first.count()) // Call SessionCatalog.dropTempView to avoid unpersisting the possibly cached dataset. 
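Only the call site `fix(first, first.count())` is visible in this hunk; the helper itself lives elsewhere in RecursiveSqlMapping. As a rough illustration of the fix-point idea behind recursive SQL — an assumption about the shape of that loop, not the actual implementation — the recursive step is re-evaluated and unioned with the accumulated result until the row count stops changing:

```scala
import scala.annotation.tailrec
import org.apache.spark.sql.DataFrame

// Illustrative only: `step` re-evaluates the recursive part of the statement against
// the current result; iteration stops once no new rows are produced.
@tailrec
def fixPoint(acc: DataFrame, lastCount: Long, step: DataFrame => DataFrame): DataFrame = {
  val next  = step(acc).union(acc).distinct()
  val count = next.count()
  if (count == lastCount) acc else fixPoint(next, count, step)
}
```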
- input.foreach(kv => executor.spark.sessionState.catalog.dropTempView(kv._1.name)) + input.foreach(kv => execution.spark.sessionState.catalog.dropTempView(kv._1.name)) Map("main" -> result) } @@ -126,11 +126,11 @@ extends BaseMapping { * @param input * @return */ - override def describe(executor: Executor, input: Map[MappingOutputIdentifier, StructType]): Map[String, StructType] = { - require(executor != null) + override def describe(execution: Execution, input: Map[MappingOutputIdentifier, StructType]): Map[String, StructType] = { + require(execution != null) require(input != null) - val spark = executor.spark + val spark = execution.spark val statement = this.statement // Create dummy data frames diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RepartitionMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RepartitionMapping.scala index 909a7aeff..a8bc9b7c7 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RepartitionMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/RepartitionMapping.scala @@ -21,7 +21,7 @@ import org.apache.spark.sql.DataFrame import org.apache.spark.sql.functions.col import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -33,7 +33,7 @@ case class RepartitionMapping( input:MappingOutputIdentifier, columns:Seq[String], partitions:Int, - sort:Boolean + sort:Boolean=false ) extends BaseMapping { /** * Returns the dependencies of this mapping, which is exactly one input table @@ -47,12 +47,12 @@ case class RepartitionMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param input * @return */ - override def execute(executor:Executor, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(input != null) val df = input(this.input) @@ -77,8 +77,8 @@ case class RepartitionMapping( * @param input * @return */ - override def describe(executor: Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution: Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val result = input(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SchemaMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SchemaMapping.scala index 6d2679a6a..044c0cf6c 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SchemaMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SchemaMapping.scala @@ -20,24 +20,29 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Schema import 
com.dimajix.flowman.spec.schema.SchemaSpec import com.dimajix.flowman.transforms.SchemaEnforcer +import com.dimajix.flowman.types.Field +import com.dimajix.flowman.types.FieldType import com.dimajix.flowman.types.StructType case class SchemaMapping( instanceProperties:Mapping.Properties, input:MappingOutputIdentifier, - columns:Seq[(String,String)] = Seq(), - schema:Schema = null, + columns:Seq[Field] = Seq(), + schema:Option[Schema] = None, filter:Option[String] = None ) extends BaseMapping { + if (schema.isEmpty && columns.isEmpty) + throw new IllegalArgumentException(s"Require either schema or columns in mapping $name") + /** * Returns the dependencies of this mapping, which is exactly one input table * @@ -50,22 +55,19 @@ extends BaseMapping { /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param tables * @return */ - override def execute(executor:Executor, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(tables != null) - val xfs = if (schema != null) { - SchemaEnforcer(schema.sparkSchema) - } - else if (columns != null && columns.nonEmpty) { - SchemaEnforcer(columns) + val xfs = if(schema.nonEmpty) { + SchemaEnforcer(schema.get.sparkSchema) } else { - throw new IllegalArgumentException(s"Require either schema or columns in mapping $name") + SchemaEnforcer(StructType(columns).sparkType) } val df = tables(input) @@ -82,11 +84,17 @@ extends BaseMapping { * @param input * @return */ - override def describe(executor: Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution: Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) - val result = StructType(schema.fields) + val result = if(schema.nonEmpty) { + StructType(schema.get.fields) + } + else { + StructType(columns) + } + Map("main" -> result) } } @@ -96,7 +104,7 @@ extends BaseMapping { class SchemaMappingSpec extends MappingSpec { @JsonProperty(value = "input", required = true) private var input: String = _ @JsonProperty(value = "columns", required = false) private var columns:Map[String,String] = Map() - @JsonProperty(value = "schema", required = false) private var schema: SchemaSpec = _ + @JsonProperty(value = "schema", required = false) private var schema: Option[SchemaSpec] = None @JsonProperty(value = "filter", required=false) private var filter:Option[String] = None /** @@ -108,8 +116,8 @@ class SchemaMappingSpec extends MappingSpec { SchemaMapping( instanceProperties(context), MappingOutputIdentifier(context.evaluate(this.input)), - context.evaluate(columns).toSeq, - if (schema != null) schema.instantiate(context) else null, + context.evaluate(columns).toSeq.map(kv => Field(kv._1, FieldType.of(kv._2))), + schema.map(_.instantiate(context)), context.evaluate(filter) ) } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SelectMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SelectMapping.scala index 5f386fc7f..cfbb82e2b 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SelectMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SelectMapping.scala @@ -21,7 +21,7 @@ import 
org.apache.spark.sql.DataFrame import org.apache.spark.sql.functions import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -46,12 +46,12 @@ extends BaseMapping { /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param input * @return */ - override def execute(executor:Executor, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(input != null) val df = input(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SortMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SortMapping.scala index 2e090336a..dcf7ae620 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SortMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SortMapping.scala @@ -1,5 +1,5 @@ /* - * Copyright 2018-2019 Kaya Kupferschmidt + * Copyright 2018-2021 Kaya Kupferschmidt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -21,7 +21,7 @@ import org.apache.spark.sql.DataFrame import org.apache.spark.sql.functions.col import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -31,7 +31,7 @@ import com.dimajix.flowman.types.StructType case class SortMapping( instanceProperties:Mapping.Properties, input:MappingOutputIdentifier, - columns:Seq[(String,String)] + columns:Seq[(String,SortOrder)] ) extends BaseMapping { /** * Returns the dependencies (i.e. 
names of tables in the Dataflow model) @@ -44,20 +44,22 @@ case class SortMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param tables * @return */ - override def execute(executor:Executor, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(tables != null) val df = tables(input) val cols = columns.map(nv => - if (nv._2.toLowerCase == "desc") - col(nv._1).desc - else - col(nv._1).asc + nv._2 match { + case SortOrder(Ascending, NullsFirst) => col(nv._1).asc_nulls_first + case SortOrder(Ascending, NullsLast) => col(nv._1).asc_nulls_last + case SortOrder(Descending, NullsFirst) => col(nv._1).desc_nulls_first + case SortOrder(Descending, NullsLast) => col(nv._1).desc_nulls_last + } ) val result = df.sort(cols:_*) @@ -69,8 +71,8 @@ case class SortMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val result = input(this.input) @@ -94,7 +96,7 @@ class SortMappingSpec extends MappingSpec { SortMapping( instanceProperties(context), MappingOutputIdentifier(context.evaluate(input)), - columns.flatMap(context.evaluate) + columns.flatMap(context.evaluate).map { case(col,order) => col -> SortOrder.of(order) } ) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SortOrder.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SortOrder.scala new file mode 100644 index 000000000..6b7ef3698 --- /dev/null +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SortOrder.scala @@ -0,0 +1,67 @@ +/* + * Copyright 2018-2019 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.mapping + +import java.util.Locale + + +object NullOrdering { + def of(str:String) : NullOrdering = { + val tokens = str.toLowerCase(Locale.ROOT).split(' ').toSeq.map(_.trim).filter(_.nonEmpty) + tokens match { + case Seq("nulls", "first") => NullsFirst + case Seq("nulls", "last") => NullsLast + case _ => throw new IllegalArgumentException(s"Unsupported null ordering '$str'") + } + } +} +abstract sealed class NullOrdering +case object NullsFirst extends NullOrdering +case object NullsLast extends NullOrdering + +object SortDirection { + def of(str:String) : SortDirection = { + str.toLowerCase(Locale.ROOT) match { + case "asc" => Ascending + case "desc" => Descending + case _ => throw new IllegalArgumentException(s"Unsupported sort direction '$str'") + } + } +} +abstract sealed class SortDirection { + def defaultNullOrdering: NullOrdering +} +case object Ascending extends SortDirection { + override def defaultNullOrdering: NullOrdering = NullsFirst +} +case object Descending extends SortDirection { + override def defaultNullOrdering: NullOrdering = NullsLast +} + +object SortOrder { + def apply(direction: SortDirection) : SortOrder = SortOrder(direction, direction.defaultNullOrdering) + + def of(str:String) : SortOrder = { + val tokens = str.split(' ').map(_.trim).filter(_.nonEmpty) + tokens.toSeq match { + case Seq(str) => SortOrder(SortDirection.of(str)) + case Seq(d, n1, n2) => SortOrder(SortDirection.of(d), NullOrdering.of(n1 + " " + n2)) + case _ => throw new IllegalArgumentException(s"Unsupported sort order '$str'") + } + } +} +case class SortOrder(direction:SortDirection, nullOrdering: NullOrdering) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SqlMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SqlMapping.scala index 56cd9df09..4f0eb8fc2 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SqlMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/SqlMapping.scala @@ -26,10 +26,11 @@ import org.apache.hadoop.fs.Path import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.spark.sql.DataFrameUtils import com.dimajix.spark.sql.SqlParser @@ -43,20 +44,17 @@ extends BaseMapping { /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param input * @return */ - override def execute(executor:Executor, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, input:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(input != null) - // Register all input DataFrames as temp views - input.foreach(kv => kv._2.createOrReplaceTempView(kv._1.name)) - // Execute query - val result = executor.spark.sql(statement) - // Call SessionCatalog.dropTempView to avoid unpersisting the possibly cached dataset. 
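To make the syntax accepted by the new SortOrder parser concrete, a few examples that follow directly from the definitions above: a bare direction falls back to that direction's default null ordering, an explicit null ordering must be spelled out as e.g. `desc nulls first`, and matching is case-insensitive:

```scala
import com.dimajix.flowman.spec.mapping.{Ascending, Descending, NullsFirst, NullsLast, SortOrder}

assert(SortOrder.of("asc")  == SortOrder(Ascending, NullsFirst))   // default for ascending
assert(SortOrder.of("desc") == SortOrder(Descending, NullsLast))   // default for descending

assert(SortOrder.of("desc nulls first") == SortOrder(Descending, NullsFirst))
assert(SortOrder.of("ASC NULLS LAST")   == SortOrder(Ascending, NullsLast))
```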
- input.foreach(kv => executor.spark.sessionState.catalog.dropTempView(kv._1.name)) + val result = DataFrameUtils.withTempViews(input.map(kv => kv._1.name -> kv._2)) { + execution.spark.sql(statement) + } Map("main" -> result) } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/TemplateMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/TemplateMapping.scala index 456c2ea7e..04c26e328 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/TemplateMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/TemplateMapping.scala @@ -20,8 +20,9 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.ScopeContext +import com.dimajix.flowman.graph.Linker import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingIdentifier @@ -76,15 +77,15 @@ case class TemplateMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param input * @return */ - override def execute(executor: Executor, input: Map[MappingOutputIdentifier, DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution: Execution, input: Map[MappingOutputIdentifier, DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(input != null) - val result = mappingInstance.execute(executor, input) + val result = mappingInstance.execute(execution, input) // Apply optional filter result.map { case(name,df) => name -> filter.map(df.filter).getOrElse(df) } @@ -95,11 +96,11 @@ case class TemplateMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) - mappingInstance.describe(executor, input) + mappingInstance.describe(execution, input) } /** @@ -108,12 +109,20 @@ case class TemplateMapping( * @param input * @return */ - override def describe(executor:Executor, input: Map[MappingOutputIdentifier, StructType], output: String): StructType = { - require(executor != null) + override def describe(execution:Execution, input: Map[MappingOutputIdentifier, StructType], output: String): StructType = { + require(execution != null) require(input != null) require(output != null && output.nonEmpty) - mappingInstance.describe(executor, input, output) + mappingInstance.describe(execution, input, output) + } + + /** + * Creates all known links for building a descriptive graph of the whole data flow + * Params: linker - The linker object to use for creating new edges + */ + override def link(linker: Linker): Unit = { + mappingInstance.link(linker) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/TransitiveChildrenMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/TransitiveChildrenMapping.scala index 731428380..a9d7a8c79 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/TransitiveChildrenMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/TransitiveChildrenMapping.scala 
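SqlMapping now delegates the temp-view bookkeeping to `DataFrameUtils.withTempViews`. The helper's exact signature is not part of this diff; a sketch of what such a scoped helper typically looks like, using only the Spark calls that the removed lines already used (`createOrReplaceTempView` and `SessionCatalog.dropTempView`, the latter avoiding an unpersist of cached data):

```scala
import org.apache.spark.sql.{DataFrame, SparkSession}

// Assumed shape of the helper: expose the DataFrames as temp views only for the
// duration of the block, then drop the views again in a finally clause.
def withTempViews[T](spark: SparkSession, views: Map[String, DataFrame])(body: => T): T = {
  views.foreach { case (name, df) => df.createOrReplaceTempView(name) }
  try body
  finally views.keys.foreach(spark.sessionState.catalog.dropTempView)
}
```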
@@ -23,7 +23,7 @@ import org.apache.spark.sql.DataFrame import org.apache.spark.sql.functions.col import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -47,12 +47,12 @@ case class TransitiveChildrenMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param input * @return */ - override def execute(executor: Executor, input: Map[MappingOutputIdentifier, DataFrame]): Map[String, DataFrame] = { - require(executor != null) + override def execute(execution: Execution, input: Map[MappingOutputIdentifier, DataFrame]): Map[String, DataFrame] = { + require(execution != null) require(input != null) def iterate(df:DataFrame) : DataFrame = { @@ -120,8 +120,8 @@ case class TransitiveChildrenMapping( * @param input * @return */ - override def describe(executor:Executor, input: Map[MappingOutputIdentifier, StructType]): Map[String, StructType] = { - require(executor != null) + override def describe(execution:Execution, input: Map[MappingOutputIdentifier, StructType]): Map[String, StructType] = { + require(execution != null) require(input != null) val schema = input(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UnionMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UnionMapping.scala index 9a2a1c594..3761ab3c5 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UnionMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UnionMapping.scala @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -50,12 +50,12 @@ case class UnionMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param tables * @return */ - override def execute(executor:Executor, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { - require(executor != null) + override def execute(execution:Execution, tables:Map[MappingOutputIdentifier,DataFrame]) : Map[String,DataFrame] = { + require(execution != null) require(tables != null) val dfs = inputs.map(tables(_)) @@ -87,8 +87,8 @@ case class UnionMapping( Map("main" -> filteredResult) } - override def describe(executor:Executor, input: Map[MappingOutputIdentifier, StructType]): Map[String, StructType] = { - require(executor != null) + override def describe(execution:Execution, input: Map[MappingOutputIdentifier, StructType]): Map[String, StructType] = { + require(execution != null) require(input != null) val result = diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UnitMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UnitMapping.scala index d13cffd7d..2fe166225 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UnitMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UnitMapping.scala @@ -21,7 +21,7 @@ import 
com.fasterxml.jackson.databind.annotation.JsonDeserialize import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.ScopeContext import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping @@ -86,14 +86,14 @@ case class UnitMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param input * @return */ - override def execute(executor: Executor, input: Map[MappingOutputIdentifier, DataFrame]): Map[String, DataFrame] = { + override def execute(execution: Execution, input: Map[MappingOutputIdentifier, DataFrame]): Map[String, DataFrame] = { mappingInstances .filter(_._2.outputs.contains("main")) - .map{ case (id,mapping) => (id,executor.instantiate(mapping, "main")) } + .map{ case (id,mapping) => (id,execution.instantiate(mapping, "main")) } } /** @@ -102,14 +102,14 @@ case class UnitMapping( * @param input * @return */ - override def describe(executor:Executor, input: Map[MappingOutputIdentifier, StructType]): Map[String, StructType] = { - require(executor != null) + override def describe(execution:Execution, input: Map[MappingOutputIdentifier, StructType]): Map[String, StructType] = { + require(execution != null) require(input != null) mappingInstances .filter(_._2.outputs.contains("main")) .keys - .map(name => name -> describe(executor, input, name)) + .map(name => name -> describe(execution, input, name)) .toMap } @@ -119,14 +119,14 @@ case class UnitMapping( * @param input * @return */ - override def describe(executor:Executor, input: Map[MappingOutputIdentifier, StructType], output:String): StructType = { - require(executor != null) + override def describe(execution:Execution, input: Map[MappingOutputIdentifier, StructType], output:String): StructType = { + require(execution != null) require(input != null) require(output != null && output.nonEmpty) def describe(mapping:Mapping, output:String) : StructType = { val deps = dependencies(mapping) - mapping.describe(executor, deps, output) + mapping.describe(execution, deps, output) } def describe2(context:Context, id:MappingOutputIdentifier) : StructType = { val mapping = context.getMapping(id.mapping) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UnpackJsonMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UnpackJsonMapping.scala index 10721201d..659a27138 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UnpackJsonMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UnpackJsonMapping.scala @@ -22,7 +22,7 @@ import org.apache.spark.sql.functions.from_json import org.apache.spark.sql.types.StringType import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -62,12 +62,12 @@ case class UnpackJsonMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param tables * @return */ - override def execute(executor: Executor, tables: Map[MappingOutputIdentifier, DataFrame]): Map[String,DataFrame] = { - require(executor != null) + override def execute(execution: Execution, tables: 
Map[MappingOutputIdentifier, DataFrame]): Map[String,DataFrame] = { + require(execution != null) require(tables != null) val table = tables(input) @@ -97,8 +97,8 @@ case class UnpackJsonMapping( * @param input * @return */ - override def describe(executor:Executor, input: Map[MappingOutputIdentifier, StructType]): Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input: Map[MappingOutputIdentifier, StructType]): Map[String,StructType] = { + require(execution != null) require(input != null) val schema = input(this.input) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UpdateMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UpsertMapping.scala similarity index 86% rename from flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UpdateMapping.scala rename to flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UpsertMapping.scala index c981f6f9a..5a2a6f3d9 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UpdateMapping.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/UpsertMapping.scala @@ -20,7 +20,7 @@ import com.fasterxml.jackson.annotation.JsonProperty import org.apache.spark.sql.DataFrame import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseMapping import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingOutputIdentifier @@ -28,7 +28,7 @@ import com.dimajix.flowman.transforms.SchemaEnforcer import com.dimajix.flowman.types.StructType -case class UpdateMapping( +case class UpsertMapping( instanceProperties:Mapping.Properties, input:MappingOutputIdentifier, updates:MappingOutputIdentifier, @@ -47,12 +47,12 @@ case class UpdateMapping( /** * Executes this MappingType and returns a corresponding DataFrame * - * @param executor + * @param execution * @param tables * @return */ - override def execute(executor: Executor, tables: Map[MappingOutputIdentifier, DataFrame]): Map[String,DataFrame] = { - require(executor != null) + override def execute(execution: Execution, tables: Map[MappingOutputIdentifier, DataFrame]): Map[String,DataFrame] = { + require(execution != null) require(tables != null) require(input != null && input.nonEmpty, "Missing input table") @@ -82,8 +82,8 @@ case class UpdateMapping( * @param input * @return */ - override def describe(executor:Executor, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { - require(executor != null) + override def describe(execution:Execution, input:Map[MappingOutputIdentifier,StructType]) : Map[String,StructType] = { + require(execution != null) require(input != null) val result = input(this.input) @@ -94,7 +94,7 @@ case class UpdateMapping( -class UpdateMappingSpec extends MappingSpec { +class UpsertMappingSpec extends MappingSpec { @JsonProperty(value = "input", required = true) private var input: String = _ @JsonProperty(value = "updates", required = true) private var updates: String = _ @JsonProperty(value = "keyColumns", required = true) private var keyColumns: Seq[String] = Seq() @@ -105,8 +105,8 @@ class UpdateMappingSpec extends MappingSpec { * @param context * @return */ - override def instantiate(context: Context): UpdateMapping = { - UpdateMapping( + override def instantiate(context: Context): UpsertMapping = { + UpsertMapping( instanceProperties(context), 
MappingOutputIdentifier(context.evaluate(input)), MappingOutputIdentifier(context.evaluate(updates)), diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ValuesMapping.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ValuesMapping.scala new file mode 100644 index 000000000..76fada29b --- /dev/null +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/mapping/ValuesMapping.scala @@ -0,0 +1,119 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.mapping + +import com.fasterxml.jackson.annotation.JsonProperty +import org.apache.spark.sql.DataFrame + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.model.BaseMapping +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Schema +import com.dimajix.flowman.spec.schema.SchemaSpec +import com.dimajix.flowman.types.ArrayRecord +import com.dimajix.flowman.types.Field +import com.dimajix.flowman.types.FieldType +import com.dimajix.flowman.types.MapRecord +import com.dimajix.flowman.types.Record +import com.dimajix.flowman.types.StructType +import com.dimajix.flowman.types.ValueRecord +import com.dimajix.spark.sql.DataFrameUtils + + +case class ValuesMapping( + instanceProperties:Mapping.Properties, + columns:Seq[Field] = Seq(), + schema:Option[Schema] = None, + records:Seq[Record] = Seq() +) extends BaseMapping { + if (schema.isEmpty && columns.isEmpty) + throw new IllegalArgumentException(s"Require either schema or columns in mapping $name") + + /** + * Returns the dependencies (i.e. names of tables in the Dataflow model) + * + * @return + */ + override def inputs: Seq[MappingOutputIdentifier] = Seq() + + /** + * Executes this Mapping and returns a corresponding map of DataFrames per output + * + * @param execution + * @param input + * @return + */ + override def execute(execution: Execution, input: Map[MappingOutputIdentifier, DataFrame]): Map[String, DataFrame] = { + val recordsSchema = StructType(schema.map(_.fields).getOrElse(columns)) + val sparkSchema = recordsSchema.sparkType + + val values = records.map(_.toArray(recordsSchema)) + val df = DataFrameUtils.ofStringValues(execution.spark, values, sparkSchema) + Map("main" -> df) + } + + + /** + * Creates an output identifier for the primary output + * + * @return + */ + override def output: MappingOutputIdentifier = { + MappingOutputIdentifier(identifier, "main") + } + + /** + * Lists all outputs of this mapping. Every mapping should have one "main" output + * + * @return + */ + override def outputs: Seq[String] = Seq("main") + + /** + * Returns the schema as produced by this mapping, relative to the given input schema. The map might not contain + * schema information for all outputs, if the schema cannot be inferred. 
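ValuesMapping pushes string records plus a schema through `DataFrameUtils.ofStringValues`. That helper is not shown in this diff; the net effect can be sketched in plain Spark by building an all-string DataFrame first and then casting each column to its target type (illustrative only, with made-up columns and values):

```scala
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

val spark = SparkSession.builder().master("local[1]").appName("values-sketch").getOrCreate()

// Target schema and records as they might appear in a `values` mapping
val schema  = StructType(Seq(StructField("id", IntegerType), StructField("name", StringType)))
val records = Seq(Array("1", "alice"), Array("2", "bob"))

// Build a string-typed DataFrame, then cast every column to the requested type
val stringSchema = StructType(schema.fields.map(f => StructField(f.name, StringType)))
val rows  = spark.sparkContext.parallelize(records.map(r => Row(r: _*)))
val typed = spark.createDataFrame(rows, stringSchema)
  .select(schema.fields.map(f => col(f.name).cast(f.dataType)): _*)

typed.show()   // id as integer, name as string
```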
+ * + * @param input + * @return + */ + override def describe(execution: Execution, input: Map[MappingOutputIdentifier, StructType]): Map[String, StructType] = { + Map("main" -> StructType(schema.map(_.fields).getOrElse(columns))) + } +} + + +class ValuesMappingSpec extends MappingSpec { + @JsonProperty(value = "schema", required=false) private var schema:Option[SchemaSpec] = None + @JsonProperty(value = "columns", required = false) private var columns:Map[String,String] = Map() + @JsonProperty(value = "records", required=false) private var records:Seq[Record] = Seq() + + /** + * Creates the instance of the specified Mapping with all variable interpolation being performed + * @param context + * @return + */ + override def instantiate(context: Context): ValuesMapping = { + ValuesMapping( + instanceProperties(context), + context.evaluate(columns).toSeq.map(kv => Field(kv._1, FieldType.of(kv._2))), + schema.map(_.instantiate(context)), + records.map(_.map(context.evaluate)) + ) + } +} diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/metric/MetricSinkSpec.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/metric/MetricSinkSpec.scala index d1cea2687..3735cf67d 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/metric/MetricSinkSpec.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/metric/MetricSinkSpec.scala @@ -20,10 +20,10 @@ import com.fasterxml.jackson.annotation.JsonSubTypes import com.fasterxml.jackson.annotation.JsonTypeInfo import com.dimajix.common.TypeRegistry -import com.dimajix.flowman.annotation.MetricSinkType import com.dimajix.flowman.execution.Context import com.dimajix.flowman.metric.MetricSink import com.dimajix.flowman.spec.Spec +import com.dimajix.flowman.spec.annotation.MetricSinkType import com.dimajix.flowman.spi.ClassAnnotationHandler diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/FileRelation.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/FileRelation.scala index 826a3cd40..214f2c57c 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/FileRelation.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/FileRelation.scala @@ -32,7 +32,7 @@ import org.slf4j.LoggerFactory import com.dimajix.common.Trilean import com.dimajix.flowman.catalog.PartitionSpec import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.hadoop.FileCollector import com.dimajix.flowman.hadoop.FileUtils @@ -57,7 +57,8 @@ case class FileRelation( override val partitions: Seq[PartitionField] = Seq(), location:Path, pattern:Option[String] = None, - format:String = "csv" + format:String = "csv", + options:Map[String,String] = Map() ) extends BaseRelation with SchemaRelation with PartitionedRelation { private val logger = LoggerFactory.getLogger(classOf[FileRelation]) @@ -110,13 +111,13 @@ case class FileRelation( /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * @param schema - the schema to read. If none is specified, all available columns will be read * @param partitions - List of partitions. 
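FileRelation (and FileRelationSpec further below) gains an `options` map that is forwarded to the reader and writer factories together with the format. In plain Spark terms these simply become DataFrameReader/DataFrameWriter options, for example (hypothetical location and CSV options):

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[1]").getOrCreate()

// Roughly what reader(execution, format, options) boils down to for a CSV file relation
val df = spark.read
  .format("csv")
  .options(Map("header" -> "true", "delimiter" -> ";"))  // the new per-relation options
  .load("/data/landing/transactions")                    // hypothetical location
```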
If none are specified, all the data will be read * @return */ - override def read(executor:Executor, schema:Option[StructType], partitions:Map[String,FieldValue] = Map()) : DataFrame = { - require(executor != null) + override def read(execution:Execution, schema:Option[StructType], partitions:Map[String,FieldValue] = Map()) : DataFrame = { + require(execution != null) require(schema != null) require(partitions != null) @@ -133,13 +134,12 @@ case class FileRelation( logger.info(s"File relation '$identifier' reads ${paths.size} files under location '${location}' in partition ${partition.spec}") val pathNames = paths.map(_.toString) - val reader = this.reader(executor) - .format(format) + val reader = this.reader(execution, format, options) // Use either load(files) or load(single_file) - this actually results in different code paths in Spark // load(single_file) will set the "path" option, while load(multiple_files) needs direct support from the // underlying format implementation - val providingClass = DataSource.lookupDataSource(format, executor.spark.sessionState.conf) + val providingClass = DataSource.lookupDataSource(format, execution.spark.sessionState.conf) val df = if (SparkShim.relationSupportsMultiplePaths(providingClass)) { reader.load(pathNames: _*) } @@ -156,12 +156,12 @@ case class FileRelation( /** * Writes data into the relation, possibly into a specific partition - * @param executor + * @param execution * @param df - dataframe to write * @param partition - destination partition */ - override def write(executor:Executor, df:DataFrame, partition:Map[String,SingleValue], mode:OutputMode = OutputMode.OVERWRITE) : Unit = { - require(executor != null) + override def write(execution:Execution, df:DataFrame, partition:Map[String,SingleValue], mode:OutputMode = OutputMode.OVERWRITE) : Unit = { + require(execution != null) require(df != null) require(partition != null) @@ -172,8 +172,7 @@ case class FileRelation( logger.info(s"Writing file relation '$identifier' partition ${HiveDialect.expr.partition(partitionSpec)} to output location '$outputPath' as '$format' with mode '$mode'") - this.writer(executor, df, mode.batchMode) - .format(format) + this.writer(execution, df, format, options, mode.batchMode) .save(outputPath.toString) } @@ -183,18 +182,18 @@ case class FileRelation( * [[write]] is required for getting up-to-date contents. 
A [[write]] with output mode * [[OutputMode.ERROR_IF_EXISTS]] then should not throw an error but create the corresponding partition * - * @param executor + * @param execution * @param partition * @return */ - override def loaded(executor: Executor, partition: Map[String, SingleValue]): Trilean = { - require(executor != null) + override def loaded(execution: Execution, partition: Map[String, SingleValue]): Trilean = { + require(execution != null) require(partition != null) requireValidPartitionKeys(partition) def checkPartition(path:Path) = { - val fs = path.getFileSystem(executor.hadoopConf) + val fs = path.getFileSystem(execution.hadoopConf) FileUtils.isValidFileData(fs, path) } @@ -212,24 +211,24 @@ case class FileRelation( /** * Returns true if the relation already exists, otherwise it needs to be created prior usage * - * @param executor + * @param execution * @return */ - override def exists(executor:Executor) : Trilean = { - require(executor != null) + override def exists(execution:Execution) : Trilean = { + require(execution != null) - val fs = location.getFileSystem(executor.spark.sparkContext.hadoopConfiguration) + val fs = location.getFileSystem(execution.spark.sparkContext.hadoopConfiguration) fs.exists(location) } /** * This method will create the given directory as specified in "location" - * @param executor + * @param execution */ - override def create(executor:Executor, ifNotExists:Boolean=false) : Unit = { - require(executor != null) + override def create(execution:Execution, ifNotExists:Boolean=false) : Unit = { + require(execution != null) - val fs = location.getFileSystem(executor.spark.sparkContext.hadoopConfiguration) + val fs = location.getFileSystem(execution.spark.sparkContext.hadoopConfiguration) if (fs.exists(location)) { if (!ifNotExists) { throw new FileAlreadyExistsException(location.toString) @@ -245,18 +244,18 @@ case class FileRelation( * This will update any existing relation to the specified metadata. Actually for this file based target, the * command will precisely do nothing. * - * @param executor + * @param execution */ - override def migrate(executor:Executor) : Unit = { + override def migrate(execution:Execution) : Unit = { } /** * Removes one or more partitions. 
- * @param executor + * @param execution * @param partitions */ - override def truncate(executor:Executor, partitions:Map[String,FieldValue] = Map()) : Unit = { - require(executor != null) + override def truncate(execution:Execution, partitions:Map[String,FieldValue] = Map()) : Unit = { + require(execution != null) require(partitions != null) requireValidPartitionKeys(partitions) @@ -281,12 +280,12 @@ case class FileRelation( /** * This method will remove the given directory as specified in "location" - * @param executor + * @param execution */ - override def destroy(executor:Executor, ifExists:Boolean) : Unit = { - require(executor != null) + override def destroy(execution:Execution, ifExists:Boolean) : Unit = { + require(execution != null) - val fs = location.getFileSystem(executor.spark.sparkContext.hadoopConfiguration) + val fs = location.getFileSystem(execution.spark.sparkContext.hadoopConfiguration) if (!fs.exists(location)) { if (!ifExists) { throw new FileNotFoundException(location.toString) @@ -338,6 +337,7 @@ class FileRelationSpec extends RelationSpec with SchemaRelationSpec with Partiti @JsonProperty(value="location", required = true) private var location: String = "/" @JsonProperty(value="format", required = true) private var format: String = "csv" @JsonProperty(value="pattern", required = false) private var pattern: Option[String] = None + @JsonProperty(value="options", required=false) private var options:Map[String,String] = Map() /** * Creates the instance of the specified Relation with all variable interpolation being performed @@ -351,7 +351,8 @@ class FileRelationSpec extends RelationSpec with SchemaRelationSpec with Partiti partitions.map(_.instantiate(context)), new Path(context.evaluate(location)), pattern, - context.evaluate(format) + context.evaluate(format), + context.evaluate(options) ) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/GenericRelation.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/GenericRelation.scala index a92683eda..e8cd3de5a 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/GenericRelation.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/GenericRelation.scala @@ -28,7 +28,7 @@ import org.slf4j.LoggerFactory import com.dimajix.common.Trilean import com.dimajix.common.Unknown import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.jdbc.HiveDialect import com.dimajix.flowman.model.BaseRelation @@ -44,7 +44,8 @@ import com.dimajix.flowman.util.SchemaUtils case class GenericRelation( override val instanceProperties:Relation.Properties, override val schema:Option[Schema], - format:String + format:String, + options:Map[String,String] = Map() ) extends BaseRelation with SchemaRelation { private val logger = LoggerFactory.getLogger(classOf[FileRelation]) @@ -75,45 +76,45 @@ case class GenericRelation( /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * @param schema - the schema to read. If none is specified, all available columns will be read * @param partitions - List of partitions. 
If none are specified, all the data will be read * @return */ - override def read(executor:Executor, schema:Option[StructType], partitions:Map[String,FieldValue] = Map()) : DataFrame = { - require(executor != null) + override def read(execution:Execution, schema:Option[StructType], partitions:Map[String,FieldValue] = Map()) : DataFrame = { + require(execution != null) require(schema != null) require(partitions != null) logger.info(s"Reading generic relation '$identifier'") - val data = reader(executor).load() + val data = reader(execution, format, options).load() SchemaUtils.applySchema(data, schema) } /** * Writes data into the relation, possibly into a specific partition - * @param executor + * @param execution * @param df - dataframe to write * @param partition - destination partition */ - override def write(executor:Executor, df:DataFrame, partition:Map[String,SingleValue], mode:OutputMode = OutputMode.OVERWRITE) : Unit = { - require(executor != null) + override def write(execution:Execution, df:DataFrame, partition:Map[String,SingleValue], mode:OutputMode = OutputMode.OVERWRITE) : Unit = { + require(execution != null) require(df != null) require(partition != null) logger.info(s"Writing generic relation '$identifier' with mode '$mode'") - writer(executor, df, mode.batchMode) + writer(execution, df, format, options, mode.batchMode) .save() } /** * Returns true if the relation already exists, otherwise it needs to be created prior usage - * @param executor + * @param execution * @return */ - override def exists(executor:Executor) : Trilean = Unknown + override def exists(execution:Execution) : Trilean = Unknown /** @@ -121,73 +122,46 @@ case class GenericRelation( * [[write]] is required for getting up-to-date contents. A [[write]] with output mode * [[OutputMode.ERROR_IF_EXISTS]] then should not throw an error but create the corresponding partition * - * @param executor + * @param execution * @param partition * @return */ - override def loaded(executor: Executor, partition: Map[String, SingleValue]): Trilean = Unknown + override def loaded(execution: Execution, partition: Map[String, SingleValue]): Trilean = Unknown /** * This method will create the given directory as specified in "location" * - * @param executor + * @param execution */ - override def create(executor:Executor, ifNotExists:Boolean=false) : Unit = {} + override def create(execution:Execution, ifNotExists:Boolean=false) : Unit = {} /** * This will update any existing relation to the specified metadata. Actually for this file based target, the * command will precisely do nothing. * - * @param executor + * @param execution */ - override def migrate(executor:Executor) : Unit = {} + override def migrate(execution:Execution) : Unit = {} /** * Removes one or more partitions. 
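With this change the GenericRelation drops its private `reader`/`writer` overrides (removed further below) and instead hands `format` and `options` straight to shared helpers via `reader(execution, format, options)` and `writer(execution, df, format, options, mode)`. The reworked base-class helpers themselves are not part of this section; the following plain-Spark sketch only illustrates the presumed behaviour, and all names and signatures in it are assumptions rather than the actual Flowman API:

```scala
// Minimal, self-contained sketch (assumed shape, NOT the actual BaseRelation code) of what
// the shared helpers presumably do with the new format/options parameters: configure the
// Spark reader/writer once, centrally, instead of per relation type.
import org.apache.spark.sql.{DataFrame, DataFrameReader, DataFrameWriter, Row, SaveMode, SparkSession}
import org.apache.spark.sql.types.StructType

def reader(spark: SparkSession, format: String, options: Map[String, String],
           inputSchema: Option[StructType]): DataFrameReader = {
  val r = spark.read.format(format).options(options)
  inputSchema.foreach(s => r.schema(s))   // apply an explicit input schema, if one is defined
  r
}

def writer(df: DataFrame, format: String, options: Map[String, String],
           mode: SaveMode): DataFrameWriter[Row] =
  df.write.format(format).options(options).mode(mode)
```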
- * @param executor + * @param execution * @param partitions */ - override def truncate(executor:Executor, partitions:Map[String,FieldValue] = Map()) : Unit = {} + override def truncate(execution:Execution, partitions:Map[String,FieldValue] = Map()) : Unit = {} /** * This method will remove the given directory as specified in "location" - * @param executor + * @param execution */ - override def destroy(executor:Executor, ifExists:Boolean) : Unit = {} - - /** - * Creates a DataFrameReader with bells and whistles configured from the specification - * - * @param executor - * @return - */ - protected override def reader(executor:Executor) : DataFrameReader = { - val reader = executor.spark.read - .format(format) - .options(options) - - // Apply explicit schema, and load dataFrame - inputSchema.foreach(s => reader.schema(s)) - - reader - } - - /** - * Creates a DataFrameReader with bells and whistles configured from the specification - * - * @param executor - * @return - */ - protected override def writer(executor:Executor, df:DataFrame, saveMode: SaveMode) : DataFrameWriter[Row] = { - super.writer(executor, df, saveMode) - .format(format) - } + override def destroy(execution:Execution, ifExists:Boolean) : Unit = {} } class GenericRelationSpec extends RelationSpec with SchemaRelationSpec { @JsonProperty(value="format", required = true) private var format: String = "csv" + @JsonProperty(value="options", required=false) private var options:Map[String,String] = Map() /** * Creates the instance of the specified Relation with all variable interpolation being performed @@ -198,7 +172,8 @@ class GenericRelationSpec extends RelationSpec with SchemaRelationSpec { GenericRelation( instanceProperties(context), schema.map(_.instantiate(context)), - context.evaluate(format) + context.evaluate(format), + context.evaluate(options) ) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveRelation.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveRelation.scala index cd81b85ae..a5ea366f9 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveRelation.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveRelation.scala @@ -22,7 +22,7 @@ import org.apache.spark.sql.types.StructType import org.slf4j.Logger import com.dimajix.common.Trilean -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.model.BaseRelation import com.dimajix.flowman.model.PartitionedRelation import com.dimajix.flowman.types.FieldValue @@ -39,19 +39,19 @@ abstract class HiveRelation extends BaseRelation with PartitionedRelation { /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * @param schema - the schema to read. If none is specified, all available columns will be read * @param partitions - List of partitions. 
If none are specified, all the data will be read * @return */ - override def read(executor: Executor, schema: Option[StructType], partitions: Map[String, FieldValue] = Map()): DataFrame = { - require(executor != null) + override def read(execution: Execution, schema: Option[StructType], partitions: Map[String, FieldValue] = Map()): DataFrame = { + require(execution != null) require(schema != null) require(partitions != null) logger.info(s"Reading Hive relation '$identifier' from table $tableIdentifier using partition values $partitions") - val reader = executor.spark.read.options(options) + val reader = execution.spark.read val tableDf = reader.table(tableIdentifier.unquotedString) val df = filterPartition(tableDf, partitions) @@ -60,13 +60,13 @@ abstract class HiveRelation extends BaseRelation with PartitionedRelation { /** * Returns true if the relation already exists, otherwise it needs to be created prior usage - * @param executor + * @param execution * @return */ - override def exists(executor:Executor) : Trilean = { - require(executor != null) + override def exists(execution:Execution) : Trilean = { + require(execution != null) - val catalog = executor.catalog + val catalog = execution.catalog catalog.tableExists(tableIdentifier) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveTableRelation.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveTableRelation.scala index 365c58fe0..5a55f9518 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveTableRelation.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveTableRelation.scala @@ -41,7 +41,7 @@ import com.dimajix.common.Unknown import com.dimajix.common.Trilean import com.dimajix.flowman.catalog.PartitionSpec import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.IncompatibleSchemaException import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.hadoop.FileUtils @@ -71,7 +71,8 @@ case class HiveTableRelation( override val table: String, external: Boolean = false, location: Option[Path] = None, - format: String = "parquet", + format: Option[String] = None, + options: Map[String,String] = Map(), rowFormat: Option[String] = None, inputFormat: Option[String] = None, outputFormat: Option[String] = None, @@ -118,12 +119,12 @@ case class HiveTableRelation( /** * Writes data into the relation, possibly into a specific partition * - * @param executor + * @param execution * @param df - dataframe to write * @param partition - destination partition */ - override def write(executor: Executor, df: DataFrame, partition: Map[String, SingleValue], mode:OutputMode = OutputMode.OVERWRITE): Unit = { - require(executor != null) + override def write(execution: Execution, df: DataFrame, partition: Map[String, SingleValue], mode:OutputMode = OutputMode.OVERWRITE): Unit = { + require(execution != null) require(df != null) require(partition != null) @@ -133,9 +134,9 @@ case class HiveTableRelation( val partitionSpec = schema.spec(partition) if (writer == "hive") - writeHive(executor, df, partitionSpec, mode) + writeHive(execution, df, partitionSpec, mode) else if (writer == "spark") - writeSpark(executor, df, partitionSpec, mode) + writeSpark(execution, df, partitionSpec, mode) else throw new IllegalArgumentException("Hive relations only support write modes 'hive' and 'spark'") } @@ -148,7 +149,7 @@ case class 
HiveTableRelation( * @param partitionSpec * @param mode */ - private def writeHive(executor: Executor, df: DataFrame, partitionSpec: PartitionSpec, mode:OutputMode): Unit = { + private def writeHive(executor: Execution, df: DataFrame, partitionSpec: PartitionSpec, mode:OutputMode): Unit = { require(executor != null) require(df != null) require(partitionSpec != null) @@ -185,10 +186,13 @@ case class HiveTableRelation( catalog.refreshPartition(tableIdentifier, partitionSpec) } else { - outputDf.write + // Create and configure writer + val writer = outputDf.write .mode(mode.batchMode) .options(options) - .insertInto(tableIdentifier.unquotedString) + format.foreach(writer.format) + + writer.insertInto(tableIdentifier.unquotedString) catalog.refreshTable(tableIdentifier) } @@ -203,7 +207,7 @@ case class HiveTableRelation( * @param partitionSpec * @param mode */ - private def writeSpark(executor: Executor, df: DataFrame, partitionSpec: PartitionSpec, mode:OutputMode): Unit = { + private def writeSpark(executor: Execution, df: DataFrame, partitionSpec: PartitionSpec, mode:OutputMode): Unit = { require(executor != null) require(df != null) require(partitionSpec != null) @@ -217,14 +221,14 @@ case class HiveTableRelation( val outputPath = partitionSpec.path(location.get, partitions.map(_.name)) // Perform Hive => Spark format mapping - val format = this.format.toLowerCase(Locale.ROOT) match { - case "avro" => "com.databricks.spark.avro" - case _ => this.format + val format = this.format.map(_.toLowerCase(Locale.ROOT)) match { + case Some("avro") => "com.databricks.spark.avro" + case Some(f) => f + case None => throw new IllegalArgumentException("Require 'format' for directly writing to Hive tables") } logger.info(s"Writing to output location '$outputPath' (partition=${partitionSpec.toMap}) as '$format'") - this.writer(executor, df, mode.batchMode) - .format(format) + this.writer(executor, df, format, options, mode.batchMode) .save(outputPath.toString) // Finally add Hive partition @@ -240,16 +244,16 @@ case class HiveTableRelation( /** * Cleans either individual partitions (for partitioned tables) or truncates a whole table * - * @param executor + * @param execution * @param partitions */ - override def truncate(executor: Executor, partitions: Map[String, FieldValue]): Unit = { - require(executor != null) + override def truncate(execution: Execution, partitions: Map[String, FieldValue]): Unit = { + require(execution != null) require(partitions != null) requireValidPartitionKeys(partitions) - val catalog = executor.catalog + val catalog = execution.catalog // When no partitions are specified, this implies that the whole table is to be truncated if (partitions.nonEmpty) { val partitionSchema = PartitionSchema(this.partitions) @@ -270,17 +274,17 @@ case class HiveTableRelation( * [[write]] is required for getting up-to-date contents. 
A [[write]] with output mode * [[OutputMode.ERROR_IF_EXISTS]] then should not throw an error but create the corresponding partition * - * @param executor + * @param execution * @param partition * @return */ - override def loaded(executor: Executor, partition: Map[String, SingleValue]): Trilean = { - require(executor != null) + override def loaded(execution: Execution, partition: Map[String, SingleValue]): Trilean = { + require(execution != null) require(partition != null) requireValidPartitionKeys(partition) - val catalog = executor.catalog + val catalog = execution.catalog if (partitions.nonEmpty) { val schema = PartitionSchema(partitions) val partitionSpec = schema.spec(partition) @@ -291,7 +295,7 @@ case class HiveTableRelation( // Since we do not know for an unpartitioned table if it contains data, we simply return "Unknown" if (catalog.tableExists(tableIdentifier)) { val location = catalog.getTableLocation(tableIdentifier) - val fs = location.getFileSystem(executor.hadoopConf) + val fs = location.getFileSystem(execution.hadoopConf) FileUtils.isValidHiveData(fs, location) } else { @@ -303,12 +307,12 @@ case class HiveTableRelation( /** * Creates a Hive table by executing the appropriate DDL * - * @param executor + * @param execution */ - override def create(executor: Executor, ifNotExists:Boolean=false): Unit = { - require(executor != null) + override def create(execution: Execution, ifNotExists:Boolean=false): Unit = { + require(execution != null) - if (!ifNotExists || exists(executor) == No) { + if (!ifNotExists || exists(execution) == No) { val sparkSchema = StructType(fields.map(_.sparkField)) logger.info(s"Creating Hive table relation '$identifier' with table $tableIdentifier and schema\n ${sparkSchema.treeString}") @@ -319,12 +323,12 @@ case class HiveTableRelation( logger.info(s"Storing Avro schema at location $avroSchemaUrl") new SchemaWriter(schema.toSeq.flatMap(_.fields)) .format("avro") - .save(executor.fs.file(avroSchemaUrl)) + .save(execution.fs.file(avroSchemaUrl)) } - val defaultStorage = HiveSerDe.getDefaultStorage(executor.spark.sessionState.conf) - val fileStorage: CatalogStorageFormat = if (format != null && format.nonEmpty) { - HiveSerDe.sourceToSerDe(format) match { + val defaultStorage = HiveSerDe.getDefaultStorage(execution.spark.sessionState.conf) + val fileStorage: CatalogStorageFormat = if (format.exists(_.nonEmpty)) { + HiveSerDe.sourceToSerDe(format.get) match { case Some(s) => CatalogStorageFormat.empty.copy( inputFormat = s.inputFormat, @@ -373,7 +377,7 @@ case class HiveTableRelation( ) // Create table - val catalog = executor.catalog + val catalog = execution.catalog catalog.createTable(catalogTable, false) } } @@ -381,12 +385,12 @@ case class HiveTableRelation( /** * Destroys the Hive table by executing an appropriate DROP statement * - * @param executor + * @param execution */ - override def destroy(executor: Executor, ifExists:Boolean): Unit = { - require(executor != null) + override def destroy(execution: Execution, ifExists:Boolean): Unit = { + require(execution != null) - val catalog = executor.catalog + val catalog = execution.catalog if (!ifExists || catalog.tableExists(tableIdentifier)) { logger.info(s"Destroying Hive table relation '$identifier' by dropping table $tableIdentifier") catalog.dropTable(tableIdentifier) @@ -395,18 +399,18 @@ case class HiveTableRelation( /** * Performs migration of a Hive table by adding new columns - * @param executor + * @param execution */ - override def migrate(executor: Executor): Unit = { - require(executor 
!= null) + override def migrate(execution: Execution): Unit = { + require(execution != null) - val catalog = executor.catalog + val catalog = execution.catalog if (catalog.tableExists(tableIdentifier)) { val table = catalog.getTable(tableIdentifier) if (table.tableType == CatalogTableType.VIEW) { logger.warn(s"TABLE target $tableIdentifier is currently a VIEW, dropping...") catalog.dropTable(tableIdentifier, false) - create(executor, false) + create(execution, false) } else { val sourceSchema = schema.get.sparkSchema @@ -433,6 +437,10 @@ case class HiveTableRelation( } } + override protected def outputSchema(execution:Execution) : Option[StructType] = { + Some(execution.catalog.getTable(tableIdentifier).dataSchema) + } + /** * Applies the specified schema and converts all field names to lowercase. This is required when directly * writing into HDFS and using Hive, since Hive only supports lower-case field names. @@ -440,9 +448,8 @@ case class HiveTableRelation( * @param df * @return */ - override protected def applyOutputSchema(executor:Executor, df: DataFrame) : DataFrame = { - val outputSchema = Some(executor.catalog.getTable(tableIdentifier).dataSchema) - val mixedCaseDf = SchemaUtils.applySchema(df, outputSchema) + override protected def applyOutputSchema(execution:Execution, df: DataFrame) : DataFrame = { + val mixedCaseDf = SchemaUtils.applySchema(df, outputSchema(execution)) if (needsLowerCaseSchema) { val lowerCaseSchema = SchemaUtils.toLowerCase(mixedCaseDf.schema) df.sparkSession.createDataFrame(mixedCaseDf.rdd, lowerCaseSchema) @@ -466,7 +473,8 @@ class HiveTableRelationSpec extends RelationSpec with SchemaRelationSpec with Pa @JsonProperty(value = "table", required = true) private var table: String = "" @JsonProperty(value = "external", required = false) private var external: String = "false" @JsonProperty(value = "location", required = false) private var location: Option[String] = None - @JsonProperty(value = "format", required = false) private var format: String = _ + @JsonProperty(value = "format", required = false) private var format: Option[String] = None + @JsonProperty(value = "options", required=false) private var options:Map[String,String] = Map() @JsonProperty(value = "rowFormat", required = false) private var rowFormat: Option[String] = None @JsonProperty(value = "inputFormat", required = false) private var inputFormat: Option[String] = None @JsonProperty(value = "outputFormat", required = false) private var outputFormat: Option[String] = None @@ -489,6 +497,7 @@ class HiveTableRelationSpec extends RelationSpec with SchemaRelationSpec with Pa context.evaluate(external).toBoolean, context.evaluate(location).map(p => new Path(context.evaluate(p))), context.evaluate(format), + context.evaluate(options), context.evaluate(rowFormat), context.evaluate(inputFormat), context.evaluate(outputFormat), diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveUnionTableRelation.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveUnionTableRelation.scala index 6907b52bd..4c04be5ec 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveUnionTableRelation.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveUnionTableRelation.scala @@ -31,7 +31,7 @@ import com.dimajix.common.Unknown import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context import com.dimajix.flowman.execution.ExecutionException -import com.dimajix.flowman.execution.Executor +import 
com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.jdbc.HiveDialect import com.dimajix.flowman.model.BaseRelation @@ -73,7 +73,8 @@ case class HiveUnionTableRelation( viewDatabase: Option[String] = None, view: String, external: Boolean = false, - format: String = "parquet", + format: Option[String] = None, + options: Map[String,String] = Map(), rowFormat: Option[String] = None, inputFormat: Option[String] = None, outputFormat: Option[String] = None, @@ -87,7 +88,7 @@ case class HiveUnionTableRelation( TableIdentifier(tablePrefix + "_" + version.toString, tableDatabase) } - private def listTables(executor: Executor) : Seq[TableIdentifier] = { + private def listTables(executor: Execution) : Seq[TableIdentifier] = { val catalog = executor.catalog val regex = (TableIdentifier(tablePrefix, tableDatabase).unquotedString + "_[0-9]+").r catalog.listTables(tableDatabase.getOrElse(catalog.currentDatabase), tablePrefix + "_*") @@ -114,6 +115,7 @@ case class HiveUnionTableRelation( external, location, format, + options, rowFormat, inputFormat, outputFormat, @@ -133,7 +135,7 @@ case class HiveUnionTableRelation( ) } - private def viewRelationFromTables(executor: Executor) : HiveViewRelation = { + private def viewRelationFromTables(executor: Execution) : HiveViewRelation = { val tables = listTables(executor) val spark = executor.spark val df = tables.map(t => spark.read.table(t.unquotedString)) @@ -183,19 +185,19 @@ case class HiveUnionTableRelation( /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * @param schema - the schema to read. If none is specified, all available columns will be read * @param partitions - List of partitions. If none are specified, all the data will be read * @return */ - override def read(executor: Executor, schema: Option[StructType], partitions: Map[String, FieldValue]): DataFrame = { - require(executor != null) + override def read(execution: Execution, schema: Option[StructType], partitions: Map[String, FieldValue]): DataFrame = { + require(execution != null) require(schema != null) require(partitions != null) logger.info(s"Reading from Hive union relation '$identifier' from UNION VIEW $viewIdentifier using partition values $partitions") - val tableDf = executor.spark.read.table(viewIdentifier.unquotedString) + val tableDf = execution.spark.read.table(viewIdentifier.unquotedString) val df = filterPartition(tableDf, partitions) SchemaUtils.applySchema(df, schema) @@ -204,22 +206,22 @@ case class HiveUnionTableRelation( /** * Writes data into the relation, possibly into a specific partition * - * @param executor + * @param execution * @param df - dataframe to write * @param partition - destination partition */ - override def write(executor: Executor, df: DataFrame, partition: Map[String, SingleValue], mode: OutputMode): Unit = { - require(executor != null) + override def write(execution: Execution, df: DataFrame, partition: Map[String, SingleValue], mode: OutputMode): Unit = { + require(execution != null) requireAllPartitionKeys(partition) - val catalog = executor.catalog + val catalog = execution.catalog val partitionSchema = PartitionSchema(this.partitions) val partitionSpec = partitionSchema.spec(partition) logger.info(s"Writing to Hive union relation '$identifier' using partition values ${HiveDialect.expr.partition(partitionSpec)}") // 1. Find all tables - val allTables = listTables(executor) + val allTables = listTables(execution) // 2. 
Find appropriate table val table = allTables.find { id => @@ -238,25 +240,25 @@ case class HiveUnionTableRelation( // 4. Write to that table val relation = tableRelation(table, None) - relation.write(executor, df, partition, OutputMode.OVERWRITE) + relation.write(execution, df, partition, OutputMode.OVERWRITE) } /** * Removes one or more partitions. * - * @param executor + * @param execution * @param partitions */ - override def truncate(executor: Executor, partitions: Map[String, FieldValue]): Unit = { - require(executor != null) + override def truncate(execution: Execution, partitions: Map[String, FieldValue]): Unit = { + require(execution != null) require(partitions != null) logger.info(s"Truncating Hive union relation '$identifier' partition $partitions") - listTables(executor) + listTables(execution) .foreach { table => val relation = tableRelation(table, None) - relation.truncate(executor, partitions) + relation.truncate(execution, partitions) } } @@ -266,17 +268,17 @@ case class HiveUnionTableRelation( * [[write]] is required for getting up-to-date contents. A [[write]] with output mode * [[OutputMode.ERROR_IF_EXISTS]] then should not throw an error but create the corresponding partition * - * @param executor + * @param execution * @param partition * @return */ - override def loaded(executor: Executor, partition: Map[String, SingleValue]): Trilean = { - require(executor != null) + override def loaded(execution: Execution, partition: Map[String, SingleValue]): Trilean = { + require(execution != null) require(partition != null) requireValidPartitionKeys(partition) - val catalog = executor.catalog + val catalog = execution.catalog if (this.partitions.isEmpty) { if (catalog.tableExists(viewIdentifier)) @@ -289,7 +291,7 @@ case class HiveUnionTableRelation( val partitionSpec = partitionSchema.spec(partition) catalog.tableExists(viewIdentifier) && - listTables(executor).exists { table => + listTables(execution).exists { table => catalog.partitionExists(table, partitionSpec) } } @@ -298,13 +300,13 @@ case class HiveUnionTableRelation( /** * Returns true if the relation already exists, otherwise it needs to be created prior usage * - * @param executor + * @param execution * @return */ - override def exists(executor: Executor): Trilean = { - require(executor != null) + override def exists(execution: Execution): Trilean = { + require(execution != null) - val catalog = executor.catalog + val catalog = execution.catalog catalog.tableExists(viewIdentifier) } @@ -312,23 +314,23 @@ case class HiveUnionTableRelation( * This method will physically create the corresponding relation. This might be a Hive table or a directory. 
The * relation will not contain any data, but all metadata will be processed * - * @param executor + * @param execution */ - override def create(executor: Executor, ifNotExists: Boolean): Unit = { - require(executor != null) + override def create(execution: Execution, ifNotExists: Boolean): Unit = { + require(execution != null) - if (!ifNotExists || exists(executor) == No) { + if (!ifNotExists || exists(execution) == No) { logger.info(s"Creating Hive union relation '$identifier'") // Create first table using current schema val hiveTableRelation = tableRelation(1) - hiveTableRelation.create(executor, ifNotExists) + hiveTableRelation.create(execution, ifNotExists) // Create initial view - val spark = executor.spark + val spark = execution.spark val df = spark.read.table(hiveTableRelation.tableIdentifier.unquotedString) val sql = new SqlBuilder(df).toSQL val hiveViewRelation = viewRelationFromSql(sql) - hiveViewRelation.create(executor, ifNotExists) + hiveViewRelation.create(execution, ifNotExists) } } @@ -336,20 +338,20 @@ case class HiveUnionTableRelation( * This will delete any physical representation of the relation. Depending on the type only some meta data like * a Hive table might be dropped or also the physical files might be deleted * - * @param executor + * @param execution */ - override def destroy(executor: Executor, ifExists: Boolean): Unit = { - require(executor != null) + override def destroy(execution: Execution, ifExists: Boolean): Unit = { + require(execution != null) - if (!ifExists || exists(executor) == Yes) { - val catalog = executor.catalog + if (!ifExists || exists(execution) == Yes) { + val catalog = execution.catalog // Destroy view logger.info(s"Dropping Hive union relation '$identifier' UNION VIEW $viewIdentifier") catalog.dropView(viewIdentifier, ifExists) // Destroy tables - listTables(executor) + listTables(execution) .foreach { table => logger.info(s"Dropping Hive union relation '$identifier' backend table '$table'") catalog.dropTable(table, false) @@ -360,14 +362,14 @@ case class HiveUnionTableRelation( /** * This will update any existing relation to the specified metadata. * - * @param executor + * @param execution */ - override def migrate(executor: Executor): Unit = { - require(executor != null) + override def migrate(execution: Execution): Unit = { + require(execution != null) - val catalog = executor.catalog + val catalog = execution.catalog val sourceSchema = schema.get.sparkSchema - val allTables = listTables(executor) + val allTables = listTables(execution) // 1. Find all tables // 2. Find appropriate table @@ -394,7 +396,7 @@ case class HiveUnionTableRelation( val missingFields = sourceSchema.filterNot(f => targetFields.contains(f.name.toLowerCase(Locale.ROOT))) if (missingFields.nonEmpty) { val newSchema = StructType(targetSchema.fields ++ missingFields) - logger.info(s"Migrating Hive Untion Table relation '$identifier' by adding new columns ${missingFields.map(_.name).mkString(",")} to Hive table $id. New schema is\n ${newSchema.treeString}") + logger.info(s"Migrating Hive Union Table relation '$identifier' by adding new columns ${missingFields.map(_.name).mkString(",")} to Hive table $id. 
New schema is\n ${newSchema.treeString}") catalog.addTableColumns(id, missingFields) } @@ -403,14 +405,14 @@ case class HiveUnionTableRelation( // 3.2 Create new table val tableSet = allTables.toSet val version = (1 to 100000).find(n => !tableSet.contains(tableIdentifier(n))).get - logger.info(s"Migrating Hive Untion Table relation '$identifier' by creating new Hive table ${tableIdentifier(version)}") + logger.info(s"Migrating Hive Union Table relation '$identifier' by creating new Hive table ${tableIdentifier(version)}") val hiveTableRelation = tableRelation(version) - hiveTableRelation.create(executor, false) + hiveTableRelation.create(execution, false) } // 4 Always migrate union view, maybe SQL generator changed - val hiveViewRelation = viewRelationFromTables(executor) - hiveViewRelation.migrate(executor) + val hiveViewRelation = viewRelationFromTables(execution) + hiveViewRelation.migrate(execution) } } @@ -424,7 +426,8 @@ class HiveUnionTableRelationSpec extends RelationSpec with SchemaRelationSpec wi @JsonProperty(value = "viewDatabase", required = false) private var viewDatabase: Option[String] = None @JsonProperty(value = "view", required = true) private var view: String = "" @JsonProperty(value = "external", required = false) private var external: String = "false" - @JsonProperty(value = "format", required = false) private var format: String = _ + @JsonProperty(value = "format", required = false) private var format: Option[String] = None + @JsonProperty(value = "options", required=false) private var options:Map[String,String] = Map() @JsonProperty(value = "rowFormat", required = false) private var rowFormat: Option[String] = None @JsonProperty(value = "inputFormat", required = false) private var inputFormat: Option[String] = None @JsonProperty(value = "outputFormat", required = false) private var outputFormat: Option[String] = None @@ -448,6 +451,7 @@ class HiveUnionTableRelationSpec extends RelationSpec with SchemaRelationSpec wi context.evaluate(view), context.evaluate(external).toBoolean, context.evaluate(format), + context.evaluate(options), context.evaluate(rowFormat), context.evaluate(inputFormat), context.evaluate(outputFormat), diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveViewRelation.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveViewRelation.scala index 797132d07..022cd20e2 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveViewRelation.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/HiveViewRelation.scala @@ -23,7 +23,7 @@ import org.slf4j.LoggerFactory import com.dimajix.common.Trilean import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.MappingUtils import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.model.MappingOutputIdentifier @@ -77,16 +77,16 @@ case class HiveViewRelation( db ++ other } - override def write(executor:Executor, df:DataFrame, partition:Map[String,SingleValue], mode:OutputMode) : Unit = { + override def write(execution:Execution, df:DataFrame, partition:Map[String,SingleValue], mode:OutputMode) : Unit = { throw new UnsupportedOperationException() } /** * Truncating a view actually is non-op - * @param executor + * @param execution * @param partitions */ - override def truncate(executor: Executor, partitions: Map[String, FieldValue]): Unit = { + override def truncate(execution: Execution, 
partitions: Map[String, FieldValue]): Unit = { } /** @@ -94,22 +94,22 @@ case class HiveViewRelation( * [[write]] is required for getting up-to-date contents. A [[write]] with output mode * [[OutputMode.ERROR_IF_EXISTS]] then should not throw an error but create the corresponding partition * - * @param executor + * @param execution * @param partition * @return */ - override def loaded(executor: Executor, partition: Map[String, SingleValue]): Trilean = { - exists(executor) + override def loaded(execution: Execution, partition: Map[String, SingleValue]): Trilean = { + exists(execution) } /** * This method will physically create the corresponding Hive view * - * @param executor + * @param execution */ - override def create(executor:Executor, ifNotExists:Boolean=false) : Unit = { - val select = getSelect(executor) - val catalog = executor.catalog + override def create(execution:Execution, ifNotExists:Boolean=false) : Unit = { + val select = getSelect(execution) + val catalog = execution.catalog if (!ifNotExists || !catalog.tableExists(tableIdentifier)) { logger.info(s"Creating Hive view relation '$identifier' with VIEW $tableIdentifier") catalog.createView(tableIdentifier, select, ifNotExists) @@ -119,12 +119,12 @@ case class HiveViewRelation( /** * This will update any existing Hive view to the current definition. The update will only be performed, if the * definition actually changed. - * @param executor + * @param execution */ - override def migrate(executor:Executor) : Unit = { - val catalog = executor.catalog + override def migrate(execution:Execution) : Unit = { + val catalog = execution.catalog if (catalog.tableExists(tableIdentifier)) { - val newSelect = getSelect(executor) + val newSelect = getSelect(execution) val curTable = catalog.getTable(tableIdentifier) // Check if current table is a VIEW or a table if (curTable.tableType == CatalogTableType.VIEW) { @@ -136,24 +136,24 @@ case class HiveViewRelation( else { logger.warn(s"VIEW target is currently a table, dropping...") catalog.dropTable(tableIdentifier, false) - create(executor, false) + create(execution, false) } } } /** * This will drop the corresponding Hive view - * @param executor + * @param execution */ - override def destroy(executor:Executor, ifExists:Boolean=false) : Unit = { - val catalog = executor.catalog + override def destroy(execution:Execution, ifExists:Boolean=false) : Unit = { + val catalog = execution.catalog if (!ifExists || catalog.tableExists(tableIdentifier)) { logger.info(s"Destroying Hive view relation '$identifier' with VIEW $tableIdentifier") catalog.dropView(tableIdentifier) } } - private def getSelect(executor: Executor) : String = { + private def getSelect(executor: Execution) : String = { val select = sql.orElse(mapping.map(id => buildMappingSql(executor, id))) .getOrElse(throw new IllegalArgumentException("HiveView either requires explicit SQL SELECT statement or mapping")) @@ -162,7 +162,7 @@ case class HiveViewRelation( select } - private def buildMappingSql(executor: Executor, output:MappingOutputIdentifier) : String = { + private def buildMappingSql(executor: Execution, output:MappingOutputIdentifier) : String = { val mapping = context.getMapping(output.mapping) val df = executor.instantiate(mapping, output.output) new SqlBuilder(df).toSQL diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/JdbcRelation.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/JdbcRelation.scala index 7fb9e691b..a77cb2233 100644 --- 
a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/JdbcRelation.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/JdbcRelation.scala @@ -33,7 +33,7 @@ import org.slf4j.LoggerFactory import com.dimajix.common.Trilean import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.jdbc.JdbcUtils import com.dimajix.flowman.jdbc.SqlDialect @@ -107,12 +107,12 @@ case class JdbcRelation( /** * Reads the configured table from the source - * @param executor + * @param execution * @param schema * @return */ - override def read(executor:Executor, schema:Option[StructType], partitions:Map[String,FieldValue] = Map()) : DataFrame = { - require(executor != null) + override def read(execution:Execution, schema:Option[StructType], partitions:Map[String,FieldValue] = Map()) : DataFrame = { + require(execution != null) require(schema != null) require(partitions != null) @@ -120,12 +120,13 @@ case class JdbcRelation( val (url,props) = createProperties() // Read from database. We do not use this.reader, because Spark JDBC sources do not support explicit schemas - val reader = executor.spark.read.options(options) + val reader = execution.spark.read val tableDf = if (query.nonEmpty) { logger.info(s"Reading data from JDBC source '$identifier' using connection '$connection' using partition values $partitions") reader.format("jdbc") + .options(properties) .option("query", query.get) .option("url", url) .options(props.asScala) @@ -136,20 +137,24 @@ case class JdbcRelation( reader.jdbc(url, tableIdentifier.unquotedString, props) } - val df = filterPartition(tableDf, partitions) + // Apply embedded schema, if it is specified. This will remove/cast any columns not present in the + // explicit schema specification of the relation + val schemaDf = applyInputSchema(tableDf) + + val df = filterPartition(schemaDf, partitions) SchemaUtils.applySchema(df, schema) } /** * Writes a given DataFrame into a JDBC connection * - * @param executor + * @param execution * @param df * @param partition * @param mode */ - override def write(executor:Executor, df:DataFrame, partition:Map[String,SingleValue], mode:OutputMode) : Unit = { - require(executor != null) + override def write(execution:Execution, df:DataFrame, partition:Map[String,SingleValue], mode:OutputMode) : Unit = { + require(execution != null) require(df != null) require(partition != null) @@ -167,13 +172,13 @@ case class JdbcRelation( if (partition.isEmpty) { // Write partition into DataBase - this.writer(executor, dfExt, mode.batchMode) + this.writer(execution, dfExt, "jdbc", Map(), mode.batchMode) .mode(mode.batchMode) .jdbc(url, tableIdentifier.unquotedString, props) } else { def writePartition(): Unit = { - this.writer(executor, dfExt, SaveMode.Append) + this.writer(execution, dfExt, "jdbc", Map(), SaveMode.Append) .jdbc(url, tableIdentifier.unquotedString, props) } @@ -206,11 +211,11 @@ case class JdbcRelation( /** * Removes one or more partitions. 
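The new `applyInputSchema(tableDf)` step is only referenced here; its implementation lies outside this hunk. Going by the accompanying comment (remove or cast any columns not covered by the relation's explicit schema), its effect should be roughly equivalent to this hedged, self-contained sketch:

```scala
// Hypothetical illustration only -- the real applyInputSchema helper is not shown in this diff.
// Keep just the declared columns and cast each one to its declared type.
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.types.StructType

def applyDeclaredSchema(df: DataFrame, declared: Option[StructType]): DataFrame =
  declared match {
    case Some(schema) => df.select(schema.fields.map(f => col(f.name).cast(f.dataType)): _*)
    case None         => df
  }
```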
- * @param executor + * @param execution * @param partitions */ - override def truncate(executor: Executor, partitions: Map[String, FieldValue]): Unit = { - require(executor != null) + override def truncate(execution: Execution, partitions: Map[String, FieldValue]): Unit = { + require(execution != null) require(partitions != null) if (query.nonEmpty) @@ -235,11 +240,11 @@ case class JdbcRelation( /** * Returns true if the relation already exists, otherwise it needs to be created prior usage - * @param executor + * @param execution * @return */ - override def exists(executor:Executor) : Trilean = { - require(executor != null) + override def exists(execution:Execution) : Trilean = { + require(execution != null) withConnection{ (con,options) => JdbcUtils.tableExists(con, tableIdentifier, options) @@ -252,12 +257,12 @@ case class JdbcRelation( * [[write]] is required for getting up-to-date contents. A [[write]] with output mode * [[OutputMode.ERROR_IF_EXISTS]] then should not throw an error but create the corresponding partition * - * @param executor + * @param execution * @param partition * @return */ - override def loaded(executor: Executor, partition: Map[String, SingleValue]): Trilean = { - require(executor != null) + override def loaded(execution: Execution, partition: Map[String, SingleValue]): Trilean = { + require(execution != null) require(partition != null) withConnection{ (con,options) => @@ -272,10 +277,10 @@ case class JdbcRelation( /** * This method will physically create the corresponding relation in the target JDBC database. * - * @param executor + * @param execution */ - override def create(executor:Executor, ifNotExists:Boolean=false) : Unit = { - require(executor != null) + override def create(execution:Execution, ifNotExists:Boolean=false) : Unit = { + require(execution != null) if (query.nonEmpty) throw new UnsupportedOperationException(s"Cannot create JDBC relation '$identifier' which is defined by an SQL query") @@ -299,10 +304,10 @@ case class JdbcRelation( /** * This method will physically destroy the corresponding relation in the target JDBC database. - * @param executor + * @param execution */ - override def destroy(executor:Executor, ifExists:Boolean=false) : Unit = { - require(executor != null) + override def destroy(execution:Execution, ifExists:Boolean=false) : Unit = { + require(execution != null) if (query.nonEmpty) throw new UnsupportedOperationException(s"Cannot destroy JDBC relation '$identifier' which is defined by an SQL query") @@ -315,10 +320,11 @@ case class JdbcRelation( } } - override def migrate(executor:Executor) : Unit = ??? + override def migrate(execution:Execution) : Unit = ??? /** - * Creates a Spark schema from the list of fields. + * Creates a Spark schema from the list of fields. This JDBC implementation will add partition columns, since + * these are required for reading. * @return */ override protected def inputSchema : Option[StructType] = { @@ -326,10 +332,11 @@ case class JdbcRelation( } /** - * Creates a Spark schema from the list of fields. The list is used for output operations, i.e. for writing + * Creates a Spark schema from the list of fields. The list is used for output operations, i.e. for writing. + * This JDBC implementation will add partition columns, since these are required for writing. 
* @return */ - override protected def outputSchema : Option[StructType] = { + override protected def outputSchema(execution:Execution) : Option[StructType] = { schema.map(s => StructType(s.fields.map(_.sparkField) ++ partitions.map(_.sparkField))) } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/LocalRelation.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/LocalRelation.scala index 732dbdeb2..eff19a06b 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/LocalRelation.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/LocalRelation.scala @@ -30,7 +30,7 @@ import org.slf4j.LoggerFactory import com.dimajix.common.Trilean import com.dimajix.flowman.catalog.PartitionSpec import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.hadoop.FileCollector import com.dimajix.flowman.jdbc.HiveDialect @@ -55,7 +55,8 @@ case class LocalRelation( override val partitions: Seq[PartitionField], location:Path, pattern:Option[String], - format:String + format:String = "csv", + options:Map[String,String] = Map() ) extends BaseRelation with SchemaRelation with PartitionedRelation { private val logger = LoggerFactory.getLogger(classOf[LocalRelation]) @@ -109,13 +110,13 @@ extends BaseRelation with SchemaRelation with PartitionedRelation { /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * @param schema - the schema to read. If none is specified, all available columns will be read * @param partitions - List of partitions. If none are specified, all the data will be read * @return */ - override def read(executor: Executor, schema: Option[StructType], partitions: Map[String, FieldValue]): DataFrame = { - require(executor != null) + override def read(execution: Execution, schema: Option[StructType], partitions: Map[String, FieldValue]): DataFrame = { + require(execution != null) require(schema != null) require(partitions != null) @@ -131,7 +132,7 @@ extends BaseRelation with SchemaRelation with PartitionedRelation { val data = mapFiles(partitions) { (partition, paths) => logger.info(s"Local relation '$identifier' reads ${paths.size} files under location '${location}' in partition ${partition.spec}") - val reader = executor.spark.readLocal.options(options) + val reader = execution.spark.readLocal.options(options) inputSchema.foreach(s => reader.schema(s)) val df = reader @@ -149,12 +150,12 @@ extends BaseRelation with SchemaRelation with PartitionedRelation { /** * Writes data into the relation, possibly into a specific partition * - * @param executor + * @param execution * @param df - dataframe to write * @param partition - destination partition */ - override def write(executor: Executor, df: DataFrame, partition: Map[String, SingleValue], mode: OutputMode): Unit = { - require(executor != null) + override def write(execution: Execution, df: DataFrame, partition: Map[String, SingleValue], mode: OutputMode): Unit = { + require(execution != null) require(df != null) require(partition != null) @@ -166,7 +167,7 @@ extends BaseRelation with SchemaRelation with PartitionedRelation { logger.info(s"Writing to local output location '$outputPath' (partition=$partition)") // Create correct schema for output - val outputDf = applyOutputSchema(executor, df) + val outputDf = applyOutputSchema(execution, df) val writer = 
outputDf.writeLocal.options(options) writer.format(format) @@ -176,11 +177,11 @@ extends BaseRelation with SchemaRelation with PartitionedRelation { /** * Removes one or more partitions. - * @param executor + * @param execution * @param partitions */ - override def truncate(executor: Executor, partitions: Map[String, FieldValue]): Unit = { - require(executor != null) + override def truncate(execution: Execution, partitions: Map[String, FieldValue]): Unit = { + require(execution != null) require(partitions != null) if (this.partitions.nonEmpty) @@ -204,11 +205,11 @@ extends BaseRelation with SchemaRelation with PartitionedRelation { /** * Returns true if the relation already exists, otherwise it needs to be created prior usage - * @param executor + * @param execution * @return */ - override def exists(executor:Executor) : Trilean = { - require(executor != null) + override def exists(execution:Execution) : Trilean = { + require(execution != null) new File(localDirectory).exists() } @@ -219,12 +220,12 @@ extends BaseRelation with SchemaRelation with PartitionedRelation { * [[write]] is required for getting up-to-date contents. A [[write]] with output mode * [[OutputMode.ERROR_IF_EXISTS]] then should not throw an error but create the corresponding partition * - * @param executor + * @param execution * @param partition * @return */ - override def loaded(executor: Executor, partition: Map[String, SingleValue]): Trilean = { - require(executor != null) + override def loaded(execution: Execution, partition: Map[String, SingleValue]): Trilean = { + require(execution != null) require(partition != null) requireValidPartitionKeys(partition) @@ -246,10 +247,10 @@ extends BaseRelation with SchemaRelation with PartitionedRelation { * This method will physically create the corresponding relation. This might be a Hive table or a directory. The * relation will not contain any data, but all metadata will be processed * - * @param executor + * @param execution */ - override def create(executor: Executor, ifNotExists:Boolean=false): Unit = { - require(executor != null) + override def create(execution: Execution, ifNotExists:Boolean=false): Unit = { + require(execution != null) val path = new File(localDirectory) if (path.exists()) { @@ -267,19 +268,19 @@ extends BaseRelation with SchemaRelation with PartitionedRelation { * This will update any existing relation to the specified metadata. Actually for this file based target, the * command will precisely do nothing. * - * @param executor + * @param execution */ - override def migrate(executor: Executor): Unit = { + override def migrate(execution: Execution): Unit = { } /** * This will delete any physical representation of the relation. 
Depending on the type only some meta data like * a Hive table might be dropped or also the physical files might be deleted * - * @param executor + * @param execution */ - override def destroy(executor: Executor, ifExists:Boolean=false): Unit = { - require(executor != null) + override def destroy(execution: Execution, ifExists:Boolean=false): Unit = { + require(execution != null) val dir = localDirectory logger.info(s"Removing local directory '$dir' of local file relation") @@ -337,6 +338,7 @@ extends BaseRelation with SchemaRelation with PartitionedRelation { class LocalRelationSpec extends RelationSpec with SchemaRelationSpec with PartitionedRelationSpec { @JsonProperty(value="location", required=true) private var location: String = "/" @JsonProperty(value="format", required=true) private var format: String = "csv" + @JsonProperty(value="options", required=false) private var options:Map[String,String] = Map() @JsonProperty(value="pattern", required=false) private var pattern: Option[String] = None /** @@ -351,7 +353,8 @@ class LocalRelationSpec extends RelationSpec with SchemaRelationSpec with Partit partitions.map(_.instantiate(context)), makePath(context.evaluate(location)), pattern, - context.evaluate(format) + context.evaluate(format), + context.evaluate(options) ) } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/MockRelation.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/MockRelation.scala new file mode 100644 index 000000000..2eabd61e3 --- /dev/null +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/MockRelation.scala @@ -0,0 +1,251 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.relation + +import com.fasterxml.jackson.annotation.JsonProperty +import org.apache.spark.sql.DataFrame +import org.apache.spark.sql.types.StructType + +import com.dimajix.common.No +import com.dimajix.common.Trilean +import com.dimajix.common.Yes +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.execution.OutputMode +import com.dimajix.flowman.model.BaseRelation +import com.dimajix.flowman.model.PartitionField +import com.dimajix.flowman.model.Relation +import com.dimajix.flowman.model.RelationIdentifier +import com.dimajix.flowman.model.ResourceIdentifier +import com.dimajix.flowman.model.Schema +import com.dimajix.flowman.model.SchemaRelation +import com.dimajix.flowman.types +import com.dimajix.flowman.types.ArrayRecord +import com.dimajix.flowman.types.FieldValue +import com.dimajix.flowman.types.MapRecord +import com.dimajix.flowman.types.Record +import com.dimajix.flowman.types.SingleValue +import com.dimajix.flowman.types.ValueRecord +import com.dimajix.flowman.util.SchemaUtils +import com.dimajix.spark.sql.DataFrameUtils + + +case class MockRelation( + override val instanceProperties:Relation.Properties, + relation: RelationIdentifier, + records:Seq[Record] = Seq() +) extends BaseRelation with SchemaRelation { + private lazy val mocked = context.getRelation(relation, false) + private var _exists = false + private var _loaded = false + + /** + * Returns the list of all resources which will be created by this relation. + * + * @return + */ + override def provides: Set[ResourceIdentifier] = Set() + + /** + * Returns the list of all resources which will be required by this relation for creation. + * + * @return + */ + override def requires: Set[ResourceIdentifier] = Set() + + /** + * Returns the list of all resources which will are managed by this relation for reading or writing a specific + * partition. The list will be specifically created for a specific partition, or for the full relation (when the + * partition is empty) + * + * @param partitions + * @return + */ + override def resources(partitions: Map[String, FieldValue]): Set[ResourceIdentifier] = Set() + + /** + * Reads data from the relation, possibly from specific partitions + * + * @param execution + * @param schema - the schema to read. If none is specified, all available columns will be read + * @param partitions - List of partitions. 
If none are specified, all the data will be read + * @return + */ + override def read(execution: Execution, schema: Option[StructType], partitions: Map[String, FieldValue]): DataFrame = { + require(execution != null) + require(schema != null) + require(partitions != null) + + if (records.nonEmpty) { + val fullSchema = this.schema.map(s => com.dimajix.flowman.types.StructType(s.fields ++ this.partitions.map(_.field))) + .getOrElse(throw new IllegalArgumentException("Cannot mock relation with records without schema information")) + + val values = records.map(_.toArray(fullSchema)) + val df = DataFrameUtils.ofStringValues(execution.spark, values, fullSchema.sparkType) + SchemaUtils.applySchema(df, schema) + } + else { + val readSchema = schema.orElse(inputSchema) + .getOrElse(throw new IllegalArgumentException("Mock relation either needs own schema or a desired input schema")) + + DataFrameUtils.ofSchema(execution.spark, readSchema) + } + } + + /** + * Writes data into the relation, possibly into a specific partition + * + * @param execution + * @param df - dataframe to write + * @param partition - destination partition + */ + override def write(execution: Execution, df: DataFrame, partition: Map[String, SingleValue], mode: OutputMode): Unit = { + require(execution != null) + require(partition != null) + + // Force materialization of all records + df.count() + + _exists = true + _loaded = true + } + + /** + * Removes one or more partitions. + * + * @param execution + * @param partitions + */ + override def truncate(execution: Execution, partitions: Map[String, FieldValue]): Unit = { + _loaded = false + } + + /** + * Returns true if the relation already exists, otherwise it needs to be created prior usage. This refers to + * the relation itself, not to the data or a specific partition. [[loaded]] should return [[Yes]] after + * [[[create]] has been called, and it should return [[No]] after [[destroy]] has been called. + * + * @param execution + * @return + */ + override def exists(execution: Execution): Trilean = _exists + + /** + * Returns true if the target partition exists and contains valid data. Absence of a partition indicates that a + * [[write]] is required for getting up-to-date contents. A [[write]] with output mode + * [[OutputMode.ERROR_IF_EXISTS]] then should not throw an error but create the corresponding partition + * + * @param execution + * @param partition + * @return + */ + override def loaded(execution: Execution, partition: Map[String, SingleValue]): Trilean = { + if (_loaded) + Yes + else + No + } + + /** + * This method will physically create the corresponding relation. This might be a Hive table or a directory. The + * relation will not contain any data, but all metadata will be processed + * + * @param execution + */ + override def create(execution: Execution, ifNotExists: Boolean): Unit = { + _exists = true + } + + /** + * This will delete any physical representation of the relation. Depending on the type only some meta data like + * a Hive table might be dropped or also the physical files might be deleted + * + * @param execution + */ + override def destroy(execution: Execution, ifExists: Boolean): Unit = { + _loaded = false + _exists = false + } + + /** + * This will update any existing relation to the specified metadata. 
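MockRelation builds its result through Flowman's `DataFrameUtils.ofStringValues` and `DataFrameUtils.ofSchema`, neither of which appears in this diff. Assuming they behave as their names suggest, the no-records path reduces to constructing an empty DataFrame that still carries the mocked relation's schema, roughly as in this plain-Spark sketch (the schema and session below are made up for illustration):

```scala
// Stand-alone sketch of the empty-DataFrame path assumed to back MockRelation.read when no
// records are configured: zero rows, but the full schema of the mocked relation, so that
// downstream mappings can still be wired up and validated in tests.
import org.apache.spark.sql.{Row, SparkSession}
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

val spark = SparkSession.builder().master("local[1]").appName("mock-relation-sketch").getOrCreate()

// hypothetical stand-in for the schema of the relation being mocked
val mockedSchema = StructType(Seq(
  StructField("id", IntegerType),
  StructField("name", StringType)
))

val empty = spark.createDataFrame(spark.sparkContext.emptyRDD[Row], mockedSchema)
empty.printSchema()   // same columns as the mocked relation, no data
```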
+ * + * @param execution + */ + override def migrate(execution: Execution): Unit = {} + + /** + * Returns the schema of the relation, excluding partition columns + * + * @return + */ + override def schema: Option[Schema] = mocked.schema + + /** + * Returns the list of partition columns + * + * @return + */ + override def partitions: Seq[PartitionField] = mocked.partitions + + /** + * Returns the schema of the relation, either from an explicitly specified schema or by schema inference from + * the physical source + * + * @param execution + * @return + */ + override def describe(execution: Execution): types.StructType = mocked.describe(execution) + + /** + * Creates a Spark schema from the list of fields. This mocking implementation will add partition columns, since + * these are required for reading. + * @return + */ + override protected def inputSchema : Option[StructType] = { + schema.map(s => StructType(s.fields.map(_.sparkField) ++ partitions.map(_.sparkField))) + } + + /** + * Creates a Spark schema from the list of fields. The list is used for output operations, i.e. for writing. + * This mocking implementation will add partition columns, since these are required for writing. + * @return + */ + override protected def outputSchema(execution:Execution) : Option[StructType] = { + schema.map(s => StructType(s.fields.map(_.sparkField) ++ partitions.map(_.sparkField))) + } +} + + +class MockRelationSpec extends RelationSpec { + @JsonProperty(value="relation", required=true) private var relation: Option[String] = None + @JsonProperty(value="records", required=false) private var records:Seq[Record] = Seq() + + /** + * Creates the instance of the specified Relation with all variable interpolation being performed + * @param context + * @return + */ + override def instantiate(context: Context): MockRelation = { + + MockRelation( + instanceProperties(context), + RelationIdentifier(context.evaluate(relation).getOrElse(name)), + records.map(_.map(context.evaluate)) + ) + } +} diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/NullRelation.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/NullRelation.scala index f21f9deee..62f9c5329 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/NullRelation.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/NullRelation.scala @@ -23,7 +23,7 @@ import org.apache.spark.sql.types.StructType import com.dimajix.common.Trilean import com.dimajix.common.Unknown import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.model.BaseRelation import com.dimajix.flowman.model.PartitionField @@ -68,43 +68,41 @@ case class NullRelation( /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * @param schema * @param partitions * @return */ - override def read(executor:Executor, schema:Option[StructType], partitions:Map[String,FieldValue] = Map()) : DataFrame = { - require(executor != null) + override def read(execution:Execution, schema:Option[StructType], partitions:Map[String,FieldValue] = Map()) : DataFrame = { + require(execution != null) require(schema != null) require(partitions != null) - if (inputSchema == null && schema.isEmpty) + val readSchema = schema.orElse(inputSchema) + if (readSchema.isEmpty) throw new IllegalArgumentException("Null relation either needs own 
schema or a desired input schema") - // Add partitions values as columns - val fullSchema = inputSchema.map(s => StructType(s.fields ++ this.partitions.map(_.sparkField))) - val readSchema = schema.orElse(fullSchema).get - val rdd = executor.spark.sparkContext.emptyRDD[Row] - executor.spark.createDataFrame(rdd, readSchema) + val rdd = execution.spark.sparkContext.emptyRDD[Row] + execution.spark.createDataFrame(rdd, readSchema.get) } /** * Writes data into the relation, possibly into a specific partition * - * @param executor + * @param execution * @param df * @param partition */ - override def write(executor:Executor, df:DataFrame, partition:Map[String,SingleValue], mode:OutputMode) : Unit = { - require(executor != null) + override def write(execution:Execution, df:DataFrame, partition:Map[String,SingleValue], mode:OutputMode) : Unit = { + require(execution != null) require(partition != null) // Force materialization of all records df.count() } - override def truncate(executor: Executor, partitions: Map[String, FieldValue]): Unit = { - require(executor != null) + override def truncate(execution: Execution, partitions: Map[String, FieldValue]): Unit = { + require(execution != null) } @@ -113,28 +111,46 @@ case class NullRelation( * [[write]] is required for getting up-to-date contents. A [[write]] with output mode * [[OutputMode.ERROR_IF_EXISTS]] then should not throw an error but create the corresponding partition * - * @param executor + * @param execution * @param partition * @return */ - override def loaded(executor: Executor, partition: Map[String, SingleValue]): Trilean = Unknown + override def loaded(execution: Execution, partition: Map[String, SingleValue]): Trilean = Unknown /** * Returns true if the relation already exists, otherwise it needs to be created prior usage * - * @param executor + * @param execution * @return */ - override def exists(executor:Executor) : Trilean = true + override def exists(execution:Execution) : Trilean = true - override def create(executor: Executor, ifNotExists:Boolean=false): Unit = { - require(executor != null) + override def create(execution: Execution, ifNotExists:Boolean=false): Unit = { + require(execution != null) } - override def destroy(executor: Executor, ifExists:Boolean=false): Unit = { - require(executor != null) + override def destroy(execution: Execution, ifExists:Boolean=false): Unit = { + require(execution != null) } - override def migrate(executor: Executor): Unit = { - require(executor != null) + override def migrate(execution: Execution): Unit = { + require(execution != null) + } + + /** + * Creates a Spark schema from the list of fields. This JDBC implementation will add partition columns, since + * these are required for reading. + * @return + */ + override protected def inputSchema : Option[StructType] = { + schema.map(s => StructType(s.fields.map(_.sparkField) ++ partitions.map(_.sparkField))) + } + + /** + * Creates a Spark schema from the list of fields. The list is used for output operations, i.e. for writing. + * This JDBC implementation will add partition columns, since these are required for writing. 
+ * @return + */ + override protected def outputSchema(execution:Execution) : Option[StructType] = { + schema.map(s => StructType(s.fields.map(_.sparkField) ++ partitions.map(_.sparkField))) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/ProvidedRelation.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/ProvidedRelation.scala index 7d397bd1b..cc1d08a28 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/ProvidedRelation.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/ProvidedRelation.scala @@ -24,7 +24,7 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.model.BaseRelation import com.dimajix.flowman.model.Relation @@ -72,32 +72,32 @@ class ProvidedRelation( /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * @param schema * @param partitions * @return */ - override def read(executor:Executor, schema:Option[StructType], partitions:Map[String,FieldValue] = Map()) : DataFrame = { - require(executor != null) + override def read(execution:Execution, schema:Option[StructType], partitions:Map[String,FieldValue] = Map()) : DataFrame = { + require(execution != null) require(schema != null) require(partitions != null) - val df = executor.spark.table(table) + val df = execution.spark.table(table) SchemaUtils.applySchema(df, schema) } /** * Writes data into the relation, possibly into a specific partition * - * @param executor + * @param execution * @param df * @param partition */ - override def write(executor:Executor, df:DataFrame, partition:Map[String,SingleValue], mode:OutputMode) : Unit = { + override def write(execution:Execution, df:DataFrame, partition:Map[String,SingleValue], mode:OutputMode) : Unit = { throw new UnsupportedOperationException(s"Writing into provided table '$table' not supported in relation '$identifier'") } - override def truncate(executor: Executor, partitions: Map[String, FieldValue]): Unit = { + override def truncate(execution: Execution, partitions: Map[String, FieldValue]): Unit = { throw new UnsupportedOperationException(s"Truncating provided table '$table' not supported in relation '$identifier'") } @@ -107,40 +107,40 @@ class ProvidedRelation( * [[write]] is required for getting up-to-date contents. 
A [[write]] with output mode * [[OutputMode.ERROR_IF_EXISTS]] then should not throw an error but create the corresponding partition * - * @param executor + * @param execution * @param partition * @return */ - override def loaded(executor: Executor, partition: Map[String, SingleValue]): Trilean = { - require(executor != null) + override def loaded(execution: Execution, partition: Map[String, SingleValue]): Trilean = { + require(execution != null) require(partition != null) - executor.spark.catalog.tableExists(table) + execution.spark.catalog.tableExists(table) } /** * Returns true if the relation already exists, otherwise it needs to be created prior usage * - * @param executor + * @param execution * @return */ - override def exists(executor:Executor) : Trilean = { - require(executor != null) + override def exists(execution:Execution) : Trilean = { + require(execution != null) - executor.spark.catalog.tableExists(table) + execution.spark.catalog.tableExists(table) } - override def create(executor: Executor, ifNotExists:Boolean=false): Unit = { - if (!ifNotExists && exists(executor) == No) + override def create(execution: Execution, ifNotExists:Boolean=false): Unit = { + if (!ifNotExists && exists(execution) == No) throw new UnsupportedOperationException(s"Cannot create provided table '$table' in relation '$identifier'") } - override def destroy(executor: Executor, ifExists:Boolean=false): Unit = { - if (!ifExists && exists(executor) == Yes) + override def destroy(execution: Execution, ifExists:Boolean=false): Unit = { + if (!ifExists && exists(execution) == Yes) throw new UnsupportedOperationException(s"Cannot destroy provided table '$table' in relation '$identifier'") } - override def migrate(executor: Executor): Unit = {} + override def migrate(execution: Execution): Unit = {} } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/RelationSpec.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/RelationSpec.scala index 19ca82664..6f9b4d52f 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/RelationSpec.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/RelationSpec.scala @@ -22,10 +22,10 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo import com.fasterxml.jackson.databind.util.StdConverter import com.dimajix.common.TypeRegistry -import com.dimajix.flowman.annotation.RelationType import com.dimajix.flowman.execution.Context import com.dimajix.flowman.model.Relation import com.dimajix.flowman.spec.NamedSpec +import com.dimajix.flowman.spec.annotation.RelationType import com.dimajix.flowman.spi.ClassAnnotationHandler @@ -43,22 +43,23 @@ object RelationSpec extends TypeRegistry[RelationSpec] { */ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "kind", visible=true) @JsonSubTypes(value = Array( - new JsonSubTypes.Type(name = "jdbc", value = classOf[JdbcRelationSpec]), - new JsonSubTypes.Type(name = "table", value = classOf[HiveTableRelationSpec]), - new JsonSubTypes.Type(name = "view", value = classOf[HiveViewRelationSpec]), + new JsonSubTypes.Type(name = "empty", value = classOf[NullRelationSpec]), + new JsonSubTypes.Type(name = "file", value = classOf[FileRelationSpec]), new JsonSubTypes.Type(name = "generic", value = classOf[GenericRelationSpec]), new JsonSubTypes.Type(name = "hiveTable", value = classOf[HiveTableRelationSpec]), new JsonSubTypes.Type(name = "hiveUnionTable", value = classOf[HiveUnionTableRelationSpec]), new JsonSubTypes.Type(name = "hiveView", value = 
classOf[HiveViewRelationSpec]), - new JsonSubTypes.Type(name = "file", value = classOf[FileRelationSpec]), + new JsonSubTypes.Type(name = "jdbc", value = classOf[JdbcRelationSpec]), new JsonSubTypes.Type(name = "local", value = classOf[LocalRelationSpec]), + new JsonSubTypes.Type(name = "mock", value = classOf[MockRelationSpec]), + new JsonSubTypes.Type(name = "null", value = classOf[NullRelationSpec]), new JsonSubTypes.Type(name = "provided", value = classOf[ProvidedRelationSpec]), + new JsonSubTypes.Type(name = "table", value = classOf[HiveTableRelationSpec]), new JsonSubTypes.Type(name = "template", value = classOf[TemplateRelationSpec]), - new JsonSubTypes.Type(name = "null", value = classOf[NullRelationSpec]) + new JsonSubTypes.Type(name = "view", value = classOf[HiveViewRelationSpec]) )) abstract class RelationSpec extends NamedSpec[Relation] { @JsonProperty(value="description", required = false) private var description: Option[String] = None - @JsonProperty(value="options", required=false) private var options:Map[String,String] = Map() override def instantiate(context:Context) : Relation @@ -76,14 +77,12 @@ abstract class RelationSpec extends NamedSpec[Relation] { name, kind, context.evaluate(labels), - description.map(context.evaluate), - context.evaluate(options) + description.map(context.evaluate) ) } } - class RelationSpecAnnotationHandler extends ClassAnnotationHandler { override def annotation: Class[_] = classOf[RelationType] diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/TemplateRelation.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/TemplateRelation.scala index 4789c304b..3a6fb3efe 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/TemplateRelation.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/relation/TemplateRelation.scala @@ -22,9 +22,10 @@ import org.apache.spark.sql.types.StructType import com.dimajix.common.Trilean import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.execution.ScopeContext +import com.dimajix.flowman.graph.Linker import com.dimajix.flowman.model.BaseRelation import com.dimajix.flowman.model.PartitionField import com.dimajix.flowman.model.Relation @@ -101,45 +102,45 @@ case class TemplateRelation( /** * Reads data from the relation, possibly from specific partitions * - * @param executor + * @param execution * @param schema - the schema to read. If none is specified, all available columns will be read * @param partitions - List of partitions. 
If none are specified, all the data will be read * @return */ - override def read(executor: Executor, schema: Option[StructType], partitions: Map[String, FieldValue]): DataFrame = { - require(executor != null) + override def read(execution: Execution, schema: Option[StructType], partitions: Map[String, FieldValue]): DataFrame = { + require(execution != null) require(schema != null) require(partitions != null) - relationInstance.read(executor, schema, partitions) + relationInstance.read(execution, schema, partitions) } /** * Writes data into the relation, possibly into a specific partition * - * @param executor + * @param execution * @param df - dataframe to write * @param partition - destination partition */ - override def write(executor: Executor, df: DataFrame, partition: Map[String, SingleValue], mode: OutputMode): Unit = { - require(executor != null) + override def write(execution: Execution, df: DataFrame, partition: Map[String, SingleValue], mode: OutputMode): Unit = { + require(execution != null) require(df != null) require(partition != null) - relationInstance.write(executor, df, partition, mode) + relationInstance.write(execution, df, partition, mode) } /** * Removes one or more partitions. * - * @param executor + * @param execution * @param partitions */ - override def truncate(executor: Executor, partitions: Map[String, FieldValue]): Unit = { - require(executor != null) + override def truncate(execution: Execution, partitions: Map[String, FieldValue]): Unit = { + require(execution != null) require(partitions != null) - relationInstance.truncate(executor, partitions) + relationInstance.truncate(execution, partitions) } @@ -148,62 +149,70 @@ case class TemplateRelation( * [[write]] is required for getting up-to-date contents. A [[write]] with output mode * [[OutputMode.ERROR_IF_EXISTS]] then should not throw an error but create the corresponding partition * - * @param executor + * @param execution * @param partition * @return */ - override def loaded(executor: Executor, partition: Map[String, SingleValue]): Trilean = { - require(executor != null) + override def loaded(execution: Execution, partition: Map[String, SingleValue]): Trilean = { + require(execution != null) require(partition != null) - relationInstance.loaded(executor, partition) + relationInstance.loaded(execution, partition) } /** * Returns true if the relation already exists, otherwise it needs to be created prior usage * - * @param executor + * @param execution * @return */ - override def exists(executor: Executor): Trilean = { - require(executor != null) + override def exists(execution: Execution): Trilean = { + require(execution != null) - relationInstance.exists(executor) + relationInstance.exists(execution) } /** * This method will physically create the corresponding relation. This might be a Hive table or a directory. The * relation will not contain any data, but all metadata will be processed * - * @param executor + * @param execution */ - override def create(executor: Executor, ifNotExists: Boolean): Unit = { - require(executor != null) + override def create(execution: Execution, ifNotExists: Boolean): Unit = { + require(execution != null) - relationInstance.create(executor, ifNotExists) + relationInstance.create(execution, ifNotExists) } /** * This will delete any physical representation of the relation. 
Depending on the type only some meta data like * a Hive table might be dropped or also the physical files might be deleted * - * @param executor + * @param execution */ - override def destroy(executor: Executor, ifExists: Boolean): Unit = { - require(executor != null) + override def destroy(execution: Execution, ifExists: Boolean): Unit = { + require(execution != null) - relationInstance.destroy(executor, ifExists) + relationInstance.destroy(execution, ifExists) } /** * This will update any existing relation to the specified metadata. * - * @param executor + * @param execution */ - override def migrate(executor: Executor): Unit = { - require(executor != null) + override def migrate(execution: Execution): Unit = { + require(execution != null) - relationInstance.migrate(executor) + relationInstance.migrate(execution) + } + + /** + * Creates all known links for building a descriptive graph of the whole data flow + * Params: linker - The linker object to use for creating new edges + */ + override def link(linker: Linker): Unit = { + relationInstance.link(linker) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/AvroSchema.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/AvroSchema.scala index 4c0504dbd..a356a27d4 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/AvroSchema.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/AvroSchema.scala @@ -18,6 +18,7 @@ package com.dimajix.flowman.spec.schema import java.net.URL +import com.fasterxml.jackson.annotation.JsonProperty import org.apache.hadoop.fs.Path import org.slf4j.LoggerFactory @@ -35,7 +36,8 @@ case class AvroSchema( instanceProperties:Schema.Properties, override val file: Option[Path], override val url: Option[URL], - override val spec: Option[String] + override val spec: Option[String], + nullable: Boolean ) extends ExternalSchema { protected override val logger = LoggerFactory.getLogger(classOf[ExternalSchema]) @@ -47,7 +49,7 @@ case class AvroSchema( val spec = loadSchemaSpec val avroSchema = new org.apache.avro.Schema.Parser().parse(spec) CachedSchema( - AvroSchemaUtils.fromAvro(avroSchema), + AvroSchemaUtils.fromAvro(avroSchema, nullable), Option(avroSchema.getDoc) ) } @@ -56,6 +58,8 @@ case class AvroSchema( class AvroSchemaSpec extends ExternalSchemaSpec { + @JsonProperty(value="nullable", required=false) private var nullable: String = "false" + /** * Creates the instance of the specified Schema with all variable interpolation being performed * @param context @@ -66,7 +70,8 @@ class AvroSchemaSpec extends ExternalSchemaSpec { Schema.Properties(context), file.map(context.evaluate).filter(_.nonEmpty).map(p => new Path(p)), url.map(context.evaluate).filter(_.nonEmpty).map(u => new URL(u)), - context.evaluate(spec) + context.evaluate(spec), + context.evaluate(nullable).toBoolean ) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/EmbeddedSchema.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/EmbeddedSchema.scala index fae8ef303..5c5286fb4 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/EmbeddedSchema.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/EmbeddedSchema.scala @@ -26,9 +26,9 @@ import com.dimajix.flowman.types.Field case class EmbeddedSchema( instanceProperties : Schema.Properties, - description : Option[String], - fields : Seq[Field], - primaryKey : Seq[String] + description : Option[String] = None, + fields : Seq[Field] = Seq(), + primaryKey 
: Seq[String] = Seq() ) extends AbstractInstance with Schema { } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/MappingSchema.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/MappingSchema.scala index bb39ca15e..593ee17ab 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/MappingSchema.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/MappingSchema.scala @@ -18,7 +18,7 @@ package com.dimajix.flowman.spec.schema import com.fasterxml.jackson.annotation.JsonProperty -import com.dimajix.flowman.execution.AnalyzingExecutor +import com.dimajix.flowman.execution.AnalyzingExecution import com.dimajix.flowman.execution.Context import com.dimajix.flowman.model.AbstractInstance import com.dimajix.flowman.model.MappingOutputIdentifier @@ -38,7 +38,7 @@ case class MappingSchema ( mapping: MappingOutputIdentifier ) extends AbstractInstance with Schema { private lazy val cachedFields = { - val executor = new AnalyzingExecutor(context) + val executor = new AnalyzingExecution(context) val instance = context.getMapping(mapping.mapping) executor.describe(instance, mapping.output).fields } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/SchemaSpec.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/SchemaSpec.scala index b040ccc54..d12a873ff 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/SchemaSpec.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/schema/SchemaSpec.scala @@ -20,10 +20,10 @@ import com.fasterxml.jackson.annotation.JsonSubTypes import com.fasterxml.jackson.annotation.JsonTypeInfo import com.dimajix.common.TypeRegistry -import com.dimajix.flowman.annotation.SchemaType import com.dimajix.flowman.execution.Context import com.dimajix.flowman.model.Schema import com.dimajix.flowman.spec.Spec +import com.dimajix.flowman.spec.annotation.SchemaType import com.dimajix.flowman.spi.ClassAnnotationHandler @@ -38,11 +38,9 @@ object SchemaSpec extends TypeRegistry[SchemaSpec] { new JsonSubTypes.Type(name = "inline", value = classOf[EmbeddedSchemaSpec]), new JsonSubTypes.Type(name = "embedded", value = classOf[EmbeddedSchemaSpec]), new JsonSubTypes.Type(name = "avro", value = classOf[AvroSchemaSpec]), - new JsonSubTypes.Type(name = "json", value = classOf[JsonSchemaSpec]), new JsonSubTypes.Type(name = "mapping", value = classOf[MappingSchemaSpec]), new JsonSubTypes.Type(name = "relation", value = classOf[RelationSchemaSpec]), new JsonSubTypes.Type(name = "spark", value = classOf[SparkSchemaSpec]), - new JsonSubTypes.Type(name = "swagger", value = classOf[SwaggerSchemaSpec]), new JsonSubTypes.Type(name = "union", value = classOf[UnionSchemaSpec]) )) abstract class SchemaSpec extends Spec[Schema] { diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/storage/StorageSpec.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/storage/StorageSpec.scala index 0efc57c7c..6e53d8eb1 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/storage/StorageSpec.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/storage/StorageSpec.scala @@ -20,9 +20,9 @@ import com.fasterxml.jackson.annotation.JsonSubTypes import com.fasterxml.jackson.annotation.JsonTypeInfo import com.dimajix.common.TypeRegistry -import com.dimajix.flowman.annotation.StoreType import com.dimajix.flowman.execution.Context import com.dimajix.flowman.spec.Spec +import com.dimajix.flowman.spec.annotation.StoreType import 
com.dimajix.flowman.spi.ClassAnnotationHandler import com.dimajix.flowman.storage.Store diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/BlackholeTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/BlackholeTarget.scala index e857198fb..331cef7bb 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/BlackholeTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/BlackholeTarget.scala @@ -22,7 +22,7 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.MappingUtils import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.model.BaseTarget @@ -55,11 +55,11 @@ case class BlackholeTarget( /** * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase) : Trilean = { + override def dirty(execution: Execution, phase: Phase) : Trilean = { phase match { case Phase.BUILD => Yes case _ => No @@ -72,7 +72,7 @@ case class BlackholeTarget( * * @param executor */ - override def build(executor:Executor) : Unit = { + override def build(executor:Execution) : Unit = { val mapping = context.getMapping(this.mapping.mapping) val df = executor.instantiate(mapping, this.mapping.output) df.write.format("null").save() diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CompareTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CompareTarget.scala index 9f8cda876..797d0be37 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CompareTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CompareTarget.scala @@ -24,7 +24,7 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.VerificationFailedException import com.dimajix.flowman.model.BaseTarget @@ -33,6 +33,7 @@ import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.flowman.model.Target import com.dimajix.flowman.spec.dataset.DatasetSpec import com.dimajix.flowman.transforms.SchemaEnforcer +import com.dimajix.spark.sql.DataFrameUtils case class CompareTarget( @@ -64,11 +65,11 @@ case class CompareTarget( * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. 
* - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase): Trilean = { + override def dirty(execution: Execution, phase: Phase): Trilean = { phase match { case Phase.VERIFY => Yes case _ => No @@ -80,7 +81,7 @@ case class CompareTarget( * * @param executor */ - override protected def verify(executor: Executor): Unit = { + override protected def verify(executor: Execution): Unit = { logger.info(s"Comparing actual dataset '${actual.name}' with expected dataset '${expected.name}'") val expectedDf = expected.read(executor, None) val actualDf = try { @@ -94,73 +95,16 @@ case class CompareTarget( val xfs = SchemaEnforcer(expectedDf.schema) val conformedDf = xfs.transform(actualDf) - val expectedRows = expectedDf.collect().toSeq - val actualRows = conformedDf.collect().toSeq - - if (prepareAnswer(expectedRows) != prepareAnswer(actualRows)) { + val diff = DataFrameUtils.diff(expectedDf, conformedDf) + if (diff.nonEmpty) { logger.error(s"Dataset '${actual.name}' does not equal the expected dataset '${expected.name}'") - logger.error(s"Difference between datasets: \n${genError(expectedRows, actualRows)}") + logger.error(s"Difference between datasets: \n${diff.get}") throw new VerificationFailedException(identifier) } else { logger.info(s"Dataset '${actual.name}' matches the expected dataset '${expected.name}'") } } - - private def prepareAnswer(answer: Seq[Row]): Seq[Row] = { - // Converts data to types that we can do equality comparison using Scala collections. - // For BigDecimal type, the Scala type has a better definition of equality test (similar to - // Java's java.math.BigDecimal.compareTo). - // For binary arrays, we convert it to Seq to avoid of calling java.util.Arrays.equals for - // equality test. - answer.map(prepareRow).sortBy(_.toString()) - } - - // We need to call prepareRow recursively to handle schemas with struct types. - private def prepareRow(row: Row): Row = { - Row.fromSeq(row.toSeq.map { - case null => null - case d: java.math.BigDecimal => BigDecimal(d) - // Convert array to Seq for easy equality checkJob. 
- case b: Array[_] => b.toSeq - case r: Row => prepareRow(r) - case o => o - }) - } - - private def sideBySide(left: Seq[String], right: Seq[String]): Seq[String] = { - val maxLeftSize = left.map(_.length).max - val leftPadded = left ++ Seq.fill(math.max(right.size - left.size, 0))("") - val rightPadded = right ++ Seq.fill(math.max(left.size - right.size, 0))("") - - leftPadded.zip(rightPadded).map { - case (l, r) => (if (l == r) " " else "!") + l + (" " * ((maxLeftSize - l.length) + 3)) + r - } - } - - private def genError(expectedAnswer: Seq[Row], - sparkAnswer: Seq[Row]): String = { - val getRowType: Option[Row] => String = row => - row.map(row => - if (row.schema == null) { - "struct<>" - } else { - s"${row.schema.catalogString}" - }).getOrElse("struct<>") - - s""" - |== Results == - |${ - sideBySide( - s"== Expected - ${expectedAnswer.size} ==" +: - getRowType(expectedAnswer.headOption) +: - prepareAnswer(expectedAnswer).map(_.toString()), - s"== Actual - ${sparkAnswer.size} ==" +: - getRowType(sparkAnswer.headOption) +: - prepareAnswer(sparkAnswer).map(_.toString())).mkString("\n") - } - """.stripMargin - } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/ConsoleTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/ConsoleTarget.scala index d732bb79d..232363a99 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/ConsoleTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/ConsoleTarget.scala @@ -22,7 +22,7 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.model.BaseTarget import com.dimajix.flowman.model.Dataset @@ -34,33 +34,37 @@ import com.dimajix.flowman.spec.dataset.DatasetSpec import com.dimajix.flowman.spec.dataset.MappingDataset import com.dimajix.flowman.spec.dataset.RelationDataset import com.dimajix.flowman.types.SingleValue +import com.dimajix.flowman.util.ConsoleUtils object ConsoleTarget { - def apply(context: Context, dataset: Dataset, limit:Int, columns:Seq[String]) : ConsoleTarget = { + def apply(context: Context, dataset: Dataset, limit:Int, columns:Seq[String], header:Boolean, csv:Boolean) : ConsoleTarget = { new ConsoleTarget( Target.Properties(context), dataset, limit, - true, + header, + csv, columns ) } - def apply(context: Context, output: MappingOutputIdentifier, limit:Int, columns:Seq[String]) : ConsoleTarget = { + def apply(context: Context, output: MappingOutputIdentifier, limit:Int, columns:Seq[String], header:Boolean, csv:Boolean) : ConsoleTarget = { new ConsoleTarget( Target.Properties(context), MappingDataset(context, output), limit, - true, + header, + csv, columns ) } - def apply(context: Context, relation: RelationIdentifier, limit:Int, columns:Seq[String], partitions:Map[String,SingleValue]=Map()) : ConsoleTarget = { + def apply(context: Context, relation: RelationIdentifier, limit:Int, columns:Seq[String], partitions:Map[String,SingleValue]=Map(), header:Boolean, csv:Boolean) : ConsoleTarget = { new ConsoleTarget( Target.Properties(context), RelationDataset(context, relation, partitions), limit, - true, + header, + csv, columns ) } @@ -70,6 +74,7 @@ case class ConsoleTarget( dataset:Dataset, limit:Int, header:Boolean, + csv:Boolean, columns:Seq[String] ) extends BaseTarget { /** @@ -92,11 +97,11 @@ case 
class ConsoleTarget( /** * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase) : Trilean = { + override def dirty(execution: Execution, phase: Phase) : Trilean = { phase match { case Phase.BUILD => Yes case _ => No @@ -108,7 +113,7 @@ case class ConsoleTarget( * * @param executor */ - override def build(executor:Executor) : Unit = { + override def build(executor:Execution) : Unit = { require(executor != null) val dfIn = dataset.read(executor, None) @@ -117,11 +122,7 @@ case class ConsoleTarget( else dfIn - val result = dfOut.limit(limit).collect() - if (header) { - println(dfOut.columns.mkString(",")) - } - result.foreach(record => println(record.mkString(","))) + ConsoleUtils.showDataFrame(dfOut, limit, csv) } } @@ -131,6 +132,7 @@ class ConsoleTargetSpec extends TargetSpec { @JsonProperty(value="input", required=true) private var input:DatasetSpec = _ @JsonProperty(value="limit", required=false) private var limit:String = "100" @JsonProperty(value="header", required=false) private var header:String = "true" + @JsonProperty(value="csv", required=false) private var csv:String = "true" @JsonProperty(value="columns", required=false) private var columns:Seq[String] = Seq() override def instantiate(context: Context): Target = { @@ -139,6 +141,7 @@ class ConsoleTargetSpec extends TargetSpec { input.instantiate(context), context.evaluate(limit).toInt, context.evaluate(header).toBoolean, + context.evaluate(csv).toBoolean, columns.map(context.evaluate) ) } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CopyFileTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CopyFileTarget.scala index 04363fb6f..90df7217f 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CopyFileTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CopyFileTarget.scala @@ -24,7 +24,7 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.VerificationFailedException import com.dimajix.flowman.model.BaseTarget @@ -74,19 +74,19 @@ case class CopyFileTarget( * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. 
* - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase): Trilean = { + override def dirty(execution: Execution, phase: Phase): Trilean = { phase match { case Phase.BUILD => - val fs = executor.fs + val fs = execution.fs val dst = fs.file(target) !dst.exists() case Phase.VERIFY => Yes case Phase.TRUNCATE|Phase.DESTROY => - val fs = executor.fs + val fs = execution.fs val dst = fs.file(target) dst.exists() case _ => No @@ -99,7 +99,7 @@ case class CopyFileTarget( * * @param executor */ - override protected def build(executor: Executor): Unit = { + override protected def build(executor: Execution): Unit = { val fs = executor.fs val src = fs.file(source) val dst = fs.file(target) @@ -112,7 +112,7 @@ case class CopyFileTarget( * * @param executor */ - override def verify(executor: Executor) : Unit = { + override def verify(executor: Execution) : Unit = { require(executor != null) val file = executor.fs.file(target) @@ -127,7 +127,7 @@ case class CopyFileTarget( * * @param executor */ - override protected def truncate(executor: Executor): Unit = { + override protected def truncate(executor: Execution): Unit = { val fs = executor.fs val dst = fs.file(target) if (dst.exists()) { @@ -142,7 +142,7 @@ case class CopyFileTarget( * * @param executor */ - override protected def destroy(executor: Executor): Unit = { + override protected def destroy(executor: Execution): Unit = { truncate(executor) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CopyTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CopyTarget.scala index 2866ffff6..efc18c0dc 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CopyTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CopyTarget.scala @@ -23,8 +23,11 @@ import org.slf4j.LoggerFactory import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes +import com.dimajix.flowman.config.FlowmanConf.DEFAULT_TARGET_OUTPUT_MODE +import com.dimajix.flowman.config.FlowmanConf.DEFAULT_TARGET_PARALLELISM +import com.dimajix.flowman.config.FlowmanConf.DEFAULT_TARGET_REBALANCE import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.VerificationFailedException @@ -49,8 +52,9 @@ case class CopyTarget( source:Dataset, target:Dataset, schema:Option[CopyTarget.Schema] = None, + mode:OutputMode = OutputMode.OVERWRITE, parallelism:Int = 16, - mode:OutputMode = OutputMode.OVERWRITE + rebalance: Boolean = false ) extends BaseTarget { private val logger = LoggerFactory.getLogger(classOf[CopyTarget]) @@ -88,15 +92,15 @@ case class CopyTarget( * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. 
* - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase): Trilean = { + override def dirty(execution: Execution, phase: Phase): Trilean = { phase match { - case Phase.BUILD => !target.exists(executor) + case Phase.BUILD => !target.exists(execution) case Phase.VERIFY => Yes - case Phase.TRUNCATE|Phase.DESTROY => target.exists(executor) + case Phase.TRUNCATE|Phase.DESTROY => target.exists(execution) case _ => No } } @@ -107,12 +111,19 @@ case class CopyTarget( * * @param executor */ - override protected def build(executor: Executor): Unit = { + override protected def build(executor: Execution): Unit = { require(executor != null) logger.info(s"Copying dataset ${source.name} to ${target.name}") - val data = source.read(executor, None).coalesce(parallelism) + val dfIn = source.read(executor, None) + val data = + if (parallelism <= 0) + dfIn + else if (rebalance) + dfIn.repartition(parallelism) + else + dfIn.coalesce(parallelism) val conformed = target.describe(executor).map { schema => val xfs = SchemaEnforcer(schema.sparkType) xfs.transform(data) @@ -132,7 +143,7 @@ case class CopyTarget( * * @param executor */ - override protected def verify(executor: Executor): Unit = { + override protected def verify(executor: Execution): Unit = { require(executor != null) if (target.exists(executor) == No) { @@ -153,7 +164,7 @@ case class CopyTarget( * * @param executor */ - override protected def truncate(executor: Executor): Unit = { + override protected def truncate(executor: Execution): Unit = { require(executor != null) target.clean(executor) @@ -173,7 +184,7 @@ case class CopyTarget( * * @param executor */ - override def destroy(executor: Executor): Unit = { + override def destroy(executor: Execution): Unit = { schema.foreach { spec => val outputFile = executor.fs.file(spec.file) if (outputFile.exists()) { @@ -203,17 +214,20 @@ class CopyTargetSpec extends TargetSpec { @JsonProperty(value = "source", required = true) private var source: DatasetSpec = _ @JsonProperty(value = "target", required = true) private var target: DatasetSpec = _ @JsonProperty(value = "schema", required = false) private var schema: Option[CopyTargetSpec.Schema] = None - @JsonProperty(value = "parallelism", required = false) private var parallelism: String = "16" - @JsonProperty(value = "mode", required = false) private var mode: String = "overwrite" + @JsonProperty(value = "mode", required = false) private var mode: Option[String] = None + @JsonProperty(value = "parallelism", required = false) private var parallelism: Option[String] = None + @JsonProperty(value = "rebalance", required=false) private var rebalance:Option[String] = None override def instantiate(context: Context): CopyTarget = { + val conf = context.flowmanConf CopyTarget( instanceProperties(context), source.instantiate(context), target.instantiate(context), schema.map(_.instantiate(context)), - context.evaluate(parallelism).toInt, - OutputMode.ofString(context.evaluate(mode)) + OutputMode.ofString(context.evaluate(mode).getOrElse(conf.getConf(DEFAULT_TARGET_OUTPUT_MODE))), + context.evaluate(parallelism).map(_.toInt).getOrElse(conf.getConf(DEFAULT_TARGET_PARALLELISM)), + context.evaluate(rebalance).map(_.toBoolean).getOrElse(conf.getConf(DEFAULT_TARGET_REBALANCE)) ) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CountTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CountTarget.scala index 50279ff72..4aa5b4a61 100644 --- 
a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CountTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/CountTarget.scala @@ -22,7 +22,7 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.MappingUtils import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.model.BaseTarget @@ -63,11 +63,11 @@ case class CountTarget( /** * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase) : Trilean = { + override def dirty(execution: Execution, phase: Phase) : Trilean = { phase match { case Phase.BUILD => Yes case _ => No @@ -79,7 +79,7 @@ case class CountTarget( * * @param executor */ - override def build(executor:Executor) : Unit = { + override def build(executor:Execution) : Unit = { require(executor != null) val mapping = context.getMapping(this.mapping.mapping) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/DeleteFileTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/DeleteFileTarget.scala index 43cb2db16..f7588470b 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/DeleteFileTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/DeleteFileTarget.scala @@ -24,7 +24,7 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.VerificationFailedException import com.dimajix.flowman.model.BaseTarget @@ -48,14 +48,14 @@ case class DeleteFileTarget( * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. 
* - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase): Trilean = { + override def dirty(execution: Execution, phase: Phase): Trilean = { phase match { case Phase.BUILD => - val fs = executor.fs + val fs = execution.fs val file = fs.file(path) !file.exists() case Phase.VERIFY => Yes @@ -68,10 +68,10 @@ case class DeleteFileTarget( * * @param executor */ - override def build(executor:Executor) : Unit = { + override def build(executor:Execution) : Unit = { val fs = executor.fs val file = fs.file(path) - logger.info(s"Deleting remote file '$file' (recursive=$recursive)") + logger.info(s"Deleting file '$file' (recursive=$recursive)") file.delete(recursive) } @@ -80,7 +80,7 @@ case class DeleteFileTarget( * * @param executor */ - override def verify(executor: Executor) : Unit = { + override def verify(executor: Execution) : Unit = { require(executor != null) val file = executor.fs.file(path) diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/FileTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/FileTarget.scala index 5166dc0ab..6cc4c5d7c 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/FileTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/FileTarget.scala @@ -23,9 +23,13 @@ import org.slf4j.LoggerFactory import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes +import com.dimajix.flowman.config.FlowmanConf.DEFAULT_TARGET_OUTPUT_MODE +import com.dimajix.flowman.config.FlowmanConf.DEFAULT_TARGET_PARALLELISM +import com.dimajix.flowman.config.FlowmanConf.DEFAULT_TARGET_REBALANCE import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.MappingUtils +import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.VerificationFailedException import com.dimajix.flowman.hadoop.FileUtils @@ -38,15 +42,16 @@ import com.dimajix.flowman.model.TargetInstance object FileTarget { def apply(context: Context, mapping:MappingOutputIdentifier, location:Path, format:String, options:Map[String,String]) = { + val conf = context.flowmanConf new FileTarget( Target.Properties(context), mapping, location, format, options, - "overwrite", - 16, - false + OutputMode.ofString(conf.getConf(DEFAULT_TARGET_OUTPUT_MODE)), + conf.getConf(DEFAULT_TARGET_PARALLELISM), + conf.getConf(DEFAULT_TARGET_REBALANCE) ) } } @@ -56,9 +61,9 @@ case class FileTarget( location:Path, format:String, options:Map[String,String], - mode: String, + mode:OutputMode, parallelism: Int, - rebalance: Boolean + rebalance: Boolean = false ) extends BaseTarget { private val logger = LoggerFactory.getLogger(classOf[FileTarget]) @@ -106,30 +111,30 @@ case class FileTarget( * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. 
* - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase): Trilean = { + override def dirty(execution: Execution, phase: Phase): Trilean = { phase match { case Phase.CREATE => - val fs = location.getFileSystem(executor.spark.sparkContext.hadoopConfiguration) + val fs = location.getFileSystem(execution.spark.sparkContext.hadoopConfiguration) !fs.getFileStatus(location).isDirectory case Phase.BUILD => - val fs = location.getFileSystem(executor.spark.sparkContext.hadoopConfiguration) + val fs = location.getFileSystem(execution.spark.sparkContext.hadoopConfiguration) !FileUtils.isValidFileData(fs, location) case Phase.VERIFY => Yes case Phase.TRUNCATE => - val fs = location.getFileSystem(executor.spark.sparkContext.hadoopConfiguration) + val fs = location.getFileSystem(execution.spark.sparkContext.hadoopConfiguration) fs.listStatus(location).nonEmpty case Phase.DESTROY => - val fs = location.getFileSystem(executor.spark.sparkContext.hadoopConfiguration) + val fs = location.getFileSystem(execution.spark.sparkContext.hadoopConfiguration) fs.exists(location) case _ => No } } - override def create(executor: Executor) : Unit = { + override def create(executor: Execution) : Unit = { require(executor != null) val fs = location.getFileSystem(executor.spark.sparkContext.hadoopConfiguration) @@ -145,21 +150,25 @@ case class FileTarget( * * @param executor */ - override def build(executor: Executor): Unit = { + override def build(executor: Execution): Unit = { require(executor != null) val mapping = context.getMapping(this.mapping.mapping) val dfIn = executor.instantiate(mapping, this.mapping.output) - val table = if (rebalance) - dfIn.repartition(parallelism) - else - dfIn.coalesce(parallelism) + val table = { + if (parallelism <= 0) + dfIn + else if (rebalance) + dfIn.repartition(parallelism) + else + dfIn.coalesce(parallelism) + } logger.info(s"Writing mapping '$mapping' to directory '$location'") table.write .options(options) .format(format) - .mode(mode) + .mode(mode.batchMode) .save(location.toString) } @@ -168,7 +177,7 @@ case class FileTarget( * * @param executor */ - override def verify(executor: Executor) : Unit = { + override def verify(executor: Execution) : Unit = { require(executor != null) val file = executor.fs.file(location) @@ -183,7 +192,7 @@ case class FileTarget( * * @param executor */ - override def truncate(executor: Executor): Unit = { + override def truncate(executor: Execution): Unit = { require(executor != null) val fs = location.getFileSystem(executor.spark.sparkContext.hadoopConfiguration) @@ -194,7 +203,7 @@ case class FileTarget( } } - override def destroy(executor: Executor) : Unit = { + override def destroy(executor: Execution) : Unit = { require(executor != null) val fs = location.getFileSystem(executor.spark.sparkContext.hadoopConfiguration) @@ -211,21 +220,22 @@ class FileTargetSpec extends TargetSpec { @JsonProperty(value="mapping", required=true) private var mapping:String = _ @JsonProperty(value="location", required=true) private var location:String = _ @JsonProperty(value="format", required=false) private var format:String = "csv" - @JsonProperty(value="mode", required=false) private var mode:String = "overwrite" + @JsonProperty(value="mode", required=false) private var mode:Option[String] = None @JsonProperty(value="options", required=false) private var options:Map[String,String] = Map() - @JsonProperty(value="parallelism", required=false) private var parallelism:String = "16" - 
@JsonProperty(value="rebalance", required=false) private var rebalance:String = "false" + @JsonProperty(value="parallelism", required=false) private var parallelism:Option[String] = None + @JsonProperty(value="rebalance", required=false) private var rebalance:Option[String] = None override def instantiate(context: Context): FileTarget = { + val conf = context.flowmanConf FileTarget( instanceProperties(context), MappingOutputIdentifier.parse(context.evaluate(mapping)), new Path(context.evaluate(location)), context.evaluate(format), context.evaluate(options), - context.evaluate(mode), - context.evaluate(parallelism).toInt, - context.evaluate(rebalance).toBoolean + OutputMode.ofString(context.evaluate(mode).getOrElse(conf.getConf(DEFAULT_TARGET_OUTPUT_MODE))), + context.evaluate(parallelism).map(_.toInt).getOrElse(conf.getConf(DEFAULT_TARGET_PARALLELISM)), + context.evaluate(rebalance).map(_.toBoolean).getOrElse(conf.getConf(DEFAULT_TARGET_REBALANCE)) ) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/GetFileTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/GetFileTarget.scala index de2e6be7a..8859fae01 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/GetFileTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/GetFileTarget.scala @@ -24,7 +24,7 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.VerificationFailedException import com.dimajix.flowman.model.BaseTarget @@ -74,19 +74,19 @@ case class GetFileTarget( * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. 
* - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase): Trilean = { + override def dirty(execution: Execution, phase: Phase): Trilean = { phase match { case Phase.BUILD => - val fs = executor.fs + val fs = execution.fs val dst = fs.local(target) !dst.exists() case Phase.VERIFY => Yes case Phase.TRUNCATE|Phase.DESTROY => - val fs = executor.fs + val fs = execution.fs val dst = fs.local(target) dst.exists() case _ => No @@ -99,7 +99,7 @@ case class GetFileTarget( * * @param executor */ - override protected def build(executor: Executor): Unit = { + override protected def build(executor: Execution): Unit = { val fs = executor.fs val src = fs.file(source) val dst = fs.local(target) @@ -112,7 +112,7 @@ case class GetFileTarget( * * @param executor */ - override protected def verify(executor: Executor): Unit = { + override protected def verify(executor: Execution): Unit = { require(executor != null) val file = executor.fs.local(target) @@ -127,7 +127,7 @@ case class GetFileTarget( * * @param executor */ - override protected def truncate(executor: Executor): Unit = { + override protected def truncate(executor: Execution): Unit = { val fs = executor.fs val dst = fs.local(target) if (dst.exists()) { @@ -142,7 +142,7 @@ case class GetFileTarget( * * @param executor */ - override protected def destroy(executor: Executor): Unit = { + override protected def destroy(executor: Execution): Unit = { truncate(executor) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/HiveDatabaseTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/HiveDatabaseTarget.scala index 2ab7add95..e73c2d07d 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/HiveDatabaseTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/HiveDatabaseTarget.scala @@ -23,7 +23,7 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.VerificationFailedException import com.dimajix.flowman.model.BaseTarget @@ -58,15 +58,15 @@ case class HiveDatabaseTarget( /** * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. 
- * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase) : Trilean = { + override def dirty(execution: Execution, phase: Phase) : Trilean = { phase match { - case Phase.CREATE => !executor.catalog.databaseExists(database) + case Phase.CREATE => !execution.catalog.databaseExists(database) case Phase.VERIFY => Yes - case Phase.DESTROY => executor.catalog.databaseExists(database) + case Phase.DESTROY => execution.catalog.databaseExists(database) case _ => No } } @@ -77,7 +77,7 @@ case class HiveDatabaseTarget( * * @param executor */ - override def create(executor: Executor): Unit = { + override def create(executor: Execution): Unit = { require(executor != null) logger.info(s"Creating Hive database '$database'") @@ -89,7 +89,7 @@ case class HiveDatabaseTarget( * * @param executor */ - override def verify(executor: Executor): Unit = { + override def verify(executor: Execution): Unit = { require(executor != null) if (!executor.catalog.databaseExists(database)) { @@ -104,7 +104,7 @@ case class HiveDatabaseTarget( * * @param executor */ - override def destroy(executor: Executor): Unit = { + override def destroy(executor: Execution): Unit = { require(executor != null) logger.info(s"Creating Hive database '$database'") diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/LocalTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/LocalTarget.scala index bc8ce031e..ca1ab8c57 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/LocalTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/LocalTarget.scala @@ -32,7 +32,7 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.MappingUtils import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.VerificationFailedException @@ -102,18 +102,18 @@ case class LocalTarget( * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. 
* - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase): Trilean = { + override def dirty(execution: Execution, phase: Phase): Trilean = { phase match { case Phase.BUILD => - val file = executor.fs.local(path) + val file = execution.fs.local(path) !file.exists() case Phase.VERIFY => Yes case Phase.TRUNCATE|Phase.DESTROY => - val file = executor.fs.local(path) + val file = execution.fs.local(path) file.exists() case _ => No } @@ -124,7 +124,7 @@ case class LocalTarget( * * @param executor */ - override def build(executor:Executor) : Unit = { + override def build(executor:Execution) : Unit = { logger.info(s"Writing mapping '${this.mapping}' to local file '$path'") val mapping = context.getMapping(this.mapping.mapping) @@ -164,7 +164,7 @@ case class LocalTarget( * * @param executor */ - override def verify(executor: Executor) : Unit = { + override def verify(executor: Execution) : Unit = { require(executor != null) val file = executor.fs.local(path) @@ -179,7 +179,7 @@ case class LocalTarget( * * @param executor */ - override def truncate(executor: Executor): Unit = { + override def truncate(executor: Execution): Unit = { require(executor != null) val outputFile = new File(path) @@ -189,7 +189,7 @@ case class LocalTarget( } } - override def destroy(executor: Executor) : Unit = { + override def destroy(executor: Execution) : Unit = { truncate(executor) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/MergeFilesTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/MergeFilesTarget.scala index ae1436723..6c9005206 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/MergeFilesTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/MergeFilesTarget.scala @@ -27,7 +27,7 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.VerificationFailedException import com.dimajix.flowman.model.BaseTarget @@ -78,20 +78,20 @@ case class MergeFilesTarget( * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. 
     *
-     * @param executor
+     * @param execution
      * @param phase
      * @return
      */
-    override def dirty(executor: Executor, phase: Phase): Trilean = {
+    override def dirty(execution: Execution, phase: Phase): Trilean = {
         phase match {
             case Phase.BUILD =>
-                val fs = executor.fs
-                val dst = fs.local(target)
+                val fs = execution.fs
+                val dst = fs.file(target)
                 !dst.exists()
             case Phase.VERIFY => Yes
             case Phase.TRUNCATE|Phase.DESTROY =>
-                val fs = executor.fs
-                val dst = fs.local(target)
+                val fs = execution.fs
+                val dst = fs.file(target)
                 dst.exists()
             case _ => No
         }
@@ -103,13 +103,13 @@ case class MergeFilesTarget(
      *
      * @param executor
      */
-    override protected def build(executor: Executor): Unit = {
+    override protected def build(executor: Execution): Unit = {
         val fs = executor.fs
         val src = fs.file(source)
-        val dst = fs.local(target)
+        val dst = fs.file(target)
         val delimiter = Option(this.delimiter).map(_.getBytes(Charset.forName("UTF-8"))).filter(_.nonEmpty)
-        logger.info(s"Merging remote files in '$src' to local file '$dst' (overwrite=$overwrite)")
+        logger.info(s"Merging remote files in '$src' to file '$dst' (overwrite=$overwrite)")
         val output = dst.create(overwrite)
         try {
             fs.file(source)
@@ -137,12 +137,12 @@ case class MergeFilesTarget(
      *
      * @param executor
      */
-    override protected def verify(executor: Executor): Unit = {
+    override protected def verify(executor: Execution): Unit = {
         require(executor != null)
-        val file = executor.fs.local(target)
+        val file = executor.fs.file(target)
         if (!file.exists()) {
-            logger.error(s"Verification of target '$identifier' failed - local file '$target' does not exist")
+            logger.error(s"Verification of target '$identifier' failed - file '$target' does not exist")
             throw new VerificationFailedException(identifier)
         }
     }
@@ -152,12 +152,12 @@ case class MergeFilesTarget(
      *
      * @param executor
      */
-    override protected def truncate(executor: Executor): Unit = {
+    override protected def truncate(executor: Execution): Unit = {
         require(executor != null)
-        val outputFile = executor.fs.local(target)
+        val outputFile = executor.fs.file(target)
         if (outputFile.exists()) {
-            logger.info(s"Removing local file '$target'")
+            logger.info(s"Removing file '$target'")
             outputFile.delete()
         }
     }
@@ -168,7 +168,7 @@ case class MergeFilesTarget(
      *
      * @param executor
      */
-    override protected def destroy(executor: Executor): Unit = {
+    override protected def destroy(executor: Execution): Unit = {
         truncate(executor)
     }
 }
diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/NullTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/NullTarget.scala
index f004b4c75..daa5d16c4 100644
--- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/NullTarget.scala
+++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/NullTarget.scala
@@ -22,7 +22,7 @@ import com.dimajix.common.No
 import com.dimajix.common.Trilean
 import com.dimajix.common.Yes
 import com.dimajix.flowman.execution.Context
-import com.dimajix.flowman.execution.Executor
+import com.dimajix.flowman.execution.Execution
 import com.dimajix.flowman.execution.Phase
 import com.dimajix.flowman.model.BaseTarget
 import com.dimajix.flowman.model.Target
@@ -50,11 +50,11 @@ case class NullTarget(
     /**
      * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]],
      * then an [[execute]] should update the output, such that the target is not 'dirty' any more.
- * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase) : Trilean = { + override def dirty(execution: Execution, phase: Phase) : Trilean = { phase match { case _ => No } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/PutFileTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/PutFileTarget.scala index fac827d33..21675f344 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/PutFileTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/PutFileTarget.scala @@ -24,7 +24,7 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.VerificationFailedException import com.dimajix.flowman.model.BaseTarget @@ -74,24 +74,24 @@ case class PutFileTarget( * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. * - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase): Trilean = { + override def dirty(execution: Execution, phase: Phase): Trilean = { phase match { case Phase.BUILD => - val dst = executor.fs.file(target) + val dst = execution.fs.file(target) !dst.exists() case Phase.VERIFY => Yes case Phase.TRUNCATE|Phase.DESTROY => - val dst = executor.fs.file(target) + val dst = execution.fs.file(target) dst.exists() case _ => No } } - override protected def build(executor:Executor) : Unit = { + override protected def build(executor:Execution) : Unit = { val fs = executor.fs val src = fs.local(source) val dst = fs.file(target) @@ -104,7 +104,7 @@ case class PutFileTarget( * * @param executor */ - override protected def verify(executor: Executor): Unit = { + override protected def verify(executor: Execution): Unit = { require(executor != null) val file = executor.fs.file(target) @@ -119,7 +119,7 @@ case class PutFileTarget( * * @param executor */ - override protected def truncate(executor: Executor): Unit = { + override protected def truncate(executor: Execution): Unit = { require(executor != null) val outputFile = executor.fs.file(target) @@ -135,7 +135,7 @@ case class PutFileTarget( * * @param executor */ - override protected def destroy(executor: Executor): Unit = { + override protected def destroy(executor: Execution): Unit = { truncate(executor) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/RelationTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/RelationTarget.scala index bcd939660..a1ac303d6 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/RelationTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/RelationTarget.scala @@ -23,12 +23,16 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Unknown import com.dimajix.common.Yes +import com.dimajix.flowman.config.FlowmanConf.DEFAULT_TARGET_OUTPUT_MODE +import com.dimajix.flowman.config.FlowmanConf.DEFAULT_TARGET_PARALLELISM +import com.dimajix.flowman.config.FlowmanConf.DEFAULT_TARGET_REBALANCE import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor 
+import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.MappingUtils import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.VerificationFailedException +import com.dimajix.flowman.graph.Linker import com.dimajix.flowman.metric.LongAccumulatorMetric import com.dimajix.flowman.metric.Selector import com.dimajix.flowman.model.BaseTarget @@ -43,14 +47,15 @@ import com.dimajix.spark.sql.functions.count_records object RelationTarget { def apply(context: Context, relation: RelationIdentifier) : RelationTarget = { + val conf = context.flowmanConf new RelationTarget( Target.Properties(context), MappingOutputIdentifier(""), relation, - OutputMode.OVERWRITE, + OutputMode.ofString(conf.getConf(DEFAULT_TARGET_OUTPUT_MODE)), Map(), - 16, - false + conf.getConf(DEFAULT_TARGET_PARALLELISM), + conf.getConf(DEFAULT_TARGET_REBALANCE) ) } } @@ -125,18 +130,19 @@ case class RelationTarget( * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. * - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase): Trilean = { + override def dirty(execution: Execution, phase: Phase): Trilean = { val partition = this.partition.mapValues(v => SingleValue(v)) val rel = context.getRelation(relation) phase match { + case Phase.VALIDATE => No case Phase.CREATE => // Since an existing relation might need a migration, we return "unknown" - if (rel.exists(executor) == Yes) + if (rel.exists(execution) == Yes) Unknown else Yes @@ -144,22 +150,34 @@ case class RelationTarget( if (mode == OutputMode.APPEND) { Yes } else { - !rel.loaded(executor, partition) + !rel.loaded(execution, partition) } case Phase.VERIFY => Yes case Phase.TRUNCATE => - rel.loaded(executor, partition) + rel.loaded(execution, partition) case Phase.DESTROY => - rel.exists(executor) + rel.exists(execution) } } + + /** + * Creates all known links for building a descriptive graph of the whole data flow + * Params: linker - The linker object to use for creating new edges + */ + override def link(linker: Linker): Unit = { + val partition = this.partition.mapValues(v => SingleValue(v)) + if (mapping.nonEmpty) + linker.input(mapping.mapping, mapping.output) + linker.write(relation, partition) + } + /** * Creates the empty containing (Hive table, SQL table, etc) for holding the data * * @param executor */ - override def create(executor: Executor) : Unit = { + override def create(executor: Execution) : Unit = { require(executor != null) val rel = context.getRelation(relation) @@ -178,7 +196,7 @@ case class RelationTarget( * * @param executor */ - override def build(executor:Executor) : Unit = { + override def build(executor:Execution) : Unit = { require(executor != null) if (mapping.nonEmpty) { @@ -187,10 +205,13 @@ case class RelationTarget( logger.info(s"Writing mapping '${this.mapping}' to relation '$relation' into partition $partition with mode '$mode'") val mapping = context.getMapping(this.mapping.mapping) val dfIn = executor.instantiate(mapping, this.mapping.output) - val dfOut = if (rebalance) - dfIn.repartition(parallelism) - else - dfIn.coalesce(parallelism) + val dfOut = + if (parallelism <= 0) + dfIn + else if (rebalance) + dfIn.repartition(parallelism) + else + dfIn.coalesce(parallelism) // Setup metric for counting number of records val counter = 
executor.metrics.findMetric(Selector(Some("target_records"), metadata.asMap)) @@ -214,7 +235,7 @@ case class RelationTarget( * * @param executor */ - override def verify(executor: Executor) : Unit = { + override def verify(executor: Execution) : Unit = { require(executor != null) val partition = this.partition.mapValues(v => SingleValue(v)) @@ -229,7 +250,7 @@ case class RelationTarget( * Cleans the target. This will remove any data in the target for the current partition * @param executor */ - override def truncate(executor: Executor): Unit = { + override def truncate(executor: Execution): Unit = { require(executor != null) val partition = this.partition.mapValues(v => SingleValue(v)) @@ -243,7 +264,7 @@ case class RelationTarget( * Destroys both the logical relation and the physical data * @param executor */ - override def destroy(executor: Executor) : Unit = { + override def destroy(executor: Execution) : Unit = { require(executor != null) logger.info(s"Destroying relation '$relation'") @@ -266,20 +287,21 @@ object RelationTargetSpec { class RelationTargetSpec extends TargetSpec { @JsonProperty(value="mapping", required=true) private var mapping:String = "" @JsonProperty(value="relation", required=true) private var relation:String = _ - @JsonProperty(value="mode", required=false) private var mode:String = "overwrite" + @JsonProperty(value="mode", required=false) private var mode:Option[String] = None @JsonProperty(value="partition", required=false) private var partition:Map[String,String] = Map() - @JsonProperty(value="parallelism", required=false) private var parallelism:String = "16" - @JsonProperty(value="rebalance", required=false) private var rebalance:String = "false" + @JsonProperty(value="parallelism", required=false) private var parallelism:Option[String] = None + @JsonProperty(value="rebalance", required=false) private var rebalance:Option[String] = None override def instantiate(context: Context): RelationTarget = { + val conf = context.flowmanConf RelationTarget( instanceProperties(context), MappingOutputIdentifier.parse(context.evaluate(mapping)), RelationIdentifier.parse(context.evaluate(relation)), - OutputMode.ofString(context.evaluate(mode)), + OutputMode.ofString(context.evaluate(mode).getOrElse(conf.getConf(DEFAULT_TARGET_OUTPUT_MODE))), context.evaluate(partition), - context.evaluate(parallelism).toInt, - context.evaluate(rebalance).toBoolean + context.evaluate(parallelism).map(_.toInt).getOrElse(conf.getConf(DEFAULT_TARGET_PARALLELISM)), + context.evaluate(rebalance).map(_.toBoolean).getOrElse(conf.getConf(DEFAULT_TARGET_REBALANCE)) ) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/SchemaTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/SchemaTarget.scala index f44974952..c9e2f1046 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/SchemaTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/SchemaTarget.scala @@ -24,7 +24,7 @@ import com.dimajix.common.No import com.dimajix.common.Trilean import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.VerificationFailedException import com.dimajix.flowman.model.BaseTarget @@ -66,18 +66,18 @@ case class SchemaTarget( * Returns the state of the target, specifically of any artifacts produces. 
If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. * - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase): Trilean = { + override def dirty(execution: Execution, phase: Phase): Trilean = { phase match { case Phase.BUILD => - val dst = executor.fs.file(file) + val dst = execution.fs.file(file) !dst.exists() case Phase.VERIFY => Yes case Phase.TRUNCATE|Phase.DESTROY => - val dst = executor.fs.file(file) + val dst = execution.fs.file(file) dst.exists() case _ => No } @@ -89,7 +89,7 @@ case class SchemaTarget( * * @param executor */ - override def build(executor: Executor): Unit = { + override def build(executor: Execution): Unit = { require(executor != null) logger.info(s"Writing schema to file '$file'") @@ -102,7 +102,7 @@ case class SchemaTarget( * * @param executor */ - override def verify(executor: Executor): Unit = { + override def verify(executor: Execution): Unit = { require(executor != null) val outputFile = executor.fs.file(file) @@ -117,7 +117,7 @@ case class SchemaTarget( * * @param executor */ - override def truncate(executor: Executor): Unit = { + override def truncate(executor: Execution): Unit = { require(executor != null) val outputFile = executor.fs.file(file) @@ -133,7 +133,7 @@ case class SchemaTarget( * * @param executor */ - override def destroy(executor: Executor): Unit = { + override def destroy(executor: Execution): Unit = { truncate(executor) } } diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/SftpUploadTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/SftpUploadTarget.scala index 235f2bc8c..d99d8c626 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/SftpUploadTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/SftpUploadTarget.scala @@ -38,7 +38,7 @@ import com.dimajix.common.Trilean import com.dimajix.common.Unknown import com.dimajix.common.tryWith import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.model.BaseTarget import com.dimajix.flowman.model.ConnectionIdentifier @@ -117,18 +117,18 @@ case class SftpUploadTarget( * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. 
* - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase): Trilean = { + override def dirty(execution: Execution, phase: Phase): Trilean = { phase match { case Phase.BUILD => Unknown case _ => No } } - override protected def build(executor:Executor) : Unit = { + override protected def build(executor:Execution) : Unit = { val host = credentials.host val port = Some(credentials.port).filter(_ > 0).getOrElse(22) val fs = executor.fs diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/StreamTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/StreamTarget.scala index 92bb2c5bd..bb0335d57 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/StreamTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/StreamTarget.scala @@ -22,7 +22,7 @@ import org.slf4j.LoggerFactory import com.dimajix.common.Yes import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.MappingUtils import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.execution.Phase @@ -71,7 +71,7 @@ case class StreamTarget( * Creates the empty containing (Hive tabl, SQL table, etc) for holding the data * @param executor */ - override def create(executor: Executor) : Unit = { + override def create(executor: Execution) : Unit = { require(executor != null) val rel = context.getRelation(relation) @@ -91,7 +91,7 @@ case class StreamTarget( * * @param executor */ - override def build(executor: Executor): Unit = { + override def build(executor: Execution): Unit = { logger.info(s"Writing mapping '${this.mapping}' to streaming relation '$relation' using mode '$mode' and checkpoint location '$checkpointLocation'") val mapping = context.getMapping(this.mapping.mapping) val rel = context.getRelation(relation) @@ -104,7 +104,7 @@ case class StreamTarget( * * @param executor */ - override def truncate(executor: Executor): Unit = { + override def truncate(executor: Execution): Unit = { logger.info(s"Cleaining streaming relation '$relation'") val rel = context.getRelation(relation) rel.truncate(executor) @@ -114,7 +114,7 @@ case class StreamTarget( * Destroys both the logical relation and the physical data * @param executor */ - override def destroy(executor: Executor) : Unit = { + override def destroy(executor: Execution) : Unit = { require(executor != null) logger.info(s"Destroying relation '$relation'") diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/TargetSpec.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/TargetSpec.scala index c74516c62..62db952f1 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/TargetSpec.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/TargetSpec.scala @@ -22,11 +22,11 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo import com.fasterxml.jackson.databind.util.StdConverter import com.dimajix.common.TypeRegistry -import com.dimajix.flowman.annotation.TargetType import com.dimajix.flowman.execution.Context import com.dimajix.flowman.model.Target import com.dimajix.flowman.model.TargetIdentifier import com.dimajix.flowman.spec.NamedSpec +import com.dimajix.flowman.spec.annotation.TargetType import com.dimajix.flowman.spi.ClassAnnotationHandler @@ -60,8 +60,10 @@ object TargetSpec extends TypeRegistry[TargetSpec] { new JsonSubTypes.Type(name = 
"schema", value = classOf[SchemaTargetSpec]), new JsonSubTypes.Type(name = "sftpUpload", value = classOf[SftpUploadTargetSpec]), new JsonSubTypes.Type(name = "stream", value = classOf[StreamTargetSpec]), - new JsonSubTypes.Type(name = "template", value = classOf[TemplateTargetSpec])) -) + new JsonSubTypes.Type(name = "template", value = classOf[TemplateTargetSpec]), + new JsonSubTypes.Type(name = "validate", value = classOf[ValidateTargetSpec]), + new JsonSubTypes.Type(name = "verify", value = classOf[VerifyTargetSpec]) +)) abstract class TargetSpec extends NamedSpec[Target] { @JsonProperty(value = "before", required=false) private var before:Seq[String] = Seq() @JsonProperty(value = "after", required=false) private var after:Seq[String] = Seq() diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/TemplateTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/TemplateTarget.scala index dc0619630..687d114cf 100644 --- a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/TemplateTarget.scala +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/TemplateTarget.scala @@ -20,9 +20,10 @@ import com.fasterxml.jackson.annotation.JsonProperty import com.dimajix.common.Trilean import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.ScopeContext +import com.dimajix.flowman.graph.Linker import com.dimajix.flowman.model.BaseTarget import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.flowman.model.Target @@ -30,10 +31,10 @@ import com.dimajix.flowman.model.TargetIdentifier import com.dimajix.flowman.spec.splitSettings -class TemplateTarget( +case class TemplateTarget( override val instanceProperties:Target.Properties, - val target:TargetIdentifier, - val environment:Map[String,String] + target:TargetIdentifier, + environment:Map[String,String] ) extends BaseTarget { private val templateContext = ScopeContext.builder(context) .withEnvironment(environment) @@ -90,26 +91,32 @@ class TemplateTarget( * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], * then an [[execute]] should update the output, such that the target is not 'dirty' any more. 
* - * @param executor + * @param execution * @param phase * @return */ - override def dirty(executor: Executor, phase: Phase): Trilean = { - targetInstance.dirty(executor, phase) + override def dirty(execution: Execution, phase: Phase): Trilean = { + targetInstance.dirty(execution, phase) } /** * Executes a specific phase of this target * - * @param executor + * @param execution * @param phase */ - override def execute(executor: Executor, phase: Phase): Unit = { - targetInstance.execute(executor, phase) + override def execute(execution: Execution, phase: Phase): Unit = { + targetInstance.execute(execution, phase) } -} - + /** + * Creates all known links for building a descriptive graph of the whole data flow + * Params: linker - The linker object to use for creating new edges + */ + override def link(linker: Linker): Unit = { + targetInstance.link(linker) + } +} class TemplateTargetSpec extends TargetSpec { @@ -123,7 +130,7 @@ class TemplateTargetSpec extends TargetSpec { * @return */ override def instantiate(context: Context): TemplateTarget = { - new TemplateTarget( + TemplateTarget( instanceProperties(context), TargetIdentifier(context.evaluate(target)), splitSettings(environment).toMap diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/ValidateTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/ValidateTarget.scala new file mode 100644 index 000000000..ee21368f8 --- /dev/null +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/ValidateTarget.scala @@ -0,0 +1,140 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.target + +import java.time.Clock + +import com.fasterxml.jackson.annotation.JsonProperty +import org.apache.spark.storage.StorageLevel +import org.slf4j.LoggerFactory + +import com.dimajix.common.No +import com.dimajix.common.Trilean +import com.dimajix.common.Yes +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.execution.Phase +import com.dimajix.flowman.execution.ValidationFailedException +import com.dimajix.flowman.model.Assertion +import com.dimajix.flowman.model.BaseTarget +import com.dimajix.flowman.model.ResourceIdentifier +import com.dimajix.flowman.model.Target +import com.dimajix.flowman.model.TargetInstance +import com.dimajix.flowman.spec.assertion.AssertionSpec +import com.dimajix.flowman.util.ConsoleColors.green +import com.dimajix.flowman.util.ConsoleColors.red +import com.dimajix.spark.sql.DataFrameUtils + + +case class ValidateTarget( + instanceProperties:Target.Properties, + assertions:Map[String,Assertion] = Map() +) extends BaseTarget { + private val logger = LoggerFactory.getLogger(classOf[ValidateTarget]) + + /** + * Returns an instance representing this target with the context + * + * @return + */ + override def instance: TargetInstance = { + // Create a custom instance identifier with a timestamp, such that every run is a new instance. 
+        // Otherwise, validation wouldn't always be executed in the presence of a state store.
+        TargetInstance(
+            namespace.map(_.name).getOrElse(""),
+            project.map(_.name).getOrElse(""),
+            name,
+            Map("validation_ts" -> Clock.systemUTC().millis().toString)
+        )
+    }
+
+    /**
+     * Returns all phases which are implemented by this target in the execute method
+     *
+     * @return
+     */
+    override def phases : Set[Phase] = Set(Phase.VALIDATE)
+
+    /**
+     * Returns a list of physical resources required by this target
+     *
+     * @return
+     */
+    override def requires(phase: Phase): Set[ResourceIdentifier] = {
+        phase match {
+            case Phase.VALIDATE => assertions.flatMap(_._2.requires).toSet
+            case _ => Set()
+        }
+    }
+
+    /**
+     * Returns the state of the target, specifically of any artifacts produced. If this method returns [[Yes]],
+     * then an [[execute]] should update the output, such that the target is not 'dirty' any more.
+     *
+     * @param execution
+     * @param phase
+     * @return
+     */
+    override def dirty(execution: Execution, phase: Phase): Trilean = {
+        phase match {
+            case Phase.VALIDATE => Yes
+            case _ => No
+        }
+    }
+
+    /**
+     * Performs validation by executing all configured assertions.
+     *
+     * @param execution
+     */
+    override protected def validate(execution: Execution): Unit = {
+        // Collect all required DataFrames for caching. We assume that each DataFrame might be used in multiple
+        // assertions and that the DataFrames aren't very huge (we are talking about tests!)
+        val inputDataFrames = assertions
+            .flatMap { case(_,instance) => instance.inputs }
+            .toSeq
+            .distinct
+            .map(id => execution.instantiate(context.getMapping(id.mapping), id.output))
+        val cacheLevel = StorageLevel.NONE // actually disable caching for now
+
+        DataFrameUtils.withCaches(inputDataFrames, cacheLevel) {
+            assertions.map { case (name, instance) =>
+                val description = instance.description.getOrElse(name)
+
+                if (execution.assert(instance).exists(r => !r.valid)) {
+                    logger.error(red(s" ✘ failed: $description"))
+                    throw new ValidationFailedException(identifier)
+                }
+                else {
+                    logger.info(green(s" ✓ passed: $description"))
+                }
+            }
+        }
+    }
+}
+
+
+class ValidateTargetSpec extends TargetSpec {
+    @JsonProperty(value = "assertions", required = true) private var assertions: Map[String,AssertionSpec] = Map()
+
+    override def instantiate(context: Context): ValidateTarget = {
+        ValidateTarget(
+            instanceProperties(context),
+            assertions.map {case(name,assertion) => name -> assertion.instantiate(context) }
+        )
+    }
+}
diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/VerifyTarget.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/VerifyTarget.scala
new file mode 100644
index 000000000..f93f8409a
--- /dev/null
+++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/target/VerifyTarget.scala
@@ -0,0 +1,139 @@
+/*
+ * Copyright 2021 Kaya Kupferschmidt
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package com.dimajix.flowman.spec.target + +import java.time.Clock + +import com.fasterxml.jackson.annotation.JsonProperty +import org.apache.spark.storage.StorageLevel +import org.slf4j.LoggerFactory + +import com.dimajix.common.No +import com.dimajix.common.Trilean +import com.dimajix.common.Yes +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.execution.Phase +import com.dimajix.flowman.execution.ValidationFailedException +import com.dimajix.flowman.model.Assertion +import com.dimajix.flowman.model.BaseTarget +import com.dimajix.flowman.model.ResourceIdentifier +import com.dimajix.flowman.model.Target +import com.dimajix.flowman.model.TargetInstance +import com.dimajix.flowman.spec.assertion.AssertionSpec +import com.dimajix.flowman.util.ConsoleColors.green +import com.dimajix.flowman.util.ConsoleColors.red +import com.dimajix.spark.sql.DataFrameUtils + + +case class VerifyTarget( + instanceProperties:Target.Properties, + assertions:Map[String,Assertion] = Map() +) extends BaseTarget { + private val logger = LoggerFactory.getLogger(classOf[VerifyTarget]) + + /** + * Returns an instance representing this target with the context + * + * @return + */ + override def instance: TargetInstance = { + // Create a custom instance identifier with a timestamp, such that every run is a new instance. Otherwise + // verification wouldn't be always executed in the presence of a state store. + TargetInstance( + namespace.map(_.name).getOrElse(""), + project.map(_.name).getOrElse(""), + name, + Map("verification_ts" -> Clock.systemUTC().millis().toString) + ) + } + + /** + * Returns all phases which are implemented by this target in the execute method + * @return + */ + override def phases : Set[Phase] = Set(Phase.VERIFY) + + /** + * Returns a list of physical resources required by this target + * + * @return + */ + override def requires(phase: Phase): Set[ResourceIdentifier] = { + phase match { + case Phase.VERIFY => assertions.flatMap(_._2.requires).toSet + case _ => Set() + } + } + + /** + * Returns the state of the target, specifically of any artifacts produces. If this method return [[Yes]], + * then an [[execute]] should update the output, such that the target is not 'dirty' any more. + * + * @param execution + * @param phase + * @return + */ + override def dirty(execution: Execution, phase: Phase): Trilean = { + phase match { + case Phase.VERIFY => Yes + case _ => No + } + } + + /** + * Performs a verification of the build step or possibly other checks. + * + * @param executor + */ + override protected def verify(execution: Execution): Unit = { + // Collect all required DataFrames for caching. We assume that each DataFrame might be used in multiple + // assertions and that the DataFrames aren't very huge (we are talking about tests!) 
+ val inputDataFrames = assertions + .flatMap { case(_,instance) => instance.inputs } + .toSeq + .distinct + .map(id => execution.instantiate(context.getMapping(id.mapping), id.output)) + val cacheLevel = StorageLevel.NONE // actually disable caching for now + + DataFrameUtils.withCaches(inputDataFrames, cacheLevel) { + assertions.map { case (name, instance) => + val description = instance.description.getOrElse(name) + + if (execution.assert(instance).exists(r => !r.valid)) { + logger.error(red(s" ✘ failed: $description")) + throw new ValidationFailedException(identifier) + } + else { + logger.info(green(s" ✓ passed: $description")) + } + } + } + } +} + + +class VerifyTargetSpec extends TargetSpec { + @JsonProperty(value = "assertions", required = true) private var assertions: Map[String,AssertionSpec] = Map() + + override def instantiate(context: Context): VerifyTarget = { + VerifyTarget( + instanceProperties(context), + assertions.map {case(name,assertion) => name -> assertion.instantiate(context) } + ) + } +} diff --git a/flowman-spec/src/main/scala/com/dimajix/flowman/spec/test/TestSpec.scala b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/test/TestSpec.scala new file mode 100644 index 000000000..9371c9c0f --- /dev/null +++ b/flowman-spec/src/main/scala/com/dimajix/flowman/spec/test/TestSpec.scala @@ -0,0 +1,93 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.test + +import com.fasterxml.jackson.annotation.JsonProperty +import com.fasterxml.jackson.databind.annotation.JsonDeserialize +import com.fasterxml.jackson.databind.util.StdConverter + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.model.TargetIdentifier +import com.dimajix.flowman.model.Test +import com.dimajix.flowman.model.TestIdentifier +import com.dimajix.flowman.spec.NamedSpec +import com.dimajix.flowman.spec.assertion.AssertionSpec +import com.dimajix.flowman.spec.mapping.MappingSpec +import com.dimajix.flowman.spec.relation.RelationSpec +import com.dimajix.flowman.spec.splitSettings +import com.dimajix.flowman.spec.target.TargetSpec + + +object TestSpec { + class NameResolver extends StdConverter[Map[String, TestSpec], Map[String, TestSpec]] { + override def convert(value: Map[String, TestSpec]): Map[String, TestSpec] = { + value.foreach(kv => kv._2.name = kv._1) + value + } + } +} + + +class TestSpec extends NamedSpec[Test] { + @JsonProperty(value="extends") private var parents:Seq[String] = Seq() + @JsonProperty(value="description") private var description:Option[String] = None + @JsonProperty(value="environment") private var environment: Seq[String] = Seq() + @JsonProperty(value="targets") private var targets: Seq[String] = Seq() + @JsonDeserialize(converter=classOf[TargetSpec.NameResolver]) + @JsonProperty(value="fixtures") private var fixtures: Map[String,TargetSpec] = Map() + @JsonDeserialize(converter=classOf[MappingSpec.NameResolver]) + @JsonProperty(value="overrideMappings") private var overrideMappings: Map[String,MappingSpec] = Map() + @JsonDeserialize(converter=classOf[RelationSpec.NameResolver]) + @JsonProperty(value="overrideRelations") private var overrideRelations: Map[String,RelationSpec] = Map() + @JsonDeserialize(converter=classOf[AssertionSpec.NameResolver]) + @JsonProperty(value="assertions") private var assertions: Map[String,AssertionSpec] = Map() + + override def instantiate(context: Context): Test = { + require(context != null) + + val parents = this.parents.map(job => context.getTest(TestIdentifier(job))) + val test = Test( + instanceProperties(context), + environment = splitSettings(environment).toMap, + targets = targets.map(context.evaluate).map(TargetIdentifier.parse), + fixtures = fixtures, + overrideMappings = overrideMappings, + overrideRelations = overrideRelations, + assertions = assertions + ) + + Test.merge(test, parents) + } + + /** + * Returns a set of common properties + * + * @param context + * @return + */ + override protected def instanceProperties(context: Context): Test.Properties = { + require(context != null) + Test.Properties( + context, + context.namespace, + context.project, + name, + context.evaluate(labels), + description.map(context.evaluate) + ) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/JacksonSubtypeTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/JacksonSubtypeTest.scala index d7801b746..121c47060 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/JacksonSubtypeTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/JacksonSubtypeTest.scala @@ -16,6 +16,7 @@ package com.dimajix.flowman +import com.fasterxml.jackson.annotation.JsonProperty import com.fasterxml.jackson.annotation.JsonSubTypes import com.fasterxml.jackson.annotation.JsonSubTypes.Type import com.fasterxml.jackson.annotation.JsonTypeInfo @@ -23,11 +24,10 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo.As import 
com.fasterxml.jackson.annotation.JsonTypeInfo.Id import com.fasterxml.jackson.annotation.JsonTypeName import com.fasterxml.jackson.databind.ObjectMapper -import com.fasterxml.jackson.annotation.JsonProperty import com.fasterxml.jackson.databind.jsontype.NamedType import com.fasterxml.jackson.module.scala.DefaultScalaModule -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers @JsonTypeInfo(use = Id.NAME, include = As.PROPERTY, property = "type") @@ -49,7 +49,7 @@ class FooWrapper { -class JacksonSubtypeTest extends FlatSpec with Matchers { +class JacksonSubtypeTest extends AnyFlatSpec with Matchers { private val WRAPPED_ANNOTATED_FOO ="""{"foo": {"type":"annotated"}}""" private val WRAPPED_REGISTERED_FOO ="""{"foo": {"type":"registered"}}""" private val ANNOTATED_FOO = "{\"type\": \"annotated\"}" diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/JacksonTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/JacksonTest.scala index 55aa66c60..274db7801 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/JacksonTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/JacksonTest.scala @@ -6,8 +6,9 @@ import com.fasterxml.jackson.annotation.JsonProperty import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.dataformat.yaml.YAMLFactory import com.fasterxml.jackson.module.scala.DefaultScalaModule -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + object JacksonTest { @@ -37,7 +38,7 @@ object JacksonTest { } -class JacksonTest extends FlatSpec with Matchers { +class JacksonTest extends AnyFlatSpec with Matchers { val mapper = new ObjectMapper(new YAMLFactory()) mapper.registerModule(DefaultScalaModule) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/ModuleTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/ModuleTest.scala index b173f1695..217a8b49c 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/ModuleTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/ModuleTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.spec -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.RootContext @@ -27,7 +27,8 @@ import com.dimajix.flowman.model.JobIdentifier import com.dimajix.flowman.model.Module import com.dimajix.spark.testing.LocalSparkSession -class ModuleTest extends FlatSpec with Matchers with LocalSparkSession { + +class ModuleTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The Module" should "be loadable from a string" in { val spec = """ @@ -80,7 +81,7 @@ class ModuleTest extends FlatSpec with Matchers with LocalSparkSession { val project = Module.read.string(spec).toProject("default") val session = Session.builder().withSparkSession(spark).build() val context = session.getContext(project) - val executor = session.executor + val executor = session.execution val runner = session.runner val job = context.getJob(JobIdentifier("default")) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/NamespaceTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/NamespaceTest.scala index 208de7555..cecd01391 100644 --- 
a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/NamespaceTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/NamespaceTest.scala @@ -16,13 +16,13 @@ package com.dimajix.flowman.spec -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.model.Namespace -class NamespaceTest extends FlatSpec with Matchers { +class NamespaceTest extends AnyFlatSpec with Matchers { "A Namespace" should "be creatable from a spec" in { val spec = """ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/ProjectTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/ProjectTest.scala index b96d1f216..4ee37e925 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/ProjectTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/ProjectTest.scala @@ -19,14 +19,14 @@ package com.dimajix.flowman.spec import com.google.common.io.Resources import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.Path -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.hadoop.FileSystem import com.dimajix.flowman.model.Project -class ProjectTest extends FlatSpec with Matchers { +class ProjectTest extends AnyFlatSpec with Matchers { "A Project" should "be parseable from a string" in { val spec = """ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/ValueOrRangeTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/ValueOrRangeTest.scala index 550e3a725..1766ee64d 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/ValueOrRangeTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/ValueOrRangeTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.spec -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.types.ArrayValue import com.dimajix.flowman.types.FieldValue @@ -25,7 +25,7 @@ import com.dimajix.flowman.types.RangeValue import com.dimajix.flowman.types.SingleValue -class ValueOrRangeTest extends FlatSpec with Matchers { +class ValueOrRangeTest extends AnyFlatSpec with Matchers { "A FieldValue" should "be readable from a single value" in { val spec = """ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/assertion/AssertionSpecTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/assertion/AssertionSpecTest.scala new file mode 100644 index 000000000..4755bed85 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/assertion/AssertionSpecTest.scala @@ -0,0 +1,54 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.assertion + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Assertion +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.model.TestIdentifier +import com.dimajix.flowman.spec.annotation.RelationType + + +@RelationType(kind = "annotatedAssertion") +class AnnotationAssertionSpec extends AssertionSpec { + override def instantiate(context: Context): Assertion = ??? +} + +class AssertionSpecTest extends AnyFlatSpec with Matchers { + "AssertionSpec" should "support custom assertions" in { + val spec = + """ + |tests: + | main: + | assertions: + | custom: + | kind: annotatedAssertion + | description: This is a test + """.stripMargin + + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().build() + val context = session.getContext(project) + + val test = context.getTest(TestIdentifier("main")) + test.assertions("custom") shouldBe a[AnnotationAssertionSpec] + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/assertion/SqlAssertionTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/assertion/SqlAssertionTest.scala new file mode 100644 index 000000000..f6e86f9f0 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/assertion/SqlAssertionTest.scala @@ -0,0 +1,234 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.assertion + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.RootContext +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Assertion +import com.dimajix.flowman.model.AssertionResult +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.spec.ObjectMapper +import com.dimajix.spark.testing.LocalSparkSession + + +class SqlAssertionTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "The SqlAssertion" should "be parseable" in { + val spec = + """ + |kind: sql + |tests: + | - query: SELECT * FROM lala + | expected: A + | - query: SELECT * FROM lala + | expected: [A] + | - query: SELECT * FROM lolo + | expected: [A,2] + | - query: SELECT * FROM lolo + | expected: [[A,2]] + | - query: SELECT * FROM lolo + | expected: + | - A + | - B + | - query: SELECT * FROM lolo + | expected: + | - [A] + | - [B] + | - query: SELECT * FROM lolo + | expected: + | - [A,1] + | - [B,2] + |""".stripMargin + + val assertionSpec = ObjectMapper.parse[AssertionSpec](spec) + assertionSpec shouldBe a[SqlAssertionSpec] + + val context = RootContext.builder().build() + val assertion = assertionSpec.instantiate(context).asInstanceOf[SqlAssertion] + assertion.name should be ("") + assertion.tests should be (Seq( + SqlAssertion.Case( + query = "SELECT * FROM lala", + expected = Seq(Array("A")) + ), + SqlAssertion.Case( + query = "SELECT * FROM lala", + expected = Seq(Array("A")) + ), + SqlAssertion.Case( + query = "SELECT * FROM lolo", + expected = Seq(Array("A"), Array("2")) + ), + SqlAssertion.Case( + query = "SELECT * FROM lolo", + expected = Seq(Array("A", "2")) + ), + SqlAssertion.Case( + query = "SELECT * FROM lolo", + expected = Seq(Array("A"), Array("B")) + ), + SqlAssertion.Case( + query = "SELECT * FROM lolo", + expected = Seq(Array("A"), Array("B")) + ), + SqlAssertion.Case( + query = "SELECT * FROM lolo", + expected = Seq(Array("A", "1"), Array("B", "2")) + ) + )) + assertion.inputs should be (Seq(MappingOutputIdentifier("lala"), MappingOutputIdentifier("lolo"))) + assertion.requires should be (Set()) + } + + it should "support a single top level test" in { + val spec = + """ + |kind: sql + |query: SELECT * FROM lala + |expected: A + |tests: + | - query: SELECT * FROM lolo + | expected: [A] + |""".stripMargin + + val assertionSpec = ObjectMapper.parse[AssertionSpec](spec) + assertionSpec shouldBe a[SqlAssertionSpec] + + val context = RootContext.builder().build() + val assertion = assertionSpec.instantiate(context).asInstanceOf[SqlAssertion] + assertion.name should be ("") + assertion.tests should be (Seq( + SqlAssertion.Case( + query = "SELECT * FROM lala", + expected = Seq(Array("A")) + ), + SqlAssertion.Case( + query = "SELECT * FROM lolo", + expected = Seq(Array("A")) + ) + )) + assertion.inputs should be (Seq(MappingOutputIdentifier("lala"), MappingOutputIdentifier("lolo"))) + assertion.requires should be (Set()) + + } + + it should "work" in { + val session = Session.builder().withSparkSession(spark).build() + val context = session.context + val execution = session.execution + + val assertion = SqlAssertion( + Assertion.Properties(context), + Seq( + SqlAssertion.Case( + query = "SELECT COUNT(*), SUM(id) FROM mx", + expected = Seq(Array("2", "1")) + ), + SqlAssertion.Case( + query = "SELECT COUNT(*) FROM my", + expected = Seq(Array("3")) + ) + ) + ) + + assertion.inputs should be (Seq(MappingOutputIdentifier("mx"), 
MappingOutputIdentifier("my"))) + assertion.requires should be (Set()) + + val mx = execution.spark.range(2).toDF() + val my = execution.spark.range(3).toDF() + + val result = assertion.execute(execution, Map(MappingOutputIdentifier("mx") -> mx, MappingOutputIdentifier("my") -> my)) + result should be (Seq( + AssertionResult("SELECT COUNT(*), SUM(id) FROM mx", true), + AssertionResult("SELECT COUNT(*) FROM my", true) + )) + } + + it should "fail on too many columns" in { + val session = Session.builder().withSparkSession(spark).build() + val context = session.context + val execution = session.execution + + val assertion = SqlAssertion( + Assertion.Properties(context), + Seq( + SqlAssertion.Case( + query = "SELECT COUNT(*),SUM(id) FROM mx", + expected = Seq(Array("2", "1", "3")) + ) + ) + ) + + assertion.inputs should be (Seq(MappingOutputIdentifier("mx"))) + assertion.requires should be (Set()) + + val mx = execution.spark.range(2).toDF() + + val result = assertion.execute(execution, Map(MappingOutputIdentifier("mx") -> mx)) + result should be (Seq(AssertionResult("SELECT COUNT(*),SUM(id) FROM mx", false))) + } + + it should "fail on too few columns" in { + val session = Session.builder().withSparkSession(spark).build() + val context = session.context + val execution = session.execution + + val assertion = SqlAssertion( + Assertion.Properties(context), + Seq( + SqlAssertion.Case( + query = "SELECT COUNT(*),SUM(id) FROM mx", + expected = Seq(Array("2")) + ) + ) + ) + + assertion.inputs should be (Seq(MappingOutputIdentifier("mx"))) + assertion.requires should be (Set()) + + val mx = execution.spark.range(2).toDF() + + val result = assertion.execute(execution, Map(MappingOutputIdentifier("mx") -> mx)) + result should be (Seq(AssertionResult("SELECT COUNT(*),SUM(id) FROM mx", false))) + } + + it should "fail on wrong column types" in { + val session = Session.builder().withSparkSession(spark).build() + val context = session.context + val execution = session.execution + + val assertion = SqlAssertion( + Assertion.Properties(context), + Seq( + SqlAssertion.Case( + query = "SELECT COUNT(*),SUM(id) FROM mx", + expected = Seq(Array("2.0")) + ) + ) + ) + + assertion.inputs should be (Seq(MappingOutputIdentifier("mx"))) + assertion.requires should be (Set()) + + val mx = execution.spark.range(2).toDF() + + val result = assertion.execute(execution, Map(MappingOutputIdentifier("mx") -> mx)) + result should be (Seq(AssertionResult("SELECT COUNT(*),SUM(id) FROM mx", false))) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/connection/JdbcConnectionTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/connection/JdbcConnectionTest.scala index 97f6ea5ef..a5d8ad052 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/connection/JdbcConnectionTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/connection/JdbcConnectionTest.scala @@ -16,14 +16,14 @@ package com.dimajix.flowman.spec.connection -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.spec.ObjectMapper -class JdbcConnectionTest extends FlatSpec with Matchers { +class JdbcConnectionTest extends AnyFlatSpec with Matchers { "A JdbcConnction" should "be parseable" in { val spec = """ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/connection/SshConnectionTest.scala 
b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/connection/SshConnectionTest.scala index f801cc4ad..0d20bddb0 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/connection/SshConnectionTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/connection/SshConnectionTest.scala @@ -16,14 +16,14 @@ package com.dimajix.flowman.spec.connection -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.spec.ObjectMapper -class SshConnectionTest extends FlatSpec with Matchers { +class SshConnectionTest extends AnyFlatSpec with Matchers { "A SshConnction" should "be parseable" in { val spec = """ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/dataset/MappingDatasetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/dataset/MappingDatasetTest.scala new file mode 100644 index 000000000..7a1f881f7 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/dataset/MappingDatasetTest.scala @@ -0,0 +1,98 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.dataset + +import org.apache.hadoop.fs.Path +import org.apache.spark.sql.DataFrame +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.common.Yes +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.execution.OutputMode +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.BaseMapping +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.ResourceIdentifier +import com.dimajix.flowman.model.Template +import com.dimajix.flowman.spec.ObjectMapper +import com.dimajix.flowman.spec.dataset.MappingDatasetTest.DummyMappingSpec +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +object MappingDatasetTest { + case class DummyMapping( + override val context:Context, + override val name:String, + override val requires: Set[ResourceIdentifier] + ) extends BaseMapping { + protected override def instanceProperties: Mapping.Properties = Mapping.Properties(context, name) + + override def inputs: Seq[MappingOutputIdentifier] = Seq() + override def execute(execution: Execution, input: Map[MappingOutputIdentifier, DataFrame]): Map[String, DataFrame] = Map("main" -> execution.spark.emptyDataFrame) + override def describe(execution: Execution, input: Map[MappingOutputIdentifier, StructType]): Map[String, StructType] = Map("main"-> new StructType()) + } + + case class DummyMappingSpec( + name: String, + requires: Set[ResourceIdentifier] + ) extends Template[Mapping] { + override def instantiate(context: Context): 
Mapping = DummyMapping(context, name, requires) + } +} + +class MappingDatasetTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "The MappingDataset" should "be parsable" in { + val spec = + """ + |kind: mapping + |""".stripMargin + val ds = ObjectMapper.parse[DatasetSpec](spec) + ds shouldBe a[MappingDatasetSpec] + } + + it should "work" in { + val project = Project( + name="test", + mappings = Map("mapping" -> DummyMappingSpec( + "mapping", + Set(ResourceIdentifier.ofFile(new Path("file1"))) + )) + ) + + val session = Session.builder.withSparkSession(spark).build() + val context = session.getContext(project) + val executor = session.execution + + val dataset = MappingDataset( + context, + MappingOutputIdentifier("mapping") + ) + + dataset.provides should be (Set()) + dataset.requires should be (Set(ResourceIdentifier.ofFile(new Path("file1")))) + dataset.exists(executor) should be (Yes) + an[UnsupportedOperationException] should be thrownBy(dataset.clean(executor)) + dataset.read(executor, None).count() should be (0) + an[UnsupportedOperationException] should be thrownBy(dataset.write(executor, null, OutputMode.APPEND)) + dataset.describe(executor) should be (Some(new StructType())) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/dataset/RelationDatasetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/dataset/RelationDatasetTest.scala new file mode 100644 index 000000000..991371346 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/dataset/RelationDatasetTest.scala @@ -0,0 +1,102 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.dataset + +import org.apache.hadoop.fs.Path +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.common.Yes +import com.dimajix.flowman.execution.OutputMode +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.Relation +import com.dimajix.flowman.model.RelationIdentifier +import com.dimajix.flowman.model.ResourceIdentifier +import com.dimajix.flowman.model.Template +import com.dimajix.flowman.spec.ObjectMapper +import com.dimajix.flowman.types.SingleValue +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +object RelationDatasetTest { +} + +class RelationDatasetTest extends AnyFlatSpec with Matchers with MockFactory with LocalSparkSession { + "The RelationDataset" should "be parsable" in { + val spec = + """ + |kind: relation + |""".stripMargin + val ds = ObjectMapper.parse[DatasetSpec](spec) + ds shouldBe a[RelationDatasetSpec] + } + + it should "work" in { + val relation = mock[Relation] + val relationSpec = mock[Template[Relation]] + (relationSpec.instantiate _).expects(*).returns(relation) + + val project = Project( + name="test", + relations = Map("relation" -> relationSpec) + ) + + val session = Session.builder.withSparkSession(spark).build() + val context = session.getContext(project) + val executor = session.execution + + val dataset = RelationDataset( + context, + RelationIdentifier("relation"), + Map[String,SingleValue]() + ) + + (relation.provides _).expects().returns(Set(ResourceIdentifier.ofFile(new Path("provided_file")))) + (relation.resources _).expects(*).returns(Set(ResourceIdentifier.ofFile(new Path("partition_file")))) + dataset.provides should be (Set( + ResourceIdentifier.ofFile(new Path("provided_file")), + ResourceIdentifier.ofFile(new Path("partition_file")) + )) + + (relation.requires _).expects().returns(Set(ResourceIdentifier.ofFile(new Path("required_file")))) + (relation.provides _).expects().returns(Set(ResourceIdentifier.ofFile(new Path("provided_file")))) + (relation.resources _).expects(*).returns(Set(ResourceIdentifier.ofFile(new Path("partition_file")))) + dataset.requires should be (Set( + ResourceIdentifier.ofFile(new Path("required_file")), + ResourceIdentifier.ofFile(new Path("provided_file")), + ResourceIdentifier.ofFile(new Path("partition_file")) + )) + + (relation.loaded _).expects(executor,*).returns(Yes) + dataset.exists(executor) should be (Yes) + + (relation.truncate _).expects(executor,*).returns(Unit) + dataset.clean(executor) + + (relation.read _).expects(executor,None,*).returns(null) + dataset.read(executor, None) + + (relation.write _).expects(executor,spark.emptyDataFrame,*,OutputMode.APPEND).returns(Unit) + dataset.write(executor, spark.emptyDataFrame, OutputMode.APPEND) + + (relation.describe _).expects(executor).returns(new StructType()) + dataset.describe(executor) should be (Some(new StructType())) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/dataset/ValuesDatasetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/dataset/ValuesDatasetTest.scala new file mode 100644 index 000000000..b11946fc0 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/dataset/ValuesDatasetTest.scala @@ -0,0 +1,206 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * 
you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.dataset + +import org.apache.spark.sql.Row +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.common.Yes +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Dataset +import com.dimajix.flowman.model.Schema +import com.dimajix.flowman.spec.ObjectMapper +import com.dimajix.flowman.spec.schema.EmbeddedSchema +import com.dimajix.flowman.types.ArrayRecord +import com.dimajix.flowman.types.Field +import com.dimajix.flowman.types.IntegerType +import com.dimajix.flowman.types.StringType +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +class ValuesDatasetTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "The ValuesDataset" should "be parsable with columns" in { + val spec = + """ + |kind: values + |columns: + | col_a: string + | col_b: int + |records: + | - ["a",1] + | - ["b",2] + |""".stripMargin + + val session = Session.builder().build() + val context = session.context + + val ds = ObjectMapper.parse[DatasetSpec](spec) + ds shouldBe a[ValuesDatasetSpec] + + val dataset = ds.instantiate(context).asInstanceOf[ValuesDataset] + dataset.category should be ("dataset") + dataset.kind should be ("values") + dataset.records should be (Seq( + ArrayRecord("a","1"), + ArrayRecord("b", "2") + )) + } + + it should "be parseable with a schema" in { + val spec = + """ + |kind: values + |records: + | - ["a",12,3] + | - [cat,"",7] + | - [dog,null,8] + |schema: + | kind: embedded + | fields: + | - name: str_col + | type: string + | - name: int_col + | type: integer + |""".stripMargin + + val session = Session.builder().build() + val context = session.context + + val ds = ObjectMapper.parse[DatasetSpec](spec) + ds shouldBe a[ValuesDatasetSpec] + + val dataset = ds.instantiate(context).asInstanceOf[ValuesDataset] + + dataset.category should be ("dataset") + dataset.kind should be ("values") + dataset.records should be (Seq( + ArrayRecord("a","12","3"), + ArrayRecord("cat","","7"), + ArrayRecord("dog",null,"8") + )) + } + + it should "work with specified records and schema" in { + val schema = new StructType(Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + + val session = Session.builder().withSparkSession(spark).build() + val context = session.context + val executor = session.execution + + val dataset = ValuesDataset( + Dataset.Properties(context, "const"), + schema = Some(EmbeddedSchema( + Schema.Properties(context), + fields = schema.fields + )), + records = Seq( + ArrayRecord("lala","12"), + ArrayRecord("lolo","13"), + ArrayRecord("",null) + ) + ) + + dataset.describe(executor) should be (Some(schema)) + dataset.exists(executor) should be (Yes) + dataset.requires should be (Set()) + dataset.provides should be (Set()) + an[UnsupportedOperationException] should be thrownBy(dataset.clean(executor)) + an[UnsupportedOperationException] should be thrownBy(dataset.write(executor, spark.emptyDataFrame)) + + val df = 
dataset.read(executor, None) + df.schema should be (schema.sparkType) + df.collect() should be (Seq( + Row("lala", 12), + Row("lolo", 13), + Row(null,null) + )) + } + + it should "work with specified records and columns" in { + val schema = new StructType(Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + + val session = Session.builder().withSparkSession(spark).build() + val context = session.context + val executor = session.execution + + val dataset = ValuesDataset( + Dataset.Properties(context, "const"), + columns = schema.fields, + records = Seq( + ArrayRecord("lala","12"), + ArrayRecord("lolo","13"), + ArrayRecord("",null) + ) + ) + + dataset.describe(executor) should be (Some(schema)) + dataset.exists(executor) should be (Yes) + dataset.requires should be (Set()) + dataset.provides should be (Set()) + an[UnsupportedOperationException] should be thrownBy(dataset.clean(executor)) + an[UnsupportedOperationException] should be thrownBy(dataset.write(executor, spark.emptyDataFrame)) + + val df = dataset.read(executor, None) + df.schema should be (schema.sparkType) + df.collect() should be (Seq( + Row("lala", 12), + Row("lolo", 13), + Row(null,null) + )) + } + + it should "create a DataFrame with specified schema" in { + val schema = new StructType(Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + + val session = Session.builder().withSparkSession(spark).build() + val context = session.context + val executor = session.execution + + val dataset = ValuesDataset( + Dataset.Properties(context, "const"), + columns = schema.fields, + records = Seq( + ArrayRecord("lala","12"), + ArrayRecord("lolo","13"), + ArrayRecord("",null) + ) + ) + + val readSchema = org.apache.spark.sql.types.StructType(Seq( + org.apache.spark.sql.types.StructField("str_col", org.apache.spark.sql.types.StringType), + org.apache.spark.sql.types.StructField("other_col", org.apache.spark.sql.types.DoubleType) + )) + val df = dataset.read(executor, Some(readSchema)) + df.schema should be (readSchema) + df.collect() should be (Seq( + Row("lala", null), + Row("lolo", null), + Row(null,null) + )) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/history/JdbcStateStoreTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/history/JdbcStateStoreTest.scala index 457e99db0..8cc95e6cf 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/history/JdbcStateStoreTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/history/JdbcStateStoreTest.scala @@ -20,15 +20,15 @@ import java.nio.file.Files import java.nio.file.Path import org.scalatest.BeforeAndAfter -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.NoSuchConnectionException import com.dimajix.flowman.execution.Session import com.dimajix.flowman.spec.ObjectMapper -class JdbcStateStoreTest extends FlatSpec with Matchers with BeforeAndAfter { +class JdbcStateStoreTest extends AnyFlatSpec with Matchers with BeforeAndAfter { var tempDir:Path = _ before { diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/history/NullStateStoreTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/history/NullStateStoreTest.scala index 8051f4a43..5d0c1d810 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/history/NullStateStoreTest.scala +++ 
b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/history/NullStateStoreTest.scala @@ -16,13 +16,13 @@ package com.dimajix.flowman.spec.history -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.spec.ObjectMapper -class NullStateStoreTest extends FlatSpec with Matchers { +class NullStateStoreTest extends AnyFlatSpec with Matchers { "The NullStateStoreSpec" should "be parseable" in { val spec = """ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/hook/WebHookTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/hook/WebHookTest.scala index 4201d32d5..26f61a91b 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/hook/WebHookTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/hook/WebHookTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.spec.hook -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Context import com.dimajix.flowman.execution.Phase @@ -36,7 +36,7 @@ import com.dimajix.flowman.spec.target.NullTargetSpec import com.dimajix.flowman.types.StringType -class WebHookTest extends FlatSpec with Matchers { +class WebHookTest extends AnyFlatSpec with Matchers { "The WebHook" should "provide a working job API" in { val session = Session.builder() .withEnvironment("env", "some_environment") diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/job/JobTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/job/JobTest.scala index b72a3847e..782c4fa09 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/job/JobTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/job/JobTest.scala @@ -16,15 +16,12 @@ package com.dimajix.flowman.spec.job -import org.mockito.ArgumentMatchers.any -import org.mockito.Mockito.verify -import org.scalatest.FlatSpec -import org.scalatest.Matchers -import org.scalatestplus.mockito.MockitoSugar +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -import com.dimajix.flowman.annotation.TargetType import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Executor +import com.dimajix.flowman.execution.Execution import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.Session import com.dimajix.flowman.execution.Status @@ -36,11 +33,15 @@ import com.dimajix.flowman.model.JobWrapper import com.dimajix.flowman.model.Module import com.dimajix.flowman.model.NamespaceWrapper import com.dimajix.flowman.model.ProjectWrapper +import com.dimajix.flowman.model.RelationIdentifier import com.dimajix.flowman.model.Target import com.dimajix.flowman.model.TargetIdentifier +import com.dimajix.flowman.spec.annotation.TargetType +import com.dimajix.flowman.spec.relation.MockRelation import com.dimajix.flowman.spec.target.TargetSpec import com.dimajix.flowman.types.StringType + object GrabEnvironmentTarget { var environment:Map[String,Any] = Map() } @@ -51,7 +52,7 @@ case class GrabEnvironmentTarget(instanceProperties:Target.Properties) extends B * * @param executor */ - override def build(executor: Executor): Unit = { + override def build(executor: Execution): Unit = { GrabEnvironmentTarget.environment = context.environment.toMap } } @@ -62,24 +63,30 @@ class 
GrabEnvironmentTargetSpec extends TargetSpec { } -class JobTest extends FlatSpec with Matchers with MockitoSugar { +class JobTest extends AnyFlatSpec with Matchers with MockFactory { "A Job" should "be deseializable from" in { val spec = """ |targets: | grabenv: | kind: grabenv + | |jobs: | job: + | description: Some Job | targets: | - grabenv """.stripMargin - val module = Module.read.string(spec) + val project = Module.read.string(spec).toProject("project") val session = Session.builder().build() + val context = session.getContext(project) - val job = module.jobs("job") - job should not be (null) + val job = context.getJob(JobIdentifier("job")) + job.name should be ("job") + job.identifier should be (JobIdentifier("project/job")) + job.description should be (Some("Some Job")) + job.targets should be (Seq(TargetIdentifier("grabenv"))) } it should "support parameters" in { @@ -103,7 +110,7 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withProject(project).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val job = project.jobs("job").instantiate(context) @@ -117,7 +124,8 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { "p1" -> "v1", "p2" -> "v2", "p3" -> 7, - "force" -> false) + "force" -> false, + "dryRun" -> false) ) job.execute(executor, Phase.BUILD, Map("p1" -> "v1", "p2" -> "vx")) shouldBe (Status.SUCCESS) @@ -128,7 +136,8 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { "p1" -> "v1", "p2" -> "vx", "p3" -> 7, - "force" -> false) + "force" -> false, + "dryRun" -> false) ) } @@ -150,19 +159,20 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withProject(project).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val job = project.jobs("job").instantiate(context) job should not be (null) - job.execute(executor, Phase.BUILD, Map("p1" -> "2"), false) shouldBe (Status.SUCCESS) + job.execute(executor, Phase.BUILD, Map("p1" -> "2"), force=false) shouldBe (Status.SUCCESS) GrabEnvironmentTarget.environment should be (Map( "job" -> JobWrapper(job), "project" -> ProjectWrapper(project), "namespace" -> NamespaceWrapper(None), "p1" -> "2", - "force" -> false) + "force" -> false, + "dryRun" -> false) ) } @@ -183,19 +193,20 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withProject(project).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val job = project.jobs("job").instantiate(context) job should not be (null) - job.execute(executor, Phase.BUILD, Map("p1" -> "2"), false) shouldBe (Status.SUCCESS) + job.execute(executor, Phase.BUILD, Map("p1" -> "2"), force=false) shouldBe (Status.SUCCESS) GrabEnvironmentTarget.environment should be (Map( "job" -> JobWrapper(job), "project" -> ProjectWrapper(project), "namespace" -> NamespaceWrapper(None), "p1" -> 2, - "force" -> false) + "force" -> false, + "dryRun" -> false) ) } @@ -210,7 +221,7 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { val module = Module.read.string(spec) val session = Session.builder().build() - val executor = session.executor + val executor = 
session.execution val job = module.jobs("job").instantiate(session.context) job should not be (null) @@ -231,7 +242,7 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { val module = Module.read.string(spec) val session = Session.builder().build() - val executor = session.executor + val executor = session.execution val job = module.jobs("job").instantiate(session.context) job should not be (null) @@ -250,7 +261,7 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { val module = Module.read.string(spec) val session = Session.builder().build() - val executor = session.executor + val executor = session.execution val job = module.jobs("job").instantiate(session.context) job should not be (null) @@ -301,13 +312,13 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withProject(project).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val job = project.jobs("job").instantiate(context) job should not be (null) - job.execute(executor, Phase.BUILD, Map("p1" -> "v1"), false) shouldBe (Status.SUCCESS) + job.execute(executor, Phase.BUILD, Map("p1" -> "v1"), force=false) shouldBe (Status.SUCCESS) GrabEnvironmentTarget.environment should be (Map( "job" -> JobWrapper(job), "project" -> ProjectWrapper(project), @@ -315,7 +326,8 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { "p1" -> "v1", "p2" -> "v1", "p3" -> "xxv1yy", - "force" -> false) + "force" -> false, + "dryRun" -> false) ) } @@ -349,7 +361,7 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withProject(project).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val job = project.jobs("job").instantiate(context) @@ -358,7 +370,7 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { job.parameters should be (Seq(Job.Parameter("p1", StringType))) job.environment should be (Map("p2" -> "$p1", "p3" -> "xx${p2}yy")) - job.execute(executor, Phase.BUILD, Map("p1" -> "v1"), false) shouldBe (Status.SUCCESS) + job.execute(executor, Phase.BUILD, Map("p1" -> "v1"), force=false) shouldBe (Status.SUCCESS) GrabEnvironmentTarget.environment should be (Map( "job" -> JobWrapper(job), "project" -> ProjectWrapper(project), @@ -366,7 +378,8 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { "p1" -> "v1", "p2" -> "v1", "p3" -> "xxv1yy", - "force" -> false) + "force" -> false, + "dryRun" -> false) ) } @@ -400,19 +413,20 @@ class JobTest extends FlatSpec with Matchers with MockitoSugar { val project = Module.read.string(spec).toProject("default") val session = Session.builder().build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val metricSystem = executor.metrics - val metricSink = mock[MetricSink] + val metricSink = stub[MetricSink] + (metricSink.addBoard _).when(*,*).returns(Unit) + (metricSink.commit _).when(*,*).returns(Unit) + (metricSink.removeBoard _).when(*).returns(Unit) + metricSystem.addSink(metricSink) val job = context.getJob(JobIdentifier("main")) job.labels should be (Map("job_label" -> "xyz")) session.runner.executeJob(job, Seq(Phase.BUILD), Map("p1" -> "v1")) shouldBe (Status.SUCCESS) - verify(metricSink).addBoard(any(), any()) - 
verify(metricSink).commit(any(), any()) - verify(metricSink).removeBoard(any()) } } diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/AggregateMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/AggregateMappingTest.scala index 898d22077..de169dbfd 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/AggregateMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/AggregateMappingTest.scala @@ -22,18 +22,18 @@ import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping -import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Module import com.dimajix.spark.testing.LocalSparkSession -class AggregateMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class AggregateMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The Aggregation" should "group and aggregate data" in { val df = spark.createDataFrame(Seq( ("c1_v1", "c2_v1", 12, 23.0), @@ -43,7 +43,7 @@ class AggregateMappingTest extends FlatSpec with Matchers with LocalSparkSession )) val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val xfs = AggregateMapping( Mapping.Properties(session.context), @@ -96,7 +96,7 @@ class AggregateMappingTest extends FlatSpec with Matchers with LocalSparkSession val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) project.mappings.size should be (2) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/AliasMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/AliasMappingTest.scala index e40f065f1..547679dc4 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/AliasMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/AliasMappingTest.scala @@ -16,17 +16,18 @@ package com.dimajix.flowman.spec.mapping -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Module import com.dimajix.spark.testing.LocalSparkSession -class AliasMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class AliasMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { "An AliasMapping" should "be parseable" in { val spec = """ @@ -40,11 +41,16 @@ class AliasMappingTest extends FlatSpec with Matchers with LocalSparkSession { val mapping = project.mappings("my_alias") mapping shouldBe an[AliasMappingSpec] + + val session = Session.builder().build() + val context = session.getContext(project) + val 
instance = context.getMapping(MappingIdentifier("my_alias")) + instance shouldBe an[AliasMapping] } it should "support different outputs" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = AliasMapping( Mapping.Properties(session.context), diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/AssembleMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/AssembleMappingTest.scala index 31b6b5cbe..28044f073 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/AssembleMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/AssembleMappingTest.scala @@ -22,8 +22,8 @@ import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping @@ -35,12 +35,13 @@ import com.dimajix.flowman.spec.mapping.AssembleMapping.LiftEntry import com.dimajix.flowman.spec.mapping.AssembleMapping.NestEntry import com.dimajix.flowman.spec.mapping.AssembleMapping.RenameEntry import com.dimajix.flowman.spec.mapping.AssembleMapping.StructEntry +import com.dimajix.flowman.transforms.AnalysisException import com.dimajix.flowman.transforms.schema.Path import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession -class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class AssembleMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { private val inputJson = """ |{ @@ -50,8 +51,7 @@ class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession | "other_field":456 | } | }, - | "lala": { - | }, + | "lala": 23, | "embedded" : { | "structure": { | "secret": { @@ -151,14 +151,14 @@ class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession it should "transform DataFrames correctly" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = AssembleMapping( Mapping.Properties(session.context), MappingOutputIdentifier("input_df"), Seq( NestEntry("clever_name", Path("stupidName"), Seq(), Seq(Path("secret.field"))), - AppendEntry(Path(""), Seq(Path("lala"), Path("lolo")), Seq()), + AppendEntry(Path(""), Seq(Path("lala")), Seq()), AppendEntry(Path(""), Seq(), Seq(Path("stupidName"), Path("embedded.structure.secret"), Path("embedded.old_structure"))), StructEntry("sub_structure", Seq( AppendEntry(Path("embedded.old_structure"), Seq(), Seq()) @@ -175,6 +175,7 @@ class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession StructField("other_field", LongType) ))) ))), + StructField("lala", LongType), StructField("embedded", StructType(Seq( StructField("struct_array", ArrayType( StructType(Seq( @@ -186,6 +187,7 @@ class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession StructField("public", StringType) ))) ))), + StructField("lala", LongType), StructField("sub_structure", StructType(Seq( StructField("value", ArrayType(LongType)) ))), @@ -198,14 +200,14 @@ class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession it should "provide 
a correct output schema" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = AssembleMapping( Mapping.Properties(session.context), MappingOutputIdentifier("input_df"), Seq( NestEntry("clever_name", Path("stupidName"), Seq(), Seq(Path("secret.field"))), - AppendEntry(Path(""), Seq(Path("lala"), Path("lolo")), Seq()), + AppendEntry(Path(""), Seq(Path("lala")), Seq()), AppendEntry(Path(""), Seq(), Seq(Path("stupidName"), Path("embedded.structure.secret"), Path("embedded.old_structure"))), StructEntry("sub_structure", Seq( AppendEntry(Path("embedded.old_structure"), Seq(), Seq()) @@ -220,6 +222,7 @@ class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession StructField("other_field", LongType) )), true) )), true), + StructField("lala", LongType, true), StructField("embedded", StructType(Seq( StructField("struct_array", ArrayType( StructType(Seq( @@ -231,6 +234,7 @@ class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession StructField("public", StringType) )), true) )), true), + StructField("lala", LongType, true), StructField("sub_structure", StructType(Seq( StructField("value", ArrayType(LongType)) )), true), @@ -243,7 +247,7 @@ class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession it should "support explodes of complex types with rename" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = AssembleMapping( Mapping.Properties(session.context), @@ -270,9 +274,31 @@ class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession outputDf.count() should be (2) } + it should "throw an exception on missing fields in 'keep'" in { + val session = Session.builder().withSparkSession(spark).build() + val executor = session.execution + + val mapping = AssembleMapping( + Mapping.Properties(session.context), + MappingOutputIdentifier("input_df"), + Seq( + NestEntry("clever_name", Path("stupidName"), Seq(), Seq(Path("secret.field"))), + AppendEntry(Path(""), Seq(Path("lala"), Path("lolo")), Seq()), + AppendEntry(Path(""), Seq(), Seq(Path("stupidName"), Path("embedded.structure.secret"), Path("embedded.old_structure"))), + StructEntry("sub_structure", Seq( + AppendEntry(Path("embedded.old_structure"), Seq(), Seq()) + )), + LiftEntry(Path("stupidName"), Seq(Path("secret.field"))) + ) + ) + + an[AnalysisException] shouldBe thrownBy(mapping.execute(executor, Map(MappingOutputIdentifier("input_df") -> inputDf))) + an[AnalysisException] shouldBe thrownBy(mapping.describe(executor, Map(MappingOutputIdentifier("input_df") -> ftypes.StructType.of(inputDf.schema)), "main")) + } + it should "support explodes of complex types without rename" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = AssembleMapping( Mapping.Properties(session.context), @@ -301,7 +327,7 @@ class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession it should "support explodes of simple types" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = AssembleMapping( Mapping.Properties(session.context), @@ -325,7 +351,7 @@ class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession it should "throw exceptions on explodes of 
non-existing paths" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = AssembleMapping( Mapping.Properties(session.context), @@ -340,7 +366,7 @@ class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession it should "support rename operations" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = AssembleMapping( Mapping.Properties(session.context), @@ -366,7 +392,7 @@ class AssembleMappingTest extends FlatSpec with Matchers with LocalSparkSession it should "not throw an exception on renames of non-existing fields" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = AssembleMapping( Mapping.Properties(session.context), diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/CaseMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/CaseMappingTest.scala new file mode 100644 index 000000000..b5e0d32c9 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/CaseMappingTest.scala @@ -0,0 +1,65 @@ +/* + * Copyright 2018-2019 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.mapping + +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Module +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + + +class CaseMappingTest extends AnyFlatSpec with Matchers { + val spec = + """ + |mappings: + | switch: + | kind: case + | cases: + | - condition: $env == '1' + | input: in_1 + | - condition: $env == '2' + | input: in_2 + | - condition: true + | input: default + |""".stripMargin + + "A CaseMapping" should "select the default case" in { + val project = Module.read.string(spec).toProject("project") + val mapping = project.mappings("switch") + + mapping shouldBe an[CaseMappingSpec] + + val session = Session.builder().withEnvironment("env", "").build() + val context = session.getContext(project) + val instance = context.getMapping(MappingIdentifier("switch")).asInstanceOf[AliasMapping] + instance shouldBe an[AliasMapping] + instance.input should be (MappingOutputIdentifier("default")) + } + + it should "select the first valid case" in { + val project = Module.read.string(spec).toProject("project") + + val session = Session.builder().withEnvironment("env", "2").build() + val context = session.getContext(project) + val instance = context.getMapping(MappingIdentifier("switch")).asInstanceOf[AliasMapping] + instance shouldBe an[AliasMapping] + instance.input should be (MappingOutputIdentifier("in_2")) + + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/CoalesceMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/CoalesceMappingTest.scala new file mode 100644 index 000000000..10f12d1ba --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/CoalesceMappingTest.scala @@ -0,0 +1,77 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.mapping + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +class CoalesceMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "A CoalesceMapping" should "be parseable" in { + val spec = + """ + |mappings: + | m1: + | kind: coalesce + | input: some_mapping + | partitions: 1 + """.stripMargin + + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().withSparkSession(spark).build() + val context = session.getContext(project) + + val mapping = project.mappings("m1") + mapping shouldBe a[CoalesceMappingSpec] + + val instance = context.getMapping(MappingIdentifier("m1")) + instance shouldBe a[CoalesceMapping] + + val typedInstance = instance.asInstanceOf[CoalesceMapping] + typedInstance.input should be (MappingOutputIdentifier("some_mapping")) + typedInstance.outputs should be (Seq("main")) + typedInstance.partitions should be (1) + } + + it should "work" in { + val session = Session.builder().withSparkSession(spark).build() + val executor = session.execution + + val input = spark.range(100).repartition(10).toDF() + val inputSchema = StructType.of(input.schema) + + val mapping = CoalesceMapping( + Mapping.Properties(session.context), + MappingOutputIdentifier("input"), + 1 + ) + + mapping.describe(executor, Map(MappingOutputIdentifier("input") -> inputSchema)) should be (Map("main" -> inputSchema)) + + val result = mapping.execute(executor, Map(MappingOutputIdentifier("input") -> input))("main") + result.rdd.partitions.size should be (1) + result.count() should be (100) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ConformMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ConformMappingTest.scala index 191c331d6..3ca68fb5f 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ConformMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ConformMappingTest.scala @@ -24,11 +24,12 @@ import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Module import com.dimajix.flowman.transforms.CaseFormat @@ -36,7 +37,7 @@ import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession -class ConformMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class ConformMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { private val inputJson = """ |{ @@ -85,11 +86,16 @@ class ConformMappingTest extends FlatSpec with Matchers with LocalSparkSession { val mapping = project.mappings("my_structure") mapping shouldBe an[ConformMappingSpec] + + val session =
Session.builder().build() + val context = session.getContext(project) + val instance = context.getMapping(MappingIdentifier("my_structure")) + instance shouldBe an[ConformMapping] } it should "support changing types in DataFrames" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = ConformMapping( Mapping.Properties(session.context), @@ -122,7 +128,7 @@ class ConformMappingTest extends FlatSpec with Matchers with LocalSparkSession { it should "throw an error for arrays" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = ConformMapping( Mapping.Properties(session.context), @@ -137,7 +143,7 @@ class ConformMappingTest extends FlatSpec with Matchers with LocalSparkSession { it should "support renaming fields" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = ConformMapping( Mapping.Properties(session.context), @@ -168,7 +174,7 @@ class ConformMappingTest extends FlatSpec with Matchers with LocalSparkSession { it should "support flattening nested structures" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = ConformMapping( Mapping.Properties(session.context), diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/DeduplicateMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/DeduplicateMappingTest.scala index b23c83936..d45cf832e 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/DeduplicateMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/DeduplicateMappingTest.scala @@ -16,13 +16,13 @@ package com.dimajix.flowman.spec.mapping -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping -import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Module import com.dimajix.flowman.spec.mapping.DeduplicateMappingTest.Record import com.dimajix.flowman.types.Field @@ -37,48 +37,61 @@ object DeduplicateMappingTest { } -class DeduplicateMappingTest extends FlatSpec with Matchers with LocalSparkSession { - - "The DeduplicateMapping" should "work without list of columns" in { - val sparkSession = spark - import sparkSession.implicits._ +class DeduplicateMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "The DeduplicateMapping" should "be parseable" in { val spec = """ |mappings: - | dummy: - | kind: provided - | table: my_table | dedup: | kind: deduplicate | input: dummy """.stripMargin val project = Module.read.string(spec).toProject("project") - project.mappings.keys should contain("dedup") + val mapping = project.mappings("dedup") - val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + mapping shouldBe an[DeduplicateMappingSpec] + + val session = Session.builder().build() val context = session.getContext(project) - executor.spark.createDataFrame(Seq( + val instance = context.getMapping(MappingIdentifier("dedup")) + 
instance should not be null + instance shouldBe a[DeduplicateMapping] + } + + it should "work without list of columns" in { + val sparkSession = spark + import sparkSession.implicits._ + + val session = Session.builder().withSparkSession(spark).build() + val executor = session.execution + val context = session.context + + val mapping = DeduplicateMapping( + Mapping.Properties(context), + MappingOutputIdentifier("input"), + Seq("c1") + ) + + val input = executor.spark.createDataFrame(Seq( Record("c1_v1", "c2_v1"), Record("c1_v1", "c2_v2"), Record("c1_v1", "c2_v2") - )).createOrReplaceTempView("my_table") - - val mapping = context.getMapping(MappingIdentifier("dedup")) - mapping should not be null + )) - val df = executor.instantiate(mapping, "main") - val rows = df.as[Record].collect() - rows.size should be(2) + // Verify execution + val result = mapping.execute(executor, Map(MappingOutputIdentifier("input") -> input))("main") + result.schema should be(input.schema) + val rows = result.as[Record].collect() + rows.size should be(1) // Verify schema - val inputSchema = StructType(Seq( + val inputSchema = StructType(Seq( Field("c1", StringType), Field("c2", IntegerType, nullable = true) )) - mapping.describe(executor, Map(MappingOutputIdentifier("dummy") -> inputSchema)) should be (Map("main" -> inputSchema)) + mapping.describe(executor, Map(MappingOutputIdentifier("input") -> inputSchema)) should be (Map("main" -> inputSchema)) } it should "work with an explicit column list" in { @@ -86,7 +99,7 @@ class DeduplicateMappingTest extends FlatSpec with Matchers with LocalSparkSessi import sparkSession.implicits._ val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.context val mapping = DeduplicateMapping( @@ -118,7 +131,7 @@ class DeduplicateMappingTest extends FlatSpec with Matchers with LocalSparkSessi ).toDS) val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.context val mapping = DeduplicateMapping( diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/DistinctMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/DistinctMappingTest.scala new file mode 100644 index 000000000..0f6d205c7 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/DistinctMappingTest.scala @@ -0,0 +1,104 @@ +/* + * Copyright 2018 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.mapping + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.spec.mapping.DeduplicateMappingTest.Record +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +class DistinctMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { + + "The DistinctMapping" should "be parseable" in { + val spec = + """ + |mappings: + | dedup: + | kind: distinct + | input: dummy + """.stripMargin + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().withSparkSession(spark).build() + val context = session.getContext(project) + + val mapping = project.mappings("dedup") + mapping shouldBe a[DistinctMappingSpec] + + val instance = context.getMapping(MappingIdentifier("dedup")) + instance shouldBe a[DistinctMapping] + } + + it should "work" in { + val sparkSession = spark + import sparkSession.implicits._ + + val session = Session.builder().withSparkSession(spark).build() + val executor = session.execution + val context = session.context + + val mapping = DistinctMapping( + Mapping.Properties(context), + MappingOutputIdentifier("input") + ) + + val input = executor.spark.createDataFrame(Seq( + Record("c1_v1", "c2_v1"), + Record("c1_v1", "c2_v2"), + Record("c1_v1", "c2_v2") + )) + + val result = mapping.execute(executor, Map(MappingOutputIdentifier("input") -> input))("main") + result.schema should be(input.schema) + val rows = result.as[Record].collect() + rows.size should be(2) + } + + it should "work with nested columns" in { + val sparkSession = spark + import sparkSession.implicits._ + + val input = spark.read.json(Seq( + """{"some_struct":{"f_1":12, "f_2":22},"other_struct":{"integer":13}}""", + """{"some_struct":{"f_1":12, "f_2":22},"other_struct":{"integer":13}}""", + """{"some_struct":{"f_1":12, "f_2":23},"other_struct":{"integer":13}}""" + ).toDS) + val inputSchema = StructType.of(input.schema) + + val session = Session.builder().withSparkSession(spark).build() + val executor = session.execution + val context = session.context + + val mapping = DistinctMapping( + Mapping.Properties(context), + MappingOutputIdentifier("input") + ) + + mapping.describe(executor, Map(MappingOutputIdentifier("input") -> inputSchema)) should be (Map("main" -> inputSchema)) + + val result = mapping.execute(executor, Map(MappingOutputIdentifier("input") -> input))("main") + result.schema should be(input.schema) + result.count should be(2) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/DropMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/DropMappingTest.scala index 7c83b2110..594cffe0a 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/DropMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/DropMappingTest.scala @@ -19,11 +19,12 @@ package com.dimajix.flowman.spec.mapping import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import 
com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Module import com.dimajix.flowman.transforms.schema.Path @@ -31,7 +32,7 @@ import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession -class DropMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class DropMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The DropMapping" should "be parseable" in { val spec = """ @@ -46,6 +47,11 @@ class DropMappingTest extends FlatSpec with Matchers with LocalSparkSession { val project = Module.read.string(spec).toProject("project") project.mappings.keys should contain("drop") project.mappings("drop") shouldBe a[DropMappingSpec] + + val session = Session.builder().build() + val context = session.getContext(project) + val instance = context.getMapping(MappingIdentifier("drop")) + instance shouldBe an[DropMapping] } it should "drop known columns" in { @@ -57,7 +63,7 @@ class DropMappingTest extends FlatSpec with Matchers with LocalSparkSession { ) val inputDf = records.toDF val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = DropMapping( Mapping.Properties(session.context), @@ -85,7 +91,7 @@ class DropMappingTest extends FlatSpec with Matchers with LocalSparkSession { ) val inputDf = records.toDF val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = DropMapping( Mapping.Properties(session.context), diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ExplodeMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ExplodeMappingTest.scala index 98f991e00..9a38f8901 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ExplodeMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ExplodeMappingTest.scala @@ -22,8 +22,8 @@ import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping @@ -35,7 +35,7 @@ import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession -class ExplodeMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class ExplodeMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { private val inputJson = """ |{ @@ -94,7 +94,7 @@ class ExplodeMappingTest extends FlatSpec with Matchers with LocalSparkSession { it should "work" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = ExplodeMapping( Mapping.Properties(session.context), @@ -133,7 +133,7 @@ class ExplodeMappingTest extends FlatSpec with Matchers with LocalSparkSession { it should "select specified columns" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = ExplodeMapping( Mapping.Properties(session.context), 
@@ -170,7 +170,7 @@ class ExplodeMappingTest extends FlatSpec with Matchers with LocalSparkSession { it should "support renaming columns" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = ExplodeMapping( Mapping.Properties(session.context), @@ -208,7 +208,7 @@ class ExplodeMappingTest extends FlatSpec with Matchers with LocalSparkSession { it should "explode simple arrays" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = ExplodeMapping( Mapping.Properties(session.context), diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ExtendMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ExtendMappingTest.scala index d3d892a20..47bd9ec2b 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ExtendMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ExtendMappingTest.scala @@ -17,18 +17,18 @@ package com.dimajix.flowman.spec.mapping import org.apache.spark.sql.Row -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping -import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Module import com.dimajix.spark.testing.LocalSparkSession -class ExtendMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class ExtendMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The SqlExtend" should "work" in { val df = spark.createDataFrame(Seq( ("col1", 12), @@ -36,7 +36,7 @@ class ExtendMappingTest extends FlatSpec with Matchers with LocalSparkSession { )) val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val xfs = ExtendMapping( Mapping.Properties(session.context), @@ -61,7 +61,7 @@ class ExtendMappingTest extends FlatSpec with Matchers with LocalSparkSession { )) val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val xfs = ExtendMapping( Mapping.Properties(session.context), @@ -95,7 +95,7 @@ class ExtendMappingTest extends FlatSpec with Matchers with LocalSparkSession { )) val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val xfs = ExtendMapping( Mapping.Properties(session.context), @@ -119,7 +119,7 @@ class ExtendMappingTest extends FlatSpec with Matchers with LocalSparkSession { it should "detect dependency cycles" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val xfs = ExtendMapping( Mapping.Properties(session.context), @@ -156,7 +156,7 @@ class ExtendMappingTest extends FlatSpec with Matchers with LocalSparkSession { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) project.mappings.size should be (2) diff --git 
a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ExtractJsonMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ExtractJsonMappingTest.scala index c39519b9f..3c32b71d2 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ExtractJsonMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ExtractJsonMappingTest.scala @@ -23,18 +23,18 @@ import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session -import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Module import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession -class ExtractJsonMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class ExtractJsonMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The ExtractJsonMapping" should "be parseable" in { val spec = """ @@ -97,7 +97,7 @@ class ExtractJsonMappingTest extends FlatSpec with Matchers with LocalSparkSessi val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val input = executor.spark.createDataFrame(Seq( @@ -151,7 +151,7 @@ class ExtractJsonMappingTest extends FlatSpec with Matchers with LocalSparkSessi val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val input = executor.spark.createDataFrame(Seq( @@ -202,7 +202,7 @@ class ExtractJsonMappingTest extends FlatSpec with Matchers with LocalSparkSessi val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val input = executor.spark.createDataFrame(Seq( @@ -250,7 +250,7 @@ class ExtractJsonMappingTest extends FlatSpec with Matchers with LocalSparkSessi val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val input = executor.spark.createDataFrame(Seq( @@ -290,7 +290,7 @@ class ExtractJsonMappingTest extends FlatSpec with Matchers with LocalSparkSessi val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val input = executor.spark.createDataFrame(Seq( @@ -339,7 +339,7 @@ class ExtractJsonMappingTest extends FlatSpec with Matchers with LocalSparkSessi val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = 
session.execution val context = session.getContext(project) val input = executor.spark.createDataFrame(Seq( @@ -379,7 +379,7 @@ class ExtractJsonMappingTest extends FlatSpec with Matchers with LocalSparkSessi val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val input = executor.spark.createDataFrame(Seq( @@ -410,7 +410,7 @@ class ExtractJsonMappingTest extends FlatSpec with Matchers with LocalSparkSessi val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val input = executor.spark.createDataFrame(Seq( diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/FilterMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/FilterMappingTest.scala new file mode 100644 index 000000000..9ffb2d0a5 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/FilterMappingTest.scala @@ -0,0 +1,76 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.mapping + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +class FilterMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "An FilterMapping" should "be parseable" in { + val spec = + """ + |mappings: + | m1: + | kind: filter + | input: some_mapping + | condition: "value < 50" + """.stripMargin + + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().withSparkSession(spark).build() + val context = session.getContext(project) + + val mapping = project.mappings("m1") + mapping shouldBe a[FilterMappingSpec] + + val instance = context.getMapping(MappingIdentifier("m1")) + instance shouldBe a[FilterMapping] + + val filter = instance.asInstanceOf[FilterMapping] + filter.input should be (MappingOutputIdentifier("some_mapping")) + filter.outputs should be (Seq("main")) + filter.condition should be ("value < 50") + } + + it should "work" in { + val session = Session.builder().withSparkSession(spark).build() + val executor = session.execution + + val input = spark.range(100).toDF() + val inputSchema = StructType.of(input.schema) + + val mapping = FilterMapping( + Mapping.Properties(session.context), + MappingOutputIdentifier("input"), + "id < 50" + ) + + mapping.describe(executor, Map(MappingOutputIdentifier("input") -> inputSchema)) should be (Map("main" -> inputSchema)) + + val result = mapping.execute(executor, Map(MappingOutputIdentifier("input") -> input))("main") + result.count() should be (50) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/FlattenMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/FlattenMappingTest.scala index 48b9857f3..4e71594b4 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/FlattenMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/FlattenMappingTest.scala @@ -19,11 +19,12 @@ package com.dimajix.flowman.spec.mapping import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Module import com.dimajix.flowman.transforms.CaseFormat @@ -31,7 +32,7 @@ import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession -class FlattenMappingTest extends FlatSpec with Matchers with LocalSparkSession{ +class FlattenMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession{ "A FlattenMapping" should "be parseable" in { val spec = """ @@ -43,8 +44,12 @@ class FlattenMappingTest extends FlatSpec with Matchers with LocalSparkSession{ val project = Module.read.string(spec).toProject("project") val mapping = project.mappings("my_structure") - mapping shouldBe an[FlattenMappingSpec] + + val session = 
Session.builder().build() + val context = session.getContext(project) + val instance = context.getMapping(MappingIdentifier("my_structure")) + instance shouldBe an[FlattenMapping] } it should "flatten nested structures" in { @@ -63,7 +68,7 @@ class FlattenMappingTest extends FlatSpec with Matchers with LocalSparkSession{ import spark.implicits._ val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val inputRecords = Seq(inputJson.replace("\n","")) val inputDs = spark.createDataset(inputRecords) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/HistorizeMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/HistorizeMappingTest.scala index de1eb51cd..0bdc33681 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/HistorizeMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/HistorizeMappingTest.scala @@ -24,25 +24,25 @@ import org.apache.spark.sql.types.LongType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping -import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Module import com.dimajix.spark.sql.catalyst.SqlBuilder import com.dimajix.spark.testing.LocalSparkSession -class HistorizeMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class HistorizeMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The HistorizeMapping" should "extract the latest version" in { val spark = this.spark import spark.implicits._ val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val json_1 = Seq( """{"ts":123,"id":12, "a":[12,2], "op":"CREATE"}""", @@ -100,7 +100,7 @@ class HistorizeMappingTest extends FlatSpec with Matchers with LocalSparkSession import spark.implicits._ val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val json_1 = Seq( """{"ts":123,"id":12}""" @@ -149,7 +149,7 @@ class HistorizeMappingTest extends FlatSpec with Matchers with LocalSparkSession import spark.implicits._ val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val json_1 = Seq( """{"version_major":1, "version_minor":1, "ts":123,"id":12, "a":[12,2], "op":"CREATE"}""", @@ -232,7 +232,7 @@ class HistorizeMappingTest extends FlatSpec with Matchers with LocalSparkSession val session = Session.builder().withSparkSession(spark).build() val project = Module.read.string(spec).toProject("default") val context = session.getContext(project) - val executor = session.executor + val executor = session.execution val mapping = context.getMapping(MappingIdentifier("history")) val df = executor.instantiate(mapping, "main") diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/InputMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/InputMappingTest.scala index 9cdcf3ee4..ec0c1795d 
100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/InputMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/InputMappingTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.spec.mapping -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.MappingIdentifier @@ -29,7 +29,7 @@ import com.dimajix.flowman.types.StructType import com.dimajix.spark.testing.LocalSparkSession -class InputMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class InputMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The ReadRelationMapping" should "be able to read from a NullRelation" in { val spec = """ @@ -49,7 +49,7 @@ class InputMappingTest extends FlatSpec with Matchers with LocalSparkSession { project.mappings.keys should contain("empty") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val mapping = context.getMapping(MappingIdentifier("empty")) @@ -89,7 +89,7 @@ class InputMappingTest extends FlatSpec with Matchers with LocalSparkSession { project.mappings.keys should contain("empty") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val mapping = context.getMapping(MappingIdentifier("empty")) @@ -134,7 +134,7 @@ class InputMappingTest extends FlatSpec with Matchers with LocalSparkSession { project.mappings.keys should contain("empty") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val mapping = context.getMapping(MappingIdentifier("empty")) @@ -176,7 +176,7 @@ class InputMappingTest extends FlatSpec with Matchers with LocalSparkSession { project.mappings.keys should contain("empty") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val mapping = context.getMapping(MappingIdentifier("empty")) @@ -222,7 +222,7 @@ class InputMappingTest extends FlatSpec with Matchers with LocalSparkSession { project.mappings.keys should contain("empty") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val mapping = context.getMapping(MappingIdentifier("empty")) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/JoinMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/JoinMappingTest.scala index a9fa8da9d..01c75dd67 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/JoinMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/JoinMappingTest.scala @@ -23,8 +23,8 @@ import org.apache.spark.sql.types.IntegerType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session 
import com.dimajix.flowman.model.Mapping @@ -33,7 +33,7 @@ import com.dimajix.flowman.spec.ObjectMapper import com.dimajix.spark.testing.LocalSparkSession -class JoinMappingTest extends FlatSpec with Matchers with LocalSparkSession{ +class JoinMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession{ "The JoinMapping" should "support joining on columns" in { val df1 = spark.createDataFrame(Seq( Row("col1", 12), @@ -59,7 +59,7 @@ class JoinMappingTest extends FlatSpec with Matchers with LocalSparkSession{ ) val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = JoinMapping( Mapping.Properties(session.context), @@ -112,7 +112,7 @@ class JoinMappingTest extends FlatSpec with Matchers with LocalSparkSession{ ) val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = JoinMapping( Mapping.Properties(session.context), diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/MappingSpecTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/MappingSpecTest.scala new file mode 100644 index 000000000..85ed9d7a0 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/MappingSpecTest.scala @@ -0,0 +1,45 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.mapping + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.spec.annotation.RelationType + + +@RelationType(kind = "annotatedMapping") +class AnnotationMappingSpec extends MappingSpec { + override def instantiate(context: Context): Mapping = ??? +} + +class MappingSpecTest extends AnyFlatSpec with Matchers { + "MappingSpec" should "support custom mappings" in { + val spec = + """ + |mappings: + | custom: + | kind: annotatedMapping + """.stripMargin + val module = Module.read.string(spec) + module.mappings.keys should contain("custom") + module.mappings("custom") shouldBe a[AnnotationMappingSpec] + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/MockMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/MockMappingTest.scala new file mode 100644 index 000000000..e0f72881d --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/MockMappingTest.scala @@ -0,0 +1,243 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.mapping + +import org.apache.spark.sql.Row +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.execution.RootContext +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.Template +import com.dimajix.flowman.types.ArrayRecord +import com.dimajix.flowman.types.Field +import com.dimajix.flowman.types.IntegerType +import com.dimajix.flowman.types.StringType +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +class MockMappingTest extends AnyFlatSpec with Matchers with MockFactory with LocalSparkSession { + "The MockMapping" should "be parseable" in { + val spec = + """ + |mappings: + | empty: + | kind: null + | fields: + | str_col: string + | int_col: integer + | + | mock: + | kind: mock + | mapping: empty + | records: + | - ["a",12,3] + | - [cat,"",7] + | - [dog,null,8] + |""".stripMargin + + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().build() + val context = session.getContext(project) + + val mapping = context.getMapping(MappingIdentifier("mock")).asInstanceOf[MockMapping] + mapping shouldBe a[MockMapping] + + mapping.category should be ("mapping") + mapping.kind should be ("mock") + mapping.mapping should be (MappingIdentifier("empty")) + mapping.output should be (MappingOutputIdentifier("project/mock:main")) + mapping.outputs should be (Seq("main")) + mapping.records should be (Seq( + ArrayRecord("a","12","3"), + ArrayRecord("cat","","7"), + ArrayRecord("dog",null,"8") + )) + } + + it should "create empty DataFrames" in { + val baseMappingTemplate = mock[Template[Mapping]] + val baseMapping = mock[Mapping] + val mockMappingTemplate = mock[Template[Mapping]] + + val project = Project( + "my_project", + mappings = Map( + "base" -> baseMappingTemplate, + "mock" -> mockMappingTemplate + ) + ) + val otherSchema = new StructType(Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + val errorSchema = new StructType(Seq( + Field("error", StringType) + )) + + val session = Session.builder().withSparkSession(spark).build() + val context = session.getContext(project) + val executor = session.execution + + val mockMapping = MockMapping( + Mapping.Properties(context, "mock"), + MappingIdentifier("base") + ) + + (mockMappingTemplate.instantiate _).expects(context).returns(mockMapping) + val mapping = context.getMapping(MappingIdentifier("mock")) + mapping shouldBe a[MockMapping] + mapping.category should be ("mapping") + + (baseMappingTemplate.instantiate _).expects(context).returns(baseMapping) + (baseMapping.outputs _).expects().anyNumberOfTimes().returns(Seq("other", "error")) + mapping.outputs should be 
(Seq("other", "error")) + + (baseMapping.output _).expects().returns(MappingOutputIdentifier("base", "other", Some(project.name))) + mapping.output should be (MappingOutputIdentifier("my_project/mock:other")) + + (baseMapping.inputs _).expects().anyNumberOfTimes().returns(Seq()) + (baseMapping.describe:(Execution,Map[MappingOutputIdentifier,StructType],String) => StructType).expects(executor,*,"other") + .anyNumberOfTimes().returns(otherSchema) + (baseMapping.describe:(Execution,Map[MappingOutputIdentifier,StructType],String) => StructType).expects(executor,*,"error") + .anyNumberOfTimes().returns(errorSchema) + mapping.describe(executor, Map()) should be (Map( + "other" -> otherSchema, + "error" -> errorSchema + )) + + mapping.describe(executor, Map(), "other") should be (otherSchema) + + val dfOther = executor.instantiate(mapping, "other") + dfOther.columns should contain("str_col") + dfOther.columns should contain("int_col") + dfOther.count() should be (0) + + val dfError = executor.instantiate(mapping, "error") + dfError.columns should contain("error") + dfError.count() should be (0) + } + + it should "work nicely as an override" in { + val baseMappingTemplate = mock[Template[Mapping]] + val baseMapping = mock[Mapping] + val mockMappingTemplate = mock[Template[Mapping]] + + val project = Project( + "my_project", + mappings = Map( + "mock" -> baseMappingTemplate + ) + ) + val schema = new StructType(Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + + val session = Session.builder().withSparkSession(spark).build() + val rootContext = RootContext.builder(session.context) + .overrideMappings(Map( + MappingIdentifier("mock", "my_project") -> mockMappingTemplate + )) + .build() + val context = rootContext.getProjectContext(project) + val executor = session.execution + + val mockMapping = MockMapping( + Mapping.Properties(context, "mock"), + MappingIdentifier("mock") + ) + + (mockMappingTemplate.instantiate _).expects(context).returns(mockMapping) + val mapping = context.getMapping(MappingIdentifier("mock")) + + (baseMappingTemplate.instantiate _).expects(context).returns(baseMapping) + (baseMapping.outputs _).expects().anyNumberOfTimes().returns(Seq("main")) + mapping.outputs should be (Seq("main")) + + (baseMapping.output _).expects().returns(MappingOutputIdentifier("mock", "main", Some(project.name))) + mapping.output should be (MappingOutputIdentifier("my_project/mock:main")) + + (baseMapping.inputs _).expects().anyNumberOfTimes().returns(Seq()) + (baseMapping.describe:(Execution,Map[MappingOutputIdentifier,StructType],String) => StructType).expects(executor,*,"main") + .anyNumberOfTimes().returns(schema) + mapping.describe(executor, Map()) should be (Map("main" -> schema)) + mapping.describe(executor, Map(), "main") should be (schema) + + val dfOther = executor.instantiate(mapping, "main") + dfOther.columns should contain("str_col") + dfOther.columns should contain("int_col") + dfOther.count() should be (0) + } + + it should "work with specified records" in { + val baseMappingTemplate = mock[Template[Mapping]] + val baseMapping = mock[Mapping] + val mockMappingTemplate = mock[Template[Mapping]] + + val project = Project( + "my_project", + mappings = Map( + "base" -> baseMappingTemplate, + "mock" -> mockMappingTemplate + ) + ) + val schema = new StructType(Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + + val session = Session.builder().withSparkSession(spark).build() + val context = session.getContext(project) + val executor = 
session.execution + + val mockMapping = MockMapping( + Mapping.Properties(context, "mock"), + MappingIdentifier("base"), + Seq( + ArrayRecord("lala","12"), + ArrayRecord("lolo","13"), + ArrayRecord("",null) + ) + ) + + (mockMappingTemplate.instantiate _).expects(context).returns(mockMapping) + val mapping = context.getMapping(MappingIdentifier("mock")) + + (baseMappingTemplate.instantiate _).expects(context).returns(baseMapping) + (baseMapping.outputs _).expects().anyNumberOfTimes().returns(Seq("main")) + (baseMapping.inputs _).expects().anyNumberOfTimes().returns(Seq()) + (baseMapping.describe:(Execution,Map[MappingOutputIdentifier,StructType],String) => StructType).expects(executor,*,"main") + .anyNumberOfTimes().returns(schema) + + val df = executor.instantiate(mapping, "main") + df.schema should be (schema.sparkType) + df.collect() should be (Seq( + Row("lala", 12), + Row("lolo", 13), + Row(null,null) + )) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/NullMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/NullMappingTest.scala new file mode 100644 index 000000000..9261caafe --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/NullMappingTest.scala @@ -0,0 +1,180 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.mapping + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.model.Schema +import com.dimajix.flowman.spec.schema.EmbeddedSchema +import com.dimajix.flowman.types.Field +import com.dimajix.flowman.types.IntegerType +import com.dimajix.flowman.types.StringType +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +class NullMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "The NullMapping" should "be parseable" in { + val spec = + """ + |mappings: + | empty1: + | kind: null + | fields: + | str_col: string + | int_col: integer + | + | empty2: + | kind: empty + | schema: + | kind: embedded + | fields: + | - name: str_col + | type: string + | - name: int_col + | type: integer + |""".stripMargin + + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().build() + val context = session.getContext(project) + + val mapping1 = context.getMapping(MappingIdentifier("empty1")).asInstanceOf[NullMapping] + mapping1 shouldBe a[NullMapping] + + mapping1.category should be ("mapping") + mapping1.kind should be ("null") + mapping1.fields should be (Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + mapping1.schema should be (None) + mapping1.output should be (MappingOutputIdentifier("project/empty1:main")) + mapping1.outputs should be (Seq("main")) + } + + it should "create empty DataFrames with specified columns" in { + val session = Session.builder().withSparkSession(spark).build() + val context = session.context + val executor = session.execution + + val mapping = NullMapping( + Mapping.Properties(context, "empty"), + Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + ), + None + ) + + mapping.category should be ("mapping") + //mapping.kind should be ("null") + mapping.outputs should be (Seq("main")) + mapping.output should be (MappingOutputIdentifier("empty")) + + mapping.describe(executor, Map()) should be (Map( + "main" -> new StructType(Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + )) + mapping.describe(executor, Map(), "main") should be ( + new StructType(Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + ) + + val df = executor.instantiate(mapping, "main") + df.columns should contain("str_col") + df.columns should contain("int_col") + df.count() should be (0) + } + + it should "create empty DataFrames with specified schema" in { + val session = Session.builder().withSparkSession(spark).build() + val context = session.context + val executor = session.execution + + val mapping = NullMapping( + Mapping.Properties(context, "empty"), + Seq(), + Some(EmbeddedSchema( + Schema.Properties(context), + fields = Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + ) + )) + ) + + mapping.category should be ("mapping") + //mapping.kind should be ("null") + mapping.outputs should be (Seq("main")) + mapping.output should be (MappingOutputIdentifier("empty")) + + mapping.describe(executor, Map()) should be (Map( + "main" -> new StructType(Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + )) + mapping.describe(executor, 
Map(), "main") should be ( + new StructType(Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + ) + + val df = executor.instantiate(mapping, "main") + df.columns should contain("str_col") + df.columns should contain("int_col") + df.count() should be (0) + } + + it should "raise an error on wrong construction" in { + val session = Session.builder().build() + val context = session.context + + an[IllegalArgumentException] should be thrownBy (NullMapping( + Mapping.Properties(context, "empty"), + Seq(), + None + )) + + an[IllegalArgumentException] should be thrownBy (NullMapping( + Mapping.Properties(context, "empty"), + Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + ), + Some(EmbeddedSchema( + Schema.Properties(context), + fields = Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + ) + )) + )) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ProjectMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ProjectMappingTest.scala index fda6347b8..31e614136 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ProjectMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ProjectMappingTest.scala @@ -17,8 +17,8 @@ package com.dimajix.flowman.spec.mapping import org.apache.spark.sql.Row -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping @@ -34,7 +34,7 @@ import com.dimajix.flowman.types.StructType import com.dimajix.spark.testing.LocalSparkSession -class ProjectMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class ProjectMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The ProjectMapping" should "work" in { val df = spark.createDataFrame(Seq( ("col1", 12), @@ -42,7 +42,7 @@ class ProjectMappingTest extends FlatSpec with Matchers with LocalSparkSession { )) val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = ProjectMapping( Mapping.Properties(session.context), @@ -82,7 +82,7 @@ class ProjectMappingTest extends FlatSpec with Matchers with LocalSparkSession { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution project.mappings.size should be (1) project.mappings.contains("t0") should be (false) @@ -123,7 +123,7 @@ class ProjectMappingTest extends FlatSpec with Matchers with LocalSparkSession { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val df = spark.createDataFrame(Seq( ("col1", 12), @@ -148,7 +148,7 @@ class ProjectMappingTest extends FlatSpec with Matchers with LocalSparkSession { import spark.implicits._ val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val spec = """ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ProvidedMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ProvidedMappingTest.scala index c085ce4d0..dc91b6ab2 100644 --- 
a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ProvidedMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ProvidedMappingTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.spec.mapping -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.MappingIdentifier @@ -25,7 +25,7 @@ import com.dimajix.flowman.model.Module import com.dimajix.spark.testing.LocalSparkSession -class ProvidedMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class ProvidedMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The ProvidedMapping" should "work" in { val spec = """ @@ -38,7 +38,7 @@ class ProvidedMappingTest extends FlatSpec with Matchers with LocalSparkSession project.mappings.keys should contain("dummy") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) executor.spark.emptyDataFrame.createOrReplaceTempView("my_table") diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RankMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RankMappingTest.scala index 9ae068f3e..b15fd77f1 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RankMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RankMappingTest.scala @@ -19,8 +19,8 @@ package com.dimajix.flowman.spec.mapping import scala.collection.mutable import org.apache.spark.sql.Row -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping @@ -32,17 +32,18 @@ import com.dimajix.flowman.spec.mapping.RankMappingTest.Record import com.dimajix.spark.sql.catalyst.SqlBuilder import com.dimajix.spark.testing.LocalSparkSession + object RankMappingTest { case class Record(ts:(String,Long), id:(String,Int), data:String) } -class RankMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class RankMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The RankMapping" should "extract the latest version" in { val spark = this.spark import spark.implicits._ val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val json_1 = Seq( """{"ts":123,"id":12, "a":[12,2], "op":"CREATE"}""", @@ -84,7 +85,7 @@ class RankMappingTest extends FlatSpec with Matchers with LocalSparkSession { import spark.implicits._ val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val json_1 = Seq( """{"ts":123,"id":12, "a":[12,2], "op":"CREATE"}""", @@ -124,7 +125,7 @@ class RankMappingTest extends FlatSpec with Matchers with LocalSparkSession { import spark.implicits._ val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val json_1 = Seq( """{"ts":123,"id":12, "a":[12,1], "op":"CREATE"}""", @@ -156,7 +157,7 @@ class RankMappingTest extends FlatSpec with Matchers with LocalSparkSession { import spark.implicits._ val session = Session.builder().withSparkSession(spark).build() - 
val executor = session.executor + val executor = session.execution val df = Seq( Record(("ts_0", 123), ("id_0", 7), "lala") @@ -255,7 +256,7 @@ class RankMappingTest extends FlatSpec with Matchers with LocalSparkSession { val session = Session.builder().withSparkSession(spark).build() val project = Module.read.string(spec).toProject("default") val context = session.getContext(project) - val executor = session.executor + val executor = session.execution val mapping = context.getMapping(MappingIdentifier("latest")) val df = executor.instantiate(mapping, "main") diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ReadRelationTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ReadRelationTest.scala index b978602a1..496042064 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ReadRelationTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ReadRelationTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.spec.mapping -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.MappingIdentifier @@ -26,7 +26,7 @@ import com.dimajix.flowman.model.RelationIdentifier import com.dimajix.flowman.types.SingleValue -class ReadRelationTest extends FlatSpec with Matchers { +class ReadRelationTest extends AnyFlatSpec with Matchers { "A ReadRelationMapping" should "be parseable" in { val spec = """ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RebalanceMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RebalanceMappingTest.scala new file mode 100644 index 000000000..4ccb81b83 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RebalanceMappingTest.scala @@ -0,0 +1,77 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.mapping + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +class RebalanceMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "An RebalanceMapping" should "be parseable" in { + val spec = + """ + |mappings: + | m1: + | kind: rebalance + | input: some_mapping + | partitions: 2 + """.stripMargin + + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().withSparkSession(spark).build() + val context = session.getContext(project) + + val mapping = project.mappings("m1") + mapping shouldBe a[RebalanceMappingSpec] + + val instance = context.getMapping(MappingIdentifier("m1")) + instance shouldBe a[RebalanceMapping] + + val typedInstance = instance.asInstanceOf[RebalanceMapping] + typedInstance.input should be (MappingOutputIdentifier("some_mapping")) + typedInstance.outputs should be (Seq("main")) + typedInstance.partitions should be (2) + } + + it should "work" in { + val session = Session.builder().withSparkSession(spark).build() + val executor = session.execution + + val input = spark.range(100).repartition(10).toDF() + val inputSchema = StructType.of(input.schema) + + val mapping = RebalanceMapping( + Mapping.Properties(session.context), + MappingOutputIdentifier("input"), + 1 + ) + + mapping.describe(executor, Map(MappingOutputIdentifier("input") -> inputSchema)) should be (Map("main" -> inputSchema)) + + val result = mapping.execute(executor, Map(MappingOutputIdentifier("input") -> input))("main") + result.rdd.partitions.size should be (1) + result.count() should be (100) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RecursiveSqlMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RecursiveSqlMappingTest.scala index 7f53342a7..50770acaa 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RecursiveSqlMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RecursiveSqlMappingTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.spec.mapping -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping @@ -29,7 +29,7 @@ import com.dimajix.flowman.types.StructType import com.dimajix.spark.testing.LocalSparkSession -class RecursiveSqlMappingTest extends FlatSpec with Matchers with LocalSparkSession{ +class RecursiveSqlMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession{ "The RecursiveSqlMapping" should "be parseable" in { val spec = """ @@ -59,7 +59,7 @@ class RecursiveSqlMappingTest extends FlatSpec with Matchers with LocalSparkSess val session = Session.builder().withSparkSession(spark).build() val context = session.context - val executor = session.executor + val executor = session.execution val mapping = RecursiveSqlMapping( Mapping.Properties(context), @@ -107,7 +107,7 @@ class RecursiveSqlMappingTest extends FlatSpec with Matchers with LocalSparkSess val session = 
Session.builder().withSparkSession(spark).build() val context = session.context - val executor = session.executor + val executor = session.execution val mapping = RecursiveSqlMapping( Mapping.Properties(context), diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RepartitionMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RepartitionMappingTest.scala new file mode 100644 index 000000000..860e633b9 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/RepartitionMappingTest.scala @@ -0,0 +1,130 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.mapping + +import org.apache.spark.sql.functions.col +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +class RepartitionMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "An RepartitionMapping" should "be parseable" in { + val spec = + """ + |mappings: + | m1: + | kind: repartition + | input: some_mapping + | partitions: 2 + | columns: + | - col_1 + | - col_2 + | sort: true + """.stripMargin + + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().withSparkSession(spark).build() + val context = session.getContext(project) + + val mapping = project.mappings("m1") + mapping shouldBe a[RepartitionMappingSpec] + + val instance = context.getMapping(MappingIdentifier("m1")) + instance shouldBe a[RepartitionMapping] + + val typedInstance = instance.asInstanceOf[RepartitionMapping] + typedInstance.input should be (MappingOutputIdentifier("some_mapping")) + typedInstance.outputs should be (Seq("main")) + typedInstance.partitions should be (2) + typedInstance.columns should be (Seq("col_1", "col_2")) + typedInstance.sort should be (true) + } + + it should "work" in { + val session = Session.builder().withSparkSession(spark).build() + val executor = session.execution + + val input = spark.range(100).repartition(10).toDF() + val inputSchema = StructType.of(input.schema) + + val mapping = RepartitionMapping( + Mapping.Properties(session.context), + MappingOutputIdentifier("input"), + Seq(), + 1, + false + ) + + mapping.describe(executor, Map(MappingOutputIdentifier("input") -> inputSchema)) should be (Map("main" -> inputSchema)) + + val result = mapping.execute(executor, Map(MappingOutputIdentifier("input") -> input))("main") + result.rdd.partitions.size should be (1) + result.count() should be (100) + } + + it should "support sorting" in { + val session = Session.builder().withSparkSession(spark).build() + val executor = session.execution + + val 
input = spark.range(100).repartition(10).toDF() + val inputSchema = StructType.of(input.schema) + + val mapping = RepartitionMapping( + Mapping.Properties(session.context), + MappingOutputIdentifier("input"), + Seq(), + 1, + true + ) + + mapping.describe(executor, Map(MappingOutputIdentifier("input") -> inputSchema)) should be (Map("main" -> inputSchema)) + + val result = mapping.execute(executor, Map(MappingOutputIdentifier("input") -> input))("main") + result.rdd.partitions.size should be (1) + result.count() should be (100) + } + + it should "support explicit columns" in { + val session = Session.builder().withSparkSession(spark).build() + val executor = session.execution + + val input = spark.range(100).repartition(10).toDF().withColumn("id2", col("id")*2) + val inputSchema = StructType.of(input.schema) + + val mapping = RepartitionMapping( + Mapping.Properties(session.context), + MappingOutputIdentifier("input"), + Seq("id"), + 1, + true + ) + + mapping.describe(executor, Map(MappingOutputIdentifier("input") -> inputSchema)) should be (Map("main" -> inputSchema)) + + val result = mapping.execute(executor, Map(MappingOutputIdentifier("input") -> input))("main") + result.rdd.partitions.size should be (1) + result.count() should be (100) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SchemaMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SchemaMappingTest.scala index 60632ec59..a1be7c7c6 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SchemaMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SchemaMappingTest.scala @@ -21,42 +21,88 @@ import org.apache.spark.sql.types.IntegerType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.MappingIdentifier import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Module +import com.dimajix.flowman.types.Field +import com.dimajix.flowman.types.FieldType +import com.dimajix.flowman.types.SchemaUtils import com.dimajix.spark.testing.LocalSparkSession -class SchemaMappingTest extends FlatSpec with Matchers with LocalSparkSession { - "The SchemaMapping" should "work" in { +class SchemaMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "The SchemaMapping" should "be parsable with columns" in { + val spec = + """ + |mappings: + | t1: + | kind: schema + | input: t0 + | columns: + | _2: string + | _1: string + """.stripMargin + + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().withSparkSession(spark).build() + val executor = session.execution + val context = session.getContext(project) + + project.mappings.size should be (1) + project.mappings.contains("t1") should be (true) + val df = spark.createDataFrame(Seq( ("col1", 12), ("col2", 23) )) + val mapping = context.getMapping(MappingIdentifier("t1")) + mapping.inputs should be (Seq(MappingOutputIdentifier("t0"))) + mapping.output should be (MappingOutputIdentifier("project/t1:main")) + mapping.identifier should be (MappingIdentifier("project/t1")) + mapping.execute(executor, Map(MappingOutputIdentifier("t0") -> df)) + } + + it should 
"work" in { + val inputDf = spark.createDataFrame(Seq( + ("col1", 12), + ("col2", 23) + )) + val inputSchema = com.dimajix.flowman.types.StructType.of(inputDf.schema) + val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = SchemaMapping( - Mapping.Properties(session.context), + Mapping.Properties(session.context, name = "map"), MappingOutputIdentifier("myview"), - Seq("_2" -> "int") + Seq(Field("_2", FieldType.of("int"))) ) mapping.input should be (MappingOutputIdentifier("myview")) - mapping.columns should be (Seq("_2" -> "int")) + mapping.columns should be (Seq(Field("_2", FieldType.of("int")))) mapping.inputs should be (Seq(MappingOutputIdentifier("myview"))) + mapping.output should be (MappingOutputIdentifier("map:main")) + mapping.identifier should be (MappingIdentifier("map")) - val result = mapping.execute(executor, Map(MappingOutputIdentifier("myview") -> df))("main") - .orderBy("_2").collect() - result.size should be (2) - result(0) should be (Row(12)) - result(1) should be (Row(23)) + mapping.describe(executor, Map(MappingOutputIdentifier("myview") -> inputSchema)) should be (Map( + "main" -> com.dimajix.flowman.types.StructType(Seq(Field("_2", FieldType.of("int")))) + )) + mapping.describe(executor, Map(MappingOutputIdentifier("myview") -> inputSchema), "main") should be ( + com.dimajix.flowman.types.StructType(Seq(Field("_2", FieldType.of("int")))) + ) + + val result = mapping.execute(executor, Map(MappingOutputIdentifier("myview") -> inputDf))("main") + .orderBy("_2") + result.collect() should be (Seq( + Row(12), + Row(23) + )) } it should "add NULL columns for missing columns" in { @@ -66,17 +112,20 @@ class SchemaMappingTest extends FlatSpec with Matchers with LocalSparkSession { )) val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val mapping = SchemaMapping( Mapping.Properties(session.context), MappingOutputIdentifier("myview"), - Seq("_2" -> "int", "new" -> "string") + Seq( + Field("_2", FieldType.of("int")), + Field("new", FieldType.of("string")) + ) ) mapping.input should be (MappingOutputIdentifier("myview")) - mapping.columns should be (Seq("_2" -> "int", "new" -> "string")) mapping.inputs should be (Seq(MappingOutputIdentifier("myview"))) + mapping.outputs should be (Seq("main")) val result = mapping.execute(executor, Map(MappingOutputIdentifier("myview") -> df))("main") .orderBy("_2") @@ -84,40 +133,11 @@ class SchemaMappingTest extends FlatSpec with Matchers with LocalSparkSession { StructField("_2", IntegerType, false), StructField("new", StringType, true) ))) - val rows = result.collect() - rows.size should be (2) - rows(0) should be (Row(12, null)) - rows(1) should be (Row(23, null)) - } - - "An appropriate Dataflow" should "be readable from YML" in { - val spec = - """ - |mappings: - | t1: - | kind: schema - | input: t0 - | columns: - | _2: string - | _1: string - """.stripMargin - val project = Module.read.string(spec).toProject("project") - val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor - val context = session.getContext(project) - - project.mappings.size should be (1) - project.mappings.contains("t0") should be (false) - project.mappings.contains("t1") should be (true) - - val df = spark.createDataFrame(Seq( - ("col1", 12), - ("col2", 23) + val rows = result.collect() + rows should be (Seq( + Row(12, null), + Row(23, null) )) - - val 
mapping = context.getMapping(MappingIdentifier("t1")) - mapping.execute(executor, Map(MappingOutputIdentifier("t0") -> df)) } - } diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SortMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SortMappingTest.scala new file mode 100644 index 000000000..b0f9482dd --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SortMappingTest.scala @@ -0,0 +1,101 @@ +/* + * Copyright 2018-2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.mapping + +import org.apache.spark.sql.functions.col +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +class SortMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "A SortMapping" should "be parseable" in { + val spec = + """ + |mappings: + | m1: + | kind: sort + | input: some_mapping + | columns: + | c1: ASC + | c2: desc nulls first + """.stripMargin + + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().build() + val context = session.getContext(project) + + val mapping = project.mappings("m1") + mapping shouldBe a[SortMappingSpec] + + val instance = context.getMapping(MappingIdentifier("m1")) + instance shouldBe a[SortMapping] + + val typedInstance = instance.asInstanceOf[SortMapping] + typedInstance.input should be (MappingOutputIdentifier("some_mapping")) + typedInstance.outputs should be (Seq("main")) + typedInstance.columns should be (Seq( + "c1" -> SortOrder(Ascending, NullsFirst), + "c2" -> SortOrder(Descending, NullsFirst) + )) + } + + it should "work" in { + val spark = this.spark + import spark.implicits._ + val session = Session.builder().withSparkSession(spark).build() + val executor = session.execution + + val input = spark.range(10).repartition(10) + .toDF() + .withColumn("c1", col("id")%3) + .withColumn("c2", col("id")%7) + val inputSchema = StructType.of(input.schema) + + val mapping = SortMapping( + Mapping.Properties(session.context), + MappingOutputIdentifier("input"), + Seq( + "c1" -> SortOrder(Ascending, NullsFirst), + "id" -> SortOrder(Descending, NullsFirst) + ) + ) + + mapping.describe(executor, Map(MappingOutputIdentifier("input") -> inputSchema)) should be (Map("main" -> inputSchema)) + + val result = mapping.execute(executor, Map(MappingOutputIdentifier("input") -> input))("main") + result.as[(Long,Long,Long)].collect() should be (Seq( + (9,0,2), + (6,0,6), + (3,0,3), + (0,0,0), + (7,1,0), + (4,1,4), + (1,1,1), + (8,2,1), + (5,2,5), + (2,2,2) + )) + } +} diff --git 
diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SortOrderTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SortOrderTest.scala
new file mode 100644
index 000000000..1d060320b
--- /dev/null
+++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SortOrderTest.scala
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2018-2021 Kaya Kupferschmidt
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.dimajix.flowman.spec.mapping
+
+import org.scalatest.flatspec.AnyFlatSpec
+import org.scalatest.matchers.should.Matchers
+
+
+class SortOrderTest extends AnyFlatSpec with Matchers {
+    "The SortOrder" should "be parsable" in {
+        SortOrder.of("desc") should be (SortOrder(Descending, NullsLast))
+        SortOrder.of(" desc ") should be (SortOrder(Descending, NullsLast))
+        SortOrder.of("desc nulls last") should be (SortOrder(Descending, NullsLast))
+        SortOrder.of("desc nulls first ") should be (SortOrder(Descending, NullsFirst))
+        SortOrder.of("DESC NULLS FIRST ") should be (SortOrder(Descending, NullsFirst))
+
+        SortOrder.of("asc") should be (SortOrder(Ascending, NullsFirst))
+        SortOrder.of(" asc ") should be (SortOrder(Ascending, NullsFirst))
+        SortOrder.of("asc nulls last") should be (SortOrder(Ascending, NullsLast))
+        SortOrder.of(" asc nulls first ") should be (SortOrder(Ascending, NullsFirst))
+        SortOrder.of("ASC NULLS FIRST ") should be (SortOrder(Ascending, NullsFirst))
+
+        an[IllegalArgumentException] should be thrownBy(SortOrder.of("lala"))
+        an[IllegalArgumentException] should be thrownBy(SortOrder.of("desc nulls"))
+        an[IllegalArgumentException] should be thrownBy(SortOrder.of("desc nulls lala"))
+        an[IllegalArgumentException] should be thrownBy(SortOrder.of("desc lala lala"))
+    }
+}
diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SqlMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SqlMappingTest.scala
index d0764c47c..edd10d436 100644
--- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SqlMappingTest.scala
+++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/SqlMappingTest.scala
@@ -20,17 +20,17 @@
 import org.apache.spark.sql.types.IntegerType
 import org.apache.spark.sql.types.StringType
 import org.apache.spark.sql.types.StructField
 import org.apache.spark.sql.types.StructType
-import org.scalatest.FlatSpec
-import org.scalatest.Matchers
+import org.scalatest.flatspec.AnyFlatSpec
+import org.scalatest.matchers.should.Matchers

 import com.dimajix.flowman.execution.Session
-import com.dimajix.flowman.model.MappingOutputIdentifier
 import com.dimajix.flowman.model.MappingIdentifier
+import com.dimajix.flowman.model.MappingOutputIdentifier
 import com.dimajix.flowman.model.Module
 import com.dimajix.spark.testing.LocalSparkSession


-class SqlMappingTest extends FlatSpec with Matchers with LocalSparkSession {
+class SqlMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession {
     "An multi line SQL Script" should "be readable from YML" in {
         val spec =
@@
-160,7 +160,7 @@ class SqlMappingTest extends FlatSpec with Matchers with LocalSparkSession { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val df = executor.spark.createDataFrame(Seq( diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/TemplateMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/TemplateMappingTest.scala index b12f9a746..f13b012ba 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/TemplateMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/TemplateMappingTest.scala @@ -16,16 +16,16 @@ package com.dimajix.flowman.spec.mapping -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session -import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Module -class TemplateMappingTest extends FlatSpec with Matchers { +class TemplateMappingTest extends AnyFlatSpec with Matchers { "A TemplateMapping" should "work" in { val spec = """ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/TransitiveChildrenMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/TransitiveChildrenMappingTest.scala index 47bbe491e..3f94278c3 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/TransitiveChildrenMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/TransitiveChildrenMappingTest.scala @@ -17,8 +17,8 @@ package com.dimajix.flowman.spec.mapping import org.apache.spark.sql.types._ -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping @@ -28,7 +28,7 @@ import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession -class TransitiveChildrenMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class TransitiveChildrenMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { "A TransitiveMapping" should "be parseable" in { val spec = """ @@ -51,7 +51,7 @@ class TransitiveChildrenMappingTest extends FlatSpec with Matchers with LocalSpa import spark.implicits._ val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val inputDf = Seq( (Some(1),Some(2)), diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UnitMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UnitMappingTest.scala index 1317cb8ac..09cdebec7 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UnitMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UnitMappingTest.scala @@ -17,17 +17,17 @@ package com.dimajix.flowman.spec.mapping import org.apache.spark.sql.DataFrame -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import 
com.dimajix.flowman.execution.Session -import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Module import com.dimajix.spark.testing.LocalSparkSession -class UnitMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class UnitMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { private var inputDf0 : DataFrame = _ private var inputDf1 : DataFrame = _ @@ -75,7 +75,7 @@ class UnitMappingTest extends FlatSpec with Matchers with LocalSparkSession { val session = Session.builder().withSparkSession(spark).build() val context = session.getContext(project) - val executor = session.executor + val executor = session.execution val instance0 = context.getMapping(MappingIdentifier("instance_0")) instance0.inputs should be (Seq()) @@ -115,7 +115,7 @@ class UnitMappingTest extends FlatSpec with Matchers with LocalSparkSession { val session = Session.builder().withSparkSession(spark).build() val context = session.getContext(project) - val executor = session.executor + val executor = session.execution val unit = context.getMapping(MappingIdentifier("macro")) unit.inputs should be (Seq(MappingOutputIdentifier("outside"))) @@ -148,7 +148,7 @@ class UnitMappingTest extends FlatSpec with Matchers with LocalSparkSession { val session = Session.builder().withSparkSession(spark).build() val context = session.getContext(project) - val executor = session.executor + val executor = session.execution val instance0 = context.getMapping(MappingIdentifier("alias")) instance0.inputs should be (Seq(MappingOutputIdentifier("macro:input"))) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UnpackJsonMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UnpackJsonMappingTest.scala index 0377d5343..f2710deff 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UnpackJsonMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UnpackJsonMappingTest.scala @@ -22,17 +22,17 @@ import org.apache.spark.sql.types.IntegerType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session -import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier import com.dimajix.flowman.model.Module import com.dimajix.spark.testing.LocalSparkSession -class UnpackJsonMappingTest extends FlatSpec with Matchers with LocalSparkSession { +class UnpackJsonMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The UnpackJsonMapping" should "be parseable" in { val spec = """ @@ -90,7 +90,7 @@ class UnpackJsonMappingTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val input = executor.spark.createDataFrame(Seq( @@ -139,7 +139,7 @@ class UnpackJsonMappingTest extends FlatSpec with Matchers with LocalSparkSessio val project = 
Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val input = executor.spark.createDataFrame(Seq( @@ -183,7 +183,7 @@ class UnpackJsonMappingTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val input = executor.spark.createDataFrame(Seq( diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UpdateMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UpsertMappingTest.scala similarity index 92% rename from flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UpdateMappingTest.scala rename to flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UpsertMappingTest.scala index 66c84d34b..f108a521b 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UpdateMappingTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/UpsertMappingTest.scala @@ -17,8 +17,8 @@ package com.dimajix.flowman.spec.mapping import org.apache.spark.sql.Row -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Mapping @@ -27,12 +27,12 @@ import com.dimajix.flowman.model.Module import com.dimajix.spark.testing.LocalSparkSession -class UpdateMappingTest extends FlatSpec with Matchers with LocalSparkSession { - "The UpdateMapping" should "merge in updates" in { +class UpsertMappingTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "The UpsertMapping" should "merge in updates" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution - val mapping = UpdateMapping( + val mapping = UpsertMapping( Mapping.Properties(session.context), MappingOutputIdentifier("prev"), MappingOutputIdentifier("updates"), @@ -64,9 +64,9 @@ class UpdateMappingTest extends FlatSpec with Matchers with LocalSparkSession { it should "reorder columns correctly" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution - val mapping = UpdateMapping( + val mapping = UpsertMapping( Mapping.Properties(session.context), MappingOutputIdentifier("prev"), MappingOutputIdentifier("updates"), @@ -99,9 +99,9 @@ class UpdateMappingTest extends FlatSpec with Matchers with LocalSparkSession { it should "add missing columns from updates" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution - val mapping = UpdateMapping( + val mapping = UpsertMapping( Mapping.Properties(session.context), MappingOutputIdentifier("prev"), MappingOutputIdentifier("updates"), @@ -133,9 +133,9 @@ class UpdateMappingTest extends FlatSpec with Matchers with LocalSparkSession { it should "remove entries with duplicate keys" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution - val mapping = UpdateMapping( + val mapping = UpsertMapping( Mapping.Properties(session.context), 
MappingOutputIdentifier("prev"), MappingOutputIdentifier("updates"), @@ -183,7 +183,7 @@ class UpdateMappingTest extends FlatSpec with Matchers with LocalSparkSession { """ |mappings: | t1: - | kind: update + | kind: upsert | input: t0 | updates: t1 | filter: "operation != 'DELETE'" @@ -198,9 +198,9 @@ class UpdateMappingTest extends FlatSpec with Matchers with LocalSparkSession { project.mappings.contains("t1") should be (true) val mapping = project.mappings("t1") - mapping shouldBe an[UpdateMappingSpec] + mapping shouldBe an[UpsertMappingSpec] - val updateMapping = mapping.instantiate(session.context).asInstanceOf[UpdateMapping] + val updateMapping = mapping.instantiate(session.context).asInstanceOf[UpsertMapping] updateMapping.inputs should be (Seq(MappingOutputIdentifier("t0"),MappingOutputIdentifier("t1"))) updateMapping.input should be (MappingOutputIdentifier("t0")) updateMapping.updates should be (MappingOutputIdentifier("t1")) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ValuesMappingTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ValuesMappingTest.scala new file mode 100644 index 000000000..38393ca22 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/mapping/ValuesMappingTest.scala @@ -0,0 +1,208 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.mapping + +import org.apache.spark.sql.Row +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Execution +import com.dimajix.flowman.execution.RootContext +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Mapping +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.model.MappingOutputIdentifier +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.Schema +import com.dimajix.flowman.model.Template +import com.dimajix.flowman.spec.schema.EmbeddedSchema +import com.dimajix.flowman.types.ArrayRecord +import com.dimajix.flowman.types.Field +import com.dimajix.flowman.types.IntegerType +import com.dimajix.flowman.types.StringType +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +class ValuesMappingTest extends AnyFlatSpec with Matchers with MockFactory with LocalSparkSession { + "The ValuesMapping" should "be parseable with a schema" in { + val spec = + """ + |mappings: + | fake: + | kind: values + | records: + | - ["a",12,3] + | - [cat,"",7] + | - [dog,null,8] + | schema: + | kind: embedded + | fields: + | - name: str_col + | type: string + | - name: int_col + | type: integer + |""".stripMargin + + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().build() + val context = session.getContext(project) + + val mapping = context.getMapping(MappingIdentifier("fake")).asInstanceOf[ValuesMapping] + mapping shouldBe a[ValuesMapping] + + mapping.category should be ("mapping") + mapping.kind should be ("values") + mapping.identifier should be (MappingIdentifier("project/fake")) + mapping.output should be (MappingOutputIdentifier("project/fake:main")) + mapping.outputs should be (Seq("main")) + mapping.records should be (Seq( + ArrayRecord("a","12","3"), + ArrayRecord("cat","","7"), + ArrayRecord("dog",null,"8") + )) + } + + it should "be parseable with columns" in { + val spec = + """ + |mappings: + | fake: + | kind: values + | records: + | - ["a",12,3] + | - [cat,"",7] + | - [dog,null,8] + | columns: + | str_col: string + | int_col: integer + |""".stripMargin + + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().build() + val context = session.getContext(project) + + val mapping = context.getMapping(MappingIdentifier("fake")).asInstanceOf[ValuesMapping] + mapping shouldBe a[ValuesMapping] + + mapping.category should be ("mapping") + mapping.kind should be ("values") + mapping.identifier should be (MappingIdentifier("project/fake")) + mapping.output should be (MappingOutputIdentifier("project/fake:main")) + mapping.outputs should be (Seq("main")) + mapping.records should be (Seq( + ArrayRecord("a","12","3"), + ArrayRecord("cat","","7"), + ArrayRecord("dog",null,"8") + )) + } + + it should "work with specified records and schema" in { + val mappingTemplate = mock[Template[Mapping]] + + val project = Project( + "my_project", + mappings = Map( + "const" -> mappingTemplate + ) + ) + val schema = new StructType(Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + + val session = Session.builder().withSparkSession(spark).build() + val context = session.getContext(project) + val executor = session.execution + + val mockMapping = ValuesMapping( + Mapping.Properties(context, 
"const"), + schema = Some(EmbeddedSchema( + Schema.Properties(context), + fields = schema.fields + )), + records = Seq( + ArrayRecord("lala","12"), + ArrayRecord("lolo","13"), + ArrayRecord("",null) + ) + ) + + (mappingTemplate.instantiate _).expects(context).returns(mockMapping) + val mapping = context.getMapping(MappingIdentifier("const")) + + mapping.inputs should be (Seq()) + mapping.outputs should be (Seq("main")) + mapping.describe(executor, Map()) should be (Map("main" -> schema)) + mapping.describe(executor, Map(), "main") should be (schema) + + val df = executor.instantiate(mapping, "main") + df.schema should be (schema.sparkType) + df.collect() should be (Seq( + Row("lala", 12), + Row("lolo", 13), + Row(null,null) + )) + } + + it should "work with specified records and columns" in { + val mappingTemplate = mock[Template[Mapping]] + + val project = Project( + "my_project", + mappings = Map( + "const" -> mappingTemplate + ) + ) + val schema = new StructType(Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + + val session = Session.builder().withSparkSession(spark).build() + val context = session.getContext(project) + val executor = session.execution + + val mockMapping = ValuesMapping( + Mapping.Properties(context, "const"), + columns = schema.fields, + records = Seq( + ArrayRecord("lala","12"), + ArrayRecord("lolo","13"), + ArrayRecord("",null) + ) + ) + + (mappingTemplate.instantiate _).expects(context).returns(mockMapping) + val mapping = context.getMapping(MappingIdentifier("const")) + + mapping.inputs should be (Seq()) + mapping.outputs should be (Seq("main")) + mapping.describe(executor, Map()) should be (Map("main" -> schema)) + mapping.describe(executor, Map(), "main") should be (schema) + + val df = executor.instantiate(mapping, "main") + df.schema should be (schema.sparkType) + df.collect() should be (Seq( + Row("lala", 12), + Row("lolo", 13), + Row(null,null) + )) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/AvroRelationTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/AvroRelationTest.scala index 7cd06f7cc..c342a281a 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/AvroRelationTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/AvroRelationTest.scala @@ -17,8 +17,8 @@ package com.dimajix.flowman.spec.relation import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.common.No import com.dimajix.common.Yes @@ -28,7 +28,7 @@ import com.dimajix.flowman.model.RelationIdentifier import com.dimajix.spark.testing.LocalSparkSession -class AvroRelationTest extends FlatSpec with Matchers with LocalSparkSession { +class AvroRelationTest extends AnyFlatSpec with Matchers with LocalSparkSession { "An Avro Hive Table" should "be writeable" in (if (hiveSupported && spark.version >= "2.4") { val spark = this.spark import spark.implicits._ @@ -61,7 +61,7 @@ class AvroRelationTest extends FlatSpec with Matchers with LocalSparkSession { """.stripMargin val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -118,7 +118,7 @@ class AvroRelationTest extends 
FlatSpec with Matchers with LocalSparkSession { """.stripMargin val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/FileRelationTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/FileRelationTest.scala index 348d031bc..1a26d1ab5 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/FileRelationTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/FileRelationTest.scala @@ -22,36 +22,38 @@ import java.io.PrintWriter import java.nio.file.FileAlreadyExistsException import java.nio.file.Paths +import com.google.common.io.Resources import org.apache.hadoop.fs.Path import org.apache.spark.sql.types.IntegerType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.common.No import com.dimajix.common.Yes import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.execution.Session -import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.flowman.model.MappingIdentifier import com.dimajix.flowman.model.Module import com.dimajix.flowman.model.RelationIdentifier +import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.flowman.types.Field import com.dimajix.flowman.types.SingleValue import com.dimajix.flowman.{types => ftypes} import com.dimajix.spark.testing.LocalSparkSession -class FileRelationTest extends FlatSpec with Matchers with LocalSparkSession { +class FileRelationTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The FileRelation" should "be parseable" in { + val inputPath = Resources.getResource("data/data_1.csv") val spec = - """ + s""" |relations: | t0: | kind: file - | location: test/data/data_1.csv + | location: ${inputPath} | format: csv | schema: | kind: embedded @@ -67,7 +69,7 @@ class FileRelationTest extends FlatSpec with Matchers with LocalSparkSession { project.relations.keys should contain("t0") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -75,7 +77,7 @@ class FileRelationTest extends FlatSpec with Matchers with LocalSparkSession { val fileRelation = relation.asInstanceOf[FileRelation] fileRelation.format should be ("csv") - fileRelation.location should be (new Path("test/data/data_1.csv")) + fileRelation.location should be (new Path(inputPath.toURI)) val df = relation.read(executor, None) df.schema should be (StructType( @@ -109,7 +111,7 @@ class FileRelationTest extends FlatSpec with Matchers with LocalSparkSession { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("local")) @@ -186,7 +188,7 @@ class FileRelationTest extends FlatSpec with Matchers with 
LocalSparkSession { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("local")) @@ -316,7 +318,7 @@ class FileRelationTest extends FlatSpec with Matchers with LocalSparkSession { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("local")) @@ -429,7 +431,7 @@ class FileRelationTest extends FlatSpec with Matchers with LocalSparkSession { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("local")) @@ -491,7 +493,7 @@ class FileRelationTest extends FlatSpec with Matchers with LocalSparkSession { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val mapping = context.getMapping(MappingIdentifier("input")) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/GenericRelationTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/GenericRelationTest.scala new file mode 100644 index 000000000..c9f0ba6d0 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/GenericRelationTest.scala @@ -0,0 +1,104 @@ +/* + * Copyright 2018 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.relation + +import com.google.common.io.Resources +import org.apache.spark.sql.types.StringType +import org.apache.spark.sql.types.StructField +import org.apache.spark.sql.types.StructType +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.model.Relation +import com.dimajix.flowman.model.RelationIdentifier +import com.dimajix.flowman.model.Schema +import com.dimajix.flowman.spec.schema.EmbeddedSchema +import com.dimajix.flowman.types.Field +import com.dimajix.spark.testing.LocalSparkSession + + +class GenericRelationTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "The GenericRelation" should "be parseable" in { + val spec = + """ + |relations: + | t0: + | kind: generic + | format: csv + | options: + | path: test/data/data_1.csv + | schema: + | kind: embedded + | fields: + | - name: f1 + | type: string + | - name: f2 + | type: string + | - name: f3 + | type: string + |""".stripMargin + val project = Module.read.string(spec).toProject("project") + project.relations.keys should contain("t0") + + val session = Session.builder().build() + val context = session.getContext(project) + + val relation = context.getRelation(RelationIdentifier("t0")) + relation.kind should be ("generic") + + val fileRelation = relation.asInstanceOf[GenericRelation] + fileRelation.format should be ("csv") + fileRelation.options should be (Map("path" -> "test/data/data_1.csv")) + } + + it should "read data" in { + val session = Session.builder().withSparkSession(spark).build() + val executor = session.execution + + val schema = EmbeddedSchema( + Schema.Properties(session.context), + fields = Seq( + Field("f1", com.dimajix.flowman.types.StringType), + Field("f2", com.dimajix.flowman.types.StringType), + Field("f3", com.dimajix.flowman.types.StringType) + ) + ) + val relation = GenericRelation( + Relation.Properties( + session.context + ), + Some(schema), + "csv", + Map("path" -> Resources.getResource("data/data_1.csv").toString) + ) + + // Verify schema + relation.fields should be (schema.fields) + + // Verify read operation + val df = relation.read(executor, None) + df.schema should be (StructType( + StructField("f1", StringType) :: + StructField("f2", StringType) :: + StructField("f3", StringType) :: + Nil + )) + df.collect() + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/HiveTableRelationTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/HiveTableRelationTest.scala index a0c298268..ebd70efe0 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/HiveTableRelationTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/HiveTableRelationTest.scala @@ -29,11 +29,10 @@ import org.apache.spark.sql.types.IntegerType import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.common.No -import com.dimajix.common.Unknown import com.dimajix.common.Yes import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.MappingIdentifier @@ -48,7 +47,7 @@ import com.dimajix.spark.testing.LocalSparkSession import com.dimajix.spark.testing.QueryTest -class 
HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSession with QueryTest { +class HiveTableRelationTest extends AnyFlatSpec with Matchers with LocalSparkSession with QueryTest { "The HiveTableRelation" should "support create" in { val spec = """ @@ -74,7 +73,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -155,7 +154,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -211,7 +210,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -276,7 +275,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -337,7 +336,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -383,7 +382,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -430,7 +429,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -480,7 +479,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -528,7 +527,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = 
Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -577,7 +576,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -627,7 +626,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -703,7 +702,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -778,7 +777,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -884,7 +883,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation_1 = context.getRelation(RelationIdentifier("t1")) @@ -981,7 +980,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation_1 = context.getRelation(RelationIdentifier("t1")) @@ -1117,7 +1116,7 @@ class HiveTableRelationTest extends FlatSpec with Matchers with LocalSparkSessio val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val mapping = context.getMapping(MappingIdentifier("input")) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/HiveUnionTableRelationTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/HiveUnionTableRelationTest.scala index 70496fb26..743ad474e 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/HiveUnionTableRelationTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/HiveUnionTableRelationTest.scala @@ -29,16 +29,16 @@ import 
org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType import org.apache.spark.sql.types.TimestampType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.common.No import com.dimajix.common.Unknown import com.dimajix.common.Yes import com.dimajix.flowman.execution.Session -import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.flowman.model.Module import com.dimajix.flowman.model.RelationIdentifier +import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.flowman.types.Field import com.dimajix.flowman.types.SingleValue import com.dimajix.flowman.{types => ftypes} @@ -46,7 +46,7 @@ import com.dimajix.spark.testing.LocalSparkSession import com.dimajix.spark.testing.QueryTest -class HiveUnionTableRelationTest extends FlatSpec with Matchers with LocalSparkSession with QueryTest { +class HiveUnionTableRelationTest extends AnyFlatSpec with Matchers with LocalSparkSession with QueryTest { "The HiveUnionTableRelation" should "support the full lifecycle" in (if (hiveSupported) { val spec = """ @@ -72,7 +72,7 @@ class HiveUnionTableRelationTest extends FlatSpec with Matchers with LocalSparkS val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -191,7 +191,7 @@ class HiveUnionTableRelationTest extends FlatSpec with Matchers with LocalSparkS val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) // == Create =================================================================== @@ -273,7 +273,7 @@ class HiveUnionTableRelationTest extends FlatSpec with Matchers with LocalSparkS val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -418,7 +418,7 @@ class HiveUnionTableRelationTest extends FlatSpec with Matchers with LocalSparkS val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation_1 = context.getRelation(RelationIdentifier("t1")) @@ -608,7 +608,7 @@ class HiveUnionTableRelationTest extends FlatSpec with Matchers with LocalSparkS val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation_1 = context.getRelation(RelationIdentifier("t1")) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/HiveViewRelationTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/HiveViewRelationTest.scala index 01e990f53..425221cb7 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/HiveViewRelationTest.scala 
+++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/HiveViewRelationTest.scala @@ -17,21 +17,21 @@ package com.dimajix.flowman.spec.relation import org.apache.spark.sql.catalyst.TableIdentifier -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.common.No import com.dimajix.common.Yes import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.MappingOutputIdentifier -import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.flowman.model.Module import com.dimajix.flowman.model.Relation import com.dimajix.flowman.model.RelationIdentifier +import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.spark.testing.LocalSparkSession -class HiveViewRelationTest extends FlatSpec with Matchers with LocalSparkSession { +class HiveViewRelationTest extends AnyFlatSpec with Matchers with LocalSparkSession { "A HiveViewRelation" should "be creatable from a mapping" in { val spec = """ @@ -58,7 +58,7 @@ class HiveViewRelationTest extends FlatSpec with Matchers with LocalSparkSession val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) context.getRelation(RelationIdentifier("t0")).create(executor) @@ -142,7 +142,7 @@ class HiveViewRelationTest extends FlatSpec with Matchers with LocalSparkSession val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) context.getRelation(RelationIdentifier("t0")).create(executor) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/JdbcRelationTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/JdbcRelationTest.scala index 37a82e55a..de5144752 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/JdbcRelationTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/JdbcRelationTest.scala @@ -25,8 +25,8 @@ import scala.collection.JavaConverters._ import org.apache.spark.sql.execution.datasources.jdbc.DriverRegistry import org.apache.spark.sql.execution.datasources.jdbc.DriverWrapper -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.common.No import com.dimajix.common.Yes @@ -38,7 +38,7 @@ import com.dimajix.flowman.types.SingleValue import com.dimajix.spark.testing.LocalSparkSession -class JdbcRelationTest extends FlatSpec with Matchers with LocalSparkSession { +class JdbcRelationTest extends AnyFlatSpec with Matchers with LocalSparkSession { def withDatabase[T](driverClass:String, url:String)(fn:(Statement) => T) : T = { DriverRegistry.register(driverClass) val driver: Driver = DriverManager.getDrivers.asScala.collectFirst { @@ -92,7 +92,7 @@ class JdbcRelationTest extends FlatSpec with Matchers with LocalSparkSession { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -198,7 +198,7 @@ class 
JdbcRelationTest extends FlatSpec with Matchers with LocalSparkSession { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("t0")) @@ -345,7 +345,7 @@ class JdbcRelationTest extends FlatSpec with Matchers with LocalSparkSession { val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation_t0 = context.getRelation(RelationIdentifier("t0")) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/LocalRelationTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/LocalRelationTest.scala index 9c4f900c7..d43657430 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/LocalRelationTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/LocalRelationTest.scala @@ -22,22 +22,21 @@ import java.nio.file.Paths import org.apache.hadoop.fs.Path import org.scalatest.BeforeAndAfter -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.common.No -import com.dimajix.common.Unknown import com.dimajix.common.Yes import com.dimajix.flowman.execution.OutputMode import com.dimajix.flowman.execution.Session -import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.flowman.model.Module import com.dimajix.flowman.model.RelationIdentifier +import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.flowman.types.SingleValue import com.dimajix.spark.testing.LocalSparkSession -class LocalRelationTest extends FlatSpec with Matchers with BeforeAndAfter with LocalSparkSession { +class LocalRelationTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The LocalRelation" should "be able to create local directories" in { val outputPath = Paths.get(tempDir.toString, "csv", "test") val spec = @@ -60,7 +59,7 @@ class LocalRelationTest extends FlatSpec with Matchers with BeforeAndAfter with val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("local")) @@ -126,7 +125,7 @@ class LocalRelationTest extends FlatSpec with Matchers with BeforeAndAfter with val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("local")) @@ -187,7 +186,7 @@ class LocalRelationTest extends FlatSpec with Matchers with BeforeAndAfter with val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("local")) @@ -238,7 +237,7 @@ class LocalRelationTest extends FlatSpec with Matchers with BeforeAndAfter with val project 
= Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("local")) @@ -315,7 +314,7 @@ class LocalRelationTest extends FlatSpec with Matchers with BeforeAndAfter with val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val relation = context.getRelation(RelationIdentifier("local")) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/MockRelationTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/MockRelationTest.scala new file mode 100644 index 000000000..03edebc1f --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/MockRelationTest.scala @@ -0,0 +1,288 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.relation + +import org.apache.spark.sql.Row +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.common.No +import com.dimajix.common.Yes +import com.dimajix.flowman.execution.RootContext +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.Relation +import com.dimajix.flowman.model.RelationIdentifier +import com.dimajix.flowman.model.Schema +import com.dimajix.flowman.model.Template +import com.dimajix.flowman.spec.schema.EmbeddedSchema +import com.dimajix.flowman.types.ArrayRecord +import com.dimajix.flowman.types.Field +import com.dimajix.flowman.types.IntegerType +import com.dimajix.flowman.types.StringType +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +class MockRelationTest extends AnyFlatSpec with Matchers with MockFactory with LocalSparkSession{ + "A MockRelation" should "be parsable" in { + val spec = + """ + |relations: + | empty: + | kind: null + | schema: + | kind: embedded + | fields: + | - name: str_col + | type: string + | - name: int_col + | type: integer + | + | mock: + | kind: mock + | relation: empty + | records: + | - ["a",12,3] + | - [cat,"",7] + | - [dog,null,8] + |""".stripMargin + + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().build() + val context = session.getContext(project) + + val relation = context.getRelation(RelationIdentifier("mock")).asInstanceOf[MockRelation] + relation shouldBe a[MockRelation] + + relation.category should be ("relation") + relation.kind should be ("mock") + relation.relation should be (RelationIdentifier("empty")) + relation.records should be (Seq( + 
ArrayRecord("a","12","3"), + ArrayRecord("cat","","7"), + ArrayRecord("dog",null,"8") + )) + } + + it should "support create, write and destroy" in { + val baseRelationTemplate = mock[Template[Relation]] + val mockRelationTemplate = mock[Template[Relation]] + + val project = Project( + "my_project", + relations = Map( + "base" -> baseRelationTemplate, + "mock" -> mockRelationTemplate + ) + ) + + val session = Session.builder().withSparkSession(spark).build() + val context = session.getContext(project) + val executor = session.execution + + val mockRelation = MockRelation( + Relation.Properties(context, "mock"), + RelationIdentifier("base") + ) + + (mockRelationTemplate.instantiate _).expects(context).returns(mockRelation) + val relation = context.getRelation(RelationIdentifier("mock")) + relation shouldBe a[MockRelation] + relation.category should be ("relation") + + relation.requires should be (Set()) + relation.provides should be (Set()) + relation.resources(Map()) should be (Set()) + + // Initial state + relation.exists(executor) should be (No) + relation.loaded(executor, Map()) should be (No) + + // Create + relation.create(executor) + relation.exists(executor) should be (Yes) + relation.loaded(executor, Map()) should be (No) + + // Write + relation.write(executor, spark.emptyDataFrame) + relation.exists(executor) should be (Yes) + relation.loaded(executor, Map()) should be (Yes) + + // Truncate + relation.truncate(executor) + relation.exists(executor) should be (Yes) + relation.loaded(executor, Map()) should be (No) + + // Destroy + relation.destroy(executor) + relation.exists(executor) should be (No) + relation.loaded(executor, Map()) should be (No) + } + + it should "read empty DataFrames" in { + val baseRelationTemplate = mock[Template[Relation]] + val baseRelation = mock[Relation] + val mockRelationTemplate = mock[Template[Relation]] + + val project = Project( + "my_project", + relations = Map( + "base" -> baseRelationTemplate, + "mock" -> mockRelationTemplate + ) + ) + + val session = Session.builder().withSparkSession(spark).build() + val context = session.getContext(project) + val executor = session.execution + + val mockRelation = MockRelation( + Relation.Properties(context, "mock"), + RelationIdentifier("base") + ) + val schema = EmbeddedSchema( + Schema.Properties(context), + fields = Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + ) + ) + + (mockRelationTemplate.instantiate _).expects(context).returns(mockRelation) + val relation = context.getRelation(RelationIdentifier("mock")) + + (baseRelationTemplate.instantiate _).expects(context).returns(baseRelation) + (baseRelation.schema _).expects().anyNumberOfTimes().returns(Some(schema)) + relation.schema should be (Some(schema)) + + (baseRelation.partitions _).expects().anyNumberOfTimes().returns(Seq()) + relation.partitions should be (Seq()) + relation.fields should be (schema.fields) + + (baseRelation.describe _).expects(executor).anyNumberOfTimes().returns(new StructType(schema.fields)) + relation.describe(executor) should be (new StructType(schema.fields)) + + val df1 = relation.read(executor, None, Map()) + df1.schema should be (new StructType(schema.fields).sparkType) + df1.count() should be (0) + + val readSchema = StructType(Seq(Field("int_col", IntegerType))) + val df2 = relation.read(executor, Some(readSchema.sparkType)) + df2.schema should be (readSchema.sparkType) + df2.count() should be (0) + } + + it should "work nicely with overrides" in { + val baseRelationTemplate = 
mock[Template[Relation]] + val baseRelation = mock[Relation] + val mockRelationTemplate = mock[Template[Relation]] + + val project = Project( + "my_project", + relations = Map( + "base" -> baseRelationTemplate + ) + ) + + val session = Session.builder().withSparkSession(spark).build() + val context = RootContext.builder(session.context) + .overrideRelations(Map( + RelationIdentifier("base", "my_project") -> mockRelationTemplate + )) + .build() + .getProjectContext(project) + val executor = session.execution + + val mockRelation = MockRelation( + Relation.Properties(context, "base"), + RelationIdentifier("base") + ) + val schema = EmbeddedSchema( + Schema.Properties(context), + fields = Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + ) + ) + + (mockRelationTemplate.instantiate _).expects(context).returns(mockRelation) + val relation = context.getRelation(RelationIdentifier("base")) + + (baseRelationTemplate.instantiate _).expects(context).returns(baseRelation) + (baseRelation.schema _).expects().anyNumberOfTimes().returns(Some(schema)) + relation.schema should be (Some(schema)) + + (baseRelation.partitions _).expects().anyNumberOfTimes().returns(Seq()) + relation.partitions should be (Seq()) + relation.fields should be (schema.fields) + + (baseRelation.describe _).expects(executor).anyNumberOfTimes().returns(new StructType(schema.fields)) + relation.describe(executor) should be (new StructType(schema.fields)) + } + + it should "return provided records as a DataFrame" in { + val baseRelationTemplate = mock[Template[Relation]] + val baseRelation = mock[Relation] + val mockRelationTemplate = mock[Template[Relation]] + + val project = Project( + "my_project", + relations = Map( + "base" -> baseRelationTemplate, + "mock" -> mockRelationTemplate + ) + ) + + val session = Session.builder().withSparkSession(spark).build() + val context = session.getContext(project) + val executor = session.execution + + val mockRelation = MockRelation( + Relation.Properties(context, "mock"), + RelationIdentifier("base"), + Seq( + ArrayRecord("lala","12"), + ArrayRecord("lolo","13"), + ArrayRecord("",null) + ) + ) + val schema = EmbeddedSchema( + Schema.Properties(context), + fields = Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + ) + ) + + (mockRelationTemplate.instantiate _).expects(context).returns(mockRelation) + val relation = context.getRelation(RelationIdentifier("mock")) + + (baseRelationTemplate.instantiate _).expects(context).returns(baseRelation) + (baseRelation.schema _).expects().anyNumberOfTimes().returns(Some(schema)) + (baseRelation.partitions _).expects().anyNumberOfTimes().returns(Seq()) + val df = relation.read(executor, None, Map()) + df.schema should be (new StructType(schema.fields).sparkType) + df.collect() should be (Seq( + Row("lala", 12), + Row("lolo", 13), + Row(null,null) + )) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/NullRelationTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/NullRelationTest.scala index 65afb0a76..bcdac7e5c 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/NullRelationTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/NullRelationTest.scala @@ -19,10 +19,9 @@ package com.dimajix.flowman.spec.relation import org.apache.spark.sql.types.StringType import org.apache.spark.sql.types.StructField import org.apache.spark.sql.types.StructType -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import 
org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -import com.dimajix.common.No import com.dimajix.common.Unknown import com.dimajix.common.Yes import com.dimajix.flowman.execution.Session @@ -30,10 +29,10 @@ import com.dimajix.flowman.model.Relation import com.dimajix.spark.testing.LocalSparkSession -class NullRelationTest extends FlatSpec with Matchers with LocalSparkSession { +class NullRelationTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The NullRelation" should "support the full lifecycle" in { val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val relation = NullRelation(Relation.Properties(session.context)) val schema = StructType( @@ -47,7 +46,7 @@ class NullRelationTest extends FlatSpec with Matchers with LocalSparkSession { // == Read =================================================================== val df = relation.read(executor, Some(schema)) - df should not be (null) + df.count() should be (0) // == Truncate =================================================================== relation.truncate(executor) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/ProvidedRelationTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/ProvidedRelationTest.scala index 6ea9eca38..ad2c3ed5d 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/ProvidedRelationTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/ProvidedRelationTest.scala @@ -1,7 +1,23 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + package com.dimajix.flowman.spec.relation -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.MappingIdentifier @@ -11,7 +27,7 @@ import com.dimajix.spark.testing.LocalSparkSession case class Record(c1:String, c2:String) -class ProvidedRelationTest extends FlatSpec with Matchers with LocalSparkSession { +class ProvidedRelationTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The ProvidedRelation" should "work" in { val spec = """ @@ -29,7 +45,7 @@ class ProvidedRelationTest extends FlatSpec with Matchers with LocalSparkSession project.mappings.keys should contain("dummy") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) executor.spark.emptyDataFrame.createOrReplaceTempView("my_table") diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/PluginRelationTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/RelationSpecTest.scala similarity index 63% rename from flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/PluginRelationTest.scala rename to flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/RelationSpecTest.scala index afba61933..d8ce583cb 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/PluginRelationTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/relation/RelationSpecTest.scala @@ -16,32 +16,19 @@ package com.dimajix.flowman.spec.relation -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -import com.dimajix.flowman.annotation.RelationType import com.dimajix.flowman.model.Module -import com.dimajix.flowman.spi.CustomRelationSpec +import com.dimajix.flowman.spec.annotation.RelationType @RelationType(kind = "annotatedRelation") class AnnotationRelationSpec extends NullRelationSpec { } -class PluginRelationTest extends FlatSpec with Matchers { - "A plugin" should "be used if present" in { - val spec = - """ - |relations: - | custom: - | kind: customRelation - """.stripMargin - val module = Module.read.string(spec) - module.relations.keys should contain("custom") - module.relations("custom") shouldBe a[CustomRelationSpec] - } - - "Annotated plugins should" should "be used" in { +class RelationSpecTest extends AnyFlatSpec with Matchers { + "RelationSpec" should "support custom relations" in { val spec = """ |relations: diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/AvroSchemaTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/AvroSchemaTest.scala index b3120f7c5..2ec60fc3c 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/AvroSchemaTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/AvroSchemaTest.scala @@ -18,15 +18,15 @@ package com.dimajix.flowman.spec.schema import com.google.common.io.Resources import org.apache.hadoop.fs.Path -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.spec.ObjectMapper import com.dimajix.flowman.types.StringType -class AvroSchemaTest extends FlatSpec with Matchers { 
+class AvroSchemaTest extends AnyFlatSpec with Matchers { "An AvroSchema" should "be declarable inline" in { val spec = """ @@ -111,6 +111,44 @@ class AvroSchemaTest extends FlatSpec with Matchers { fields(0).ftype should be (StringType) } + it should "support forced nullablility" in { + val spec = + """ + |kind: avro + |nullable: true + |spec: | + | { + | "type": "record", + | "namespace": "", + | "name": "test_schema", + | "doc": "Some Documentation", + | "fields": [ + | { + | "doc": "AccessDateTime as a string", + | "type": "string", + | "name": "AccessDateTime", + | "order": "ignore" + | } + | ] + | } + """.stripMargin + + val session = Session.builder().build() + val schemaSpec = ObjectMapper.parse[SchemaSpec](spec) + schemaSpec shouldBe an[AvroSchemaSpec] + + val result = schemaSpec.instantiate(session.context) + result shouldBe an[AvroSchema] + result.description should be (Some("Some Documentation")) + + val fields = result.fields + fields.size should be (1) + fields(0).nullable should be (true) + fields(0).name should be ("AccessDateTime") + fields(0).description should be (Some("AccessDateTime as a string")) + fields(0).ftype should be (StringType) + } + it should "be readable from an external file" in { val basedir = new Path(Resources.getResource(".").toURI) val spec = diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/EmbeddedSchemaTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/EmbeddedSchemaTest.scala index 8f2af91a2..b3ac249a9 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/EmbeddedSchemaTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/EmbeddedSchemaTest.scala @@ -16,14 +16,14 @@ package com.dimajix.flowman.spec.schema -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.spec.ObjectMapper -class EmbeddedSchemaTest extends FlatSpec with Matchers { +class EmbeddedSchemaTest extends AnyFlatSpec with Matchers { "An EmbeddedSchema" should "be parseable with explicit type" in { val spec = """ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/MappingSchemaTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/MappingSchemaTest.scala index 0a1b52bd9..566636cb1 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/MappingSchemaTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/MappingSchemaTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.spec.schema -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Module @@ -27,7 +27,7 @@ import com.dimajix.flowman.types.IntegerType import com.dimajix.flowman.types.StringType -class MappingSchemaTest extends FlatSpec with Matchers { +class MappingSchemaTest extends AnyFlatSpec with Matchers { "A MappingSchema" should "resolve the correct schema" in { val spec = """ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/SchemaTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/SchemaTest.scala index 0ca3a92e9..3244718da 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/SchemaTest.scala +++ 
b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/SchemaTest.scala @@ -19,13 +19,13 @@ package com.dimajix.flowman.spec.schema import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.dataformat.yaml.YAMLFactory import com.fasterxml.jackson.module.scala.DefaultScalaModule -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session -class SchemaTest extends FlatSpec with Matchers { +class SchemaTest extends AnyFlatSpec with Matchers { lazy val mapper = { val mapper = new ObjectMapper(new YAMLFactory()) mapper.registerModule(DefaultScalaModule) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/UnionSchemaTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/UnionSchemaTest.scala index c9dd6e632..b1a78400a 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/UnionSchemaTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/schema/UnionSchemaTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.spec.schema -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Session import com.dimajix.flowman.spec.ObjectMapper @@ -25,7 +25,7 @@ import com.dimajix.flowman.types.Field import com.dimajix.flowman.types.StringType -class UnionSchemaTest extends FlatSpec with Matchers { +class UnionSchemaTest extends AnyFlatSpec with Matchers { "A UnionSchema" should "work" in { val spec = """ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/BlackholeTargetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/BlackholeTargetTest.scala index 41b1f34af..27252c4e6 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/BlackholeTargetTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/BlackholeTargetTest.scala @@ -16,8 +16,8 @@ package com.dimajix.flowman.spec.target -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.common.No import com.dimajix.common.Yes @@ -28,7 +28,7 @@ import com.dimajix.flowman.model.TargetIdentifier import com.dimajix.spark.testing.LocalSparkSession -class BlackholeTargetTest extends FlatSpec with Matchers with LocalSparkSession{ +class BlackholeTargetTest extends AnyFlatSpec with Matchers with LocalSparkSession{ "A Blackhole Target" should "be buildable" in { val spark = this.spark @@ -46,7 +46,7 @@ class BlackholeTargetTest extends FlatSpec with Matchers with LocalSparkSession{ """.stripMargin val project = Module.read.string(spec).toProject("project") val session = Session.builder().withSparkSession(spark).build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) spark.emptyDataFrame.createOrReplaceTempView("some_table") diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/CompareTargetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/CompareTargetTest.scala index b327cfa6c..ace7ed39b 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/CompareTargetTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/CompareTargetTest.scala @@ -18,21 +18,21 @@ package 
com.dimajix.flowman.spec.target import com.google.common.io.Resources import org.apache.hadoop.fs.Path -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.Session import com.dimajix.flowman.execution.VerificationFailedException -import com.dimajix.flowman.model.ResourceIdentifier -import com.dimajix.flowman.spec.ObjectMapper import com.dimajix.flowman.model.Dataset +import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.flowman.model.Target +import com.dimajix.flowman.spec.ObjectMapper import com.dimajix.flowman.spec.dataset.FileDataset import com.dimajix.spark.testing.LocalSparkSession -class CompareTargetTest extends FlatSpec with Matchers with LocalSparkSession { +class CompareTargetTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The CompareTarget" should "be parseable from YAML" in { val basedir = new Path(Resources.getResource(".").toURI) val spec = @@ -52,7 +52,7 @@ class CompareTargetTest extends FlatSpec with Matchers with LocalSparkSession { it should "work on same files" in { val basedir = new Path(Resources.getResource(".").toURI) val session = Session.builder().build() - val executor = session.executor + val executor = session.execution val context = session.context val target = CompareTarget( @@ -75,7 +75,7 @@ class CompareTargetTest extends FlatSpec with Matchers with LocalSparkSession { it should "fail on non existing actual file" in { val basedir = new Path(Resources.getResource(".").toURI) val session = Session.builder().build() - val executor = session.executor + val executor = session.execution val context = session.context val target = CompareTarget( @@ -99,7 +99,7 @@ class CompareTargetTest extends FlatSpec with Matchers with LocalSparkSession { it should "throw an exception on an non existing expected file" in { val basedir = new Path(Resources.getResource(".").toURI) val session = Session.builder().build() - val executor = session.executor + val executor = session.execution val context = session.context val target = CompareTarget( @@ -114,7 +114,7 @@ class CompareTargetTest extends FlatSpec with Matchers with LocalSparkSession { it should "work with a directory as expected" in { val basedir = new Path(Resources.getResource(".").toURI) val session = Session.builder().build() - val executor = session.executor + val executor = session.execution val context = session.context val target = CompareTarget( @@ -129,7 +129,7 @@ class CompareTargetTest extends FlatSpec with Matchers with LocalSparkSession { it should "work with a directory as actual" in { val basedir = new Path(Resources.getResource(".").toURI) val session = Session.builder().build() - val executor = session.executor + val executor = session.execution val context = session.context val target = CompareTarget( @@ -144,7 +144,7 @@ class CompareTargetTest extends FlatSpec with Matchers with LocalSparkSession { it should "work with a directory as expected and actual" in { val basedir = new Path(Resources.getResource(".").toURI) val session = Session.builder().build() - val executor = session.executor + val executor = session.execution val context = session.context val target = CompareTarget( diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/ConsoleTargetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/ConsoleTargetTest.scala new file mode 100644 index 
000000000..a588c45cc --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/ConsoleTargetTest.scala @@ -0,0 +1,94 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.target + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.common.Yes +import com.dimajix.flowman.execution.Phase +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Dataset +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.model.Target +import com.dimajix.flowman.spec.dataset.ValuesDataset +import com.dimajix.flowman.types.ArrayRecord +import com.dimajix.flowman.types.Field +import com.dimajix.flowman.types.IntegerType +import com.dimajix.flowman.types.StringType +import com.dimajix.flowman.types.StructType +import com.dimajix.spark.testing.LocalSparkSession + + +class ConsoleTargetTest extends AnyFlatSpec with Matchers with LocalSparkSession { + "A ConsoleTarget" should "be parseable" in { + val spec = + """ + |targets: + | custom: + | kind: console + | input: + | kind: mapping + | mapping: some_mapping + | limit: 10 + | columns: [col_a,col_b] + | csv: false + | header: true + |""".stripMargin + + val module = Module.read.string(spec) + val target = module.targets("custom") + target shouldBe an[ConsoleTargetSpec] + } + + it should "print records onto the console" in { + val session = Session.builder.withSparkSession(spark).build() + val execution = session.execution + val context = session.context + + val schema = new StructType(Seq( + Field("str_col", StringType), + Field("int_col", IntegerType) + )) + val dataset = ValuesDataset( + Dataset.Properties(context, "const"), + columns = schema.fields, + records = Seq( + ArrayRecord("lala","12"), + ArrayRecord("lolo","13"), + ArrayRecord("",null) + ) + ) + val target = ConsoleTarget( + Target.Properties(context), + dataset, + 10, + true, + false, + Seq() + ) + + target.phases should be (Set(Phase.BUILD)) + target.requires(Phase.BUILD) should be (Set()) + target.provides(Phase.BUILD) should be (Set()) + target.before should be (Seq()) + target.after should be (Seq()) + + target.dirty(execution, Phase.BUILD) should be (Yes) + target.execute(execution, Phase.BUILD) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/CopyTargetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/CopyTargetTest.scala index bdd5f0db1..9142cc757 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/CopyTargetTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/CopyTargetTest.scala @@ -19,8 +19,8 @@ package com.dimajix.flowman.spec.target import java.io.File import org.apache.hadoop.fs.Path -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.common.No import 
com.dimajix.common.Yes @@ -38,7 +38,7 @@ import com.dimajix.flowman.types.SingleValue import com.dimajix.spark.testing.LocalSparkSession -class CopyTargetTest extends FlatSpec with Matchers with LocalSparkSession { +class CopyTargetTest extends AnyFlatSpec with Matchers with LocalSparkSession { "A CopyTarget" should "support configuration via YML" in { val spec = """ @@ -109,7 +109,7 @@ class CopyTargetTest extends FlatSpec with Matchers with LocalSparkSession { |""".stripMargin val project = Module.read.string(spec).toProject("test") val session = Session.builder().build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val targetFilename = new File(tempDir, "copy-relation-output.csv") diff --git a/flowman-testing/src/test/scala/com/dimajix/flowman/testing/ExamplePluginTestSpec.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/DeleteFileTargetTest.scala similarity index 50% rename from flowman-testing/src/test/scala/com/dimajix/flowman/testing/ExamplePluginTestSpec.scala rename to flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/DeleteFileTargetTest.scala index c5fcedb11..bc62b989d 100644 --- a/flowman-testing/src/test/scala/com/dimajix/flowman/testing/ExamplePluginTestSpec.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/DeleteFileTargetTest.scala @@ -1,5 +1,5 @@ /* - * Copyright 2019 Kaya Kupferschmidt + * Copyright 2021 Kaya Kupferschmidt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,24 +14,22 @@ * limitations under the License. */ -package com.dimajix.flowman.testing +package com.dimajix.flowman.spec.target -import java.io.File +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -import org.scalatest.FlatSpec -import org.scalatest.Matchers - -import com.dimajix.flowman.execution.Phase +class DeleteFileTargetTest extends AnyFlatSpec with Matchers { + "A DeleteTarget" should "be parseable" in { + // TODO + } -class ExamplePluginTestSpec extends FlatSpec with Matchers { - "The example project" should "be testable" in { - val runner = Runner.builder - .withProfile("test") - .withProject(new File("../examples/plugin-example")) - .build() + it should "delete existing files" in { + // TODO + } - val result = runner.runJob("main", Seq(Phase.BUILD)) - result should be(true) + it should "delete existing directories" in { + // TODO } } diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/LocalTargetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/LocalTargetTest.scala index d0b58a9de..ea36f9f3d 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/LocalTargetTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/LocalTargetTest.scala @@ -18,8 +18,8 @@ package com.dimajix.flowman.spec.target import java.nio.file.Paths -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.common.No import com.dimajix.common.Yes @@ -30,7 +30,7 @@ import com.dimajix.flowman.model.TargetIdentifier import com.dimajix.spark.testing.LocalSparkSession -class LocalTargetTest extends FlatSpec with Matchers with LocalSparkSession { +class LocalTargetTest extends AnyFlatSpec with Matchers with LocalSparkSession { "A LocalTarget" should "be buildable" in { val spark = this.spark val 
outputPath = Paths.get(tempDir.toString, "local-target", "data.csv") @@ -52,7 +52,7 @@ class LocalTargetTest extends FlatSpec with Matchers with LocalSparkSession { val session = Session.builder() .withSparkSession(spark) .build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) import spark.implicits._ diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/MergeFilesTargetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/MergeFilesTargetTest.scala index a7f07b756..a8aec072e 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/MergeFilesTargetTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/MergeFilesTargetTest.scala @@ -18,8 +18,8 @@ package com.dimajix.flowman.spec.target import java.nio.charset.Charset -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.common.No import com.dimajix.common.Yes @@ -29,10 +29,10 @@ import com.dimajix.flowman.model.Target import com.dimajix.spark.testing.LocalTempDir -class MergeFilesTargetTest extends FlatSpec with Matchers with LocalTempDir { +class MergeFilesTargetTest extends AnyFlatSpec with Matchers with LocalTempDir { "A MergeFilesTask" should "work" in { val session = Session.builder().build() - val executor = session.executor + val executor = session.execution val context = session.context val fs = session.fs @@ -90,7 +90,7 @@ class MergeFilesTargetTest extends FlatSpec with Matchers with LocalTempDir { it should "support delimiters" in { val session = Session.builder().build() - val executor = session.executor + val executor = session.execution val context = session.context val fs = session.fs diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/NullTargetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/NullTargetTest.scala new file mode 100644 index 000000000..cb3bf724d --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/NullTargetTest.scala @@ -0,0 +1,65 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.target + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.common.No +import com.dimajix.flowman.execution.Lifecycle +import com.dimajix.flowman.execution.Phase +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.model.Target + + +class NullTargetTest extends AnyFlatSpec with Matchers { + "A NullTarget" should "be parseable" in { + val spec = + """ + |targets: + | custom: + | kind: null + | partition: + | p1: lala + |""".stripMargin + + val module = Module.read.string(spec) + val target = module.targets("custom") + target shouldBe an[NullTargetSpec] + } + + it should "do nothing" in { + val session = Session.builder.build() + val execution = session.execution + val context = session.context + + val target = NullTarget( + Target.Properties(context), + Map("p1" -> "lala") + ) + + target.phases should be (Lifecycle.ALL.toSet) + target.requires(Phase.BUILD) should be (Set()) + target.provides(Phase.BUILD) should be (Set()) + target.before should be (Seq()) + target.after should be (Seq()) + + target.dirty(execution, Phase.BUILD) should be (No) + target.execute(execution, Phase.BUILD) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/RelationTargetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/RelationTargetTest.scala index 4ca80300d..50167c31e 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/RelationTargetTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/RelationTargetTest.scala @@ -17,8 +17,8 @@ package com.dimajix.flowman.spec.target import org.apache.hadoop.fs.Path -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Context import com.dimajix.flowman.execution.Phase @@ -35,12 +35,11 @@ import com.dimajix.flowman.model.ResourceIdentifier import com.dimajix.flowman.model.Target import com.dimajix.flowman.model.TargetIdentifier import com.dimajix.flowman.spec.mapping.ProvidedMapping -import com.dimajix.flowman.spec.relation.FileRelation import com.dimajix.flowman.spec.relation.NullRelation import com.dimajix.spark.testing.LocalSparkSession -class RelationTargetTest extends FlatSpec with Matchers with LocalSparkSession { +class RelationTargetTest extends AnyFlatSpec with Matchers with LocalSparkSession { "The RelationTarget" should "work" in { val spec = s""" @@ -111,7 +110,7 @@ class RelationTargetTest extends FlatSpec with Matchers with LocalSparkSession { .withSparkSession(spark) .withProject(project) .build() - val executor = session.executor + val executor = session.execution val context = session.getContext(project) val target = context.getTarget(TargetIdentifier("target")) diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/PluginTargetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/TargetSpecTest.scala similarity index 60% rename from flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/PluginTargetTest.scala rename to flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/TargetSpecTest.scala index 3bce476cd..664db8e0a 100644 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/PluginTargetTest.scala +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/TargetSpecTest.scala @@ -16,40 +16,34 @@ package 
com.dimajix.flowman.spec.target -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -import com.dimajix.flowman.annotation.TargetType import com.dimajix.flowman.execution.Context import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.BaseTarget import com.dimajix.flowman.model.Module import com.dimajix.flowman.model.Target +import com.dimajix.flowman.spec.annotation.TargetType -case class AnnotatedTarget(instanceProperties:Target.Properties) extends BaseTarget { -} - -@TargetType(kind = "annotatedTask") +@TargetType(kind = "annotatedTarget") class AnnotatedTargetSpec extends TargetSpec { - override def instantiate(context: Context): Target = AnnotatedTarget(instanceProperties(context)) + override def instantiate(context: Context): Target = ??? } -class PluginTargetTest extends FlatSpec with Matchers { - "A plugin" should "be used if present" in { - val session = Session.builder().build() +class TargetSpecTest extends AnyFlatSpec with Matchers { + "TargetSpec" should "support custom targets" in { val spec = """ |targets: | custom: - | kind: annotatedTask + | kind: annotatedTarget """.stripMargin val module = Module.read.string(spec) - module.targets.keys should contain("custom") - val target = module.targets("custom").instantiate(session.context) - target shouldBe an[AnnotatedTarget] + val target = module.targets("custom") + target shouldBe an[AnnotatedTargetSpec] } - } diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/ValidateTargetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/ValidateTargetTest.scala new file mode 100644 index 000000000..e2c025e41 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/ValidateTargetTest.scala @@ -0,0 +1,136 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.target + +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.common.Yes +import com.dimajix.flowman.execution.Phase +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.execution.ValidationFailedException +import com.dimajix.flowman.model.Assertion +import com.dimajix.flowman.model.AssertionResult +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.model.Target + + +class ValidateTargetTest extends AnyFlatSpec with Matchers with MockFactory { + "The ValidateTarget" should "be parseable" in { + val spec = + """ + |targets: + | custom: + | kind: validate + | assertions: + | check_primary_key: + | kind: sql + | tests: + | - query: "SELECT * FROM somewhere" + | expected: ["a"] + |""".stripMargin + + val module = Module.read.string(spec) + val target = module.targets("custom") + target shouldBe an[ValidateTargetSpec] + } + + it should "execute assertions" in { + val session = Session.builder.build() + val execution = session.execution + val context = session.context + + val assertion = mock[Assertion] + val target = ValidateTarget( + Target.Properties(context), + Map("a1" -> assertion) + ) + + (assertion.requires _).expects().returns(Set()) + (assertion.inputs _).expects().atLeastOnce().returns(Seq()) + (assertion.description _).expects().returns(None) + (assertion.context _).expects().returns(context) + (assertion.execute _).expects(*,*).returns(Seq(AssertionResult("a1", true))) + + target.phases should be (Set(Phase.VALIDATE)) + target.requires(Phase.VALIDATE) should be (Set()) + target.provides(Phase.VALIDATE) should be (Set()) + target.before should be (Seq()) + target.after should be (Seq()) + + target.dirty(execution, Phase.VALIDATE) should be (Yes) + target.execute(execution, Phase.VALIDATE) + } + + it should "return success on an empty list of assertions" in { + val session = Session.builder.build() + val execution = session.execution + val context = session.context + + val assertion = mock[Assertion] + val target = ValidateTarget( + Target.Properties(context), + Map("a1" -> assertion) + ) + + (assertion.requires _).expects().returns(Set()) + (assertion.inputs _).expects().atLeastOnce().returns(Seq()) + (assertion.description _).expects().returns(None) + (assertion.context _).expects().returns(context) + (assertion.execute _).expects(*,*).returns(Seq()) + + target.phases should be (Set(Phase.VALIDATE)) + target.requires(Phase.VALIDATE) should be (Set()) + target.provides(Phase.VALIDATE) should be (Set()) + target.before should be (Seq()) + target.after should be (Seq()) + + target.dirty(execution, Phase.VALIDATE) should be (Yes) + target.execute(execution, Phase.VALIDATE) + } + + it should "throw an exception when an assertion fails" in { + val session = Session.builder.build() + val execution = session.execution + val context = session.context + + val assertion = mock[Assertion] + val target = ValidateTarget( + Target.Properties(context), + Map("a1" -> assertion) + ) + + (assertion.requires _).expects().returns(Set()) + (assertion.inputs _).expects().atLeastOnce().returns(Seq()) + (assertion.description _).expects().returns(None) + (assertion.context _).expects().returns(context) + (assertion.execute _).expects(*,*).returns(Seq( + AssertionResult("a1", false), + AssertionResult("a1", true) + )) + + target.phases should be (Set(Phase.VALIDATE)) + target.requires(Phase.VALIDATE) should be (Set()) + 
target.provides(Phase.VALIDATE) should be (Set()) + target.before should be (Seq()) + target.after should be (Seq()) + + target.dirty(execution, Phase.VALIDATE) should be (Yes) + a[ValidationFailedException] should be thrownBy(target.execute(execution, Phase.VALIDATE)) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/VerifyTargetTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/VerifyTargetTest.scala new file mode 100644 index 000000000..6d8b1a62d --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/target/VerifyTargetTest.scala @@ -0,0 +1,136 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.spec.target + +import org.scalamock.scalatest.MockFactory +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.common.Yes +import com.dimajix.flowman.execution.Phase +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.execution.ValidationFailedException +import com.dimajix.flowman.model.Assertion +import com.dimajix.flowman.model.AssertionResult +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.model.Target + + +class VerifyTargetTest extends AnyFlatSpec with Matchers with MockFactory { + "The VerifyTarget" should "be parseable" in { + val spec = + """ + |targets: + | custom: + | kind: verify + | assertions: + | check_primary_key: + | kind: sql + | tests: + | - query: "SELECT * FROM somewhere" + | expected: ["a"] + |""".stripMargin + + val module = Module.read.string(spec) + val target = module.targets("custom") + target shouldBe an[VerifyTargetSpec] + } + + it should "execute assertions" in { + val session = Session.builder.build() + val execution = session.execution + val context = session.context + + val assertion = mock[Assertion] + val target = VerifyTarget( + Target.Properties(context), + Map("a1" -> assertion) + ) + + (assertion.requires _).expects().returns(Set()) + (assertion.inputs _).expects().atLeastOnce().returns(Seq()) + (assertion.description _).expects().returns(None) + (assertion.context _).expects().returns(context) + (assertion.execute _).expects(*,*).returns(Seq(AssertionResult("a1", true))) + + target.phases should be (Set(Phase.VERIFY)) + target.requires(Phase.VERIFY) should be (Set()) + target.provides(Phase.VERIFY) should be (Set()) + target.before should be (Seq()) + target.after should be (Seq()) + + target.dirty(execution, Phase.VERIFY) should be (Yes) + target.execute(execution, Phase.VERIFY) + } + + it should "return success on an empty list of assertions" in { + val session = Session.builder.build() + val execution = session.execution + val context = session.context + + val assertion = mock[Assertion] + val target = VerifyTarget( + Target.Properties(context), + Map("a1" -> assertion) + ) + + (assertion.requires _).expects().returns(Set()) + (assertion.inputs _).expects().atLeastOnce().returns(Seq()) + 
(assertion.description _).expects().returns(None) + (assertion.context _).expects().returns(context) + (assertion.execute _).expects(*,*).returns(Seq()) + + target.phases should be (Set(Phase.VERIFY)) + target.requires(Phase.VERIFY) should be (Set()) + target.provides(Phase.VERIFY) should be (Set()) + target.before should be (Seq()) + target.after should be (Seq()) + + target.dirty(execution, Phase.VERIFY) should be (Yes) + target.execute(execution, Phase.VERIFY) + } + + it should "throw an exception when an assertion fails" in { + val session = Session.builder.build() + val execution = session.execution + val context = session.context + + val assertion = mock[Assertion] + val target = VerifyTarget( + Target.Properties(context), + Map("a1" -> assertion) + ) + + (assertion.requires _).expects().returns(Set()) + (assertion.inputs _).expects().atLeastOnce().returns(Seq()) + (assertion.description _).expects().returns(None) + (assertion.context _).expects().returns(context) + (assertion.execute _).expects(*,*).returns(Seq( + AssertionResult("a1", false), + AssertionResult("a1", true) + )) + + target.phases should be (Set(Phase.VERIFY)) + target.requires(Phase.VERIFY) should be (Set()) + target.provides(Phase.VERIFY) should be (Set()) + target.before should be (Seq()) + target.after should be (Seq()) + + target.dirty(execution, Phase.VERIFY) should be (Yes) + a[ValidationFailedException] should be thrownBy(target.execute(execution, Phase.VERIFY)) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spec/test/TestSpecTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/test/TestSpecTest.scala new file mode 100644 index 000000000..0c48b9ea0 --- /dev/null +++ b/flowman-spec/src/test/scala/com/dimajix/flowman/spec/test/TestSpecTest.scala @@ -0,0 +1,72 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.spec.test + +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers + +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Module +import com.dimajix.flowman.model.TargetIdentifier +import com.dimajix.flowman.model.TestIdentifier + + +class TestSpecTest extends AnyFlatSpec with Matchers { + "A Test" should "be deseializable from" in { + val spec = + """ + |tests: + | base_test: + | environment: + | - x=y + | + | test: + | description: Some Test + | extends: + | - base_test + | + | fixtures: + | dummy_fixture: + | kind: null + | + | overrideMappings: + | some_mapping: + | kind: mock + | + | overrideRelations: + | some_relation: + | kind: mock + | + | targets: + | - grabenv + """.stripMargin + + val project = Module.read.string(spec).toProject("project") + val session = Session.builder().build() + val context = session.getContext(project) + + val test = context.getTest(TestIdentifier("test")) + test.name should be ("test") + test.identifier should be (TestIdentifier("project/test")) + test.description should be (Some("Some Test")) + test.targets should be (Seq(TargetIdentifier("grabenv"))) + test.environment should be (Map("x" -> "y")) + test.fixtures.keySet should be (Set("dummy_fixture")) + test.overrideMappings.keySet should be (Set("some_mapping")) + test.overrideRelations.keySet should be (Set("some_relation")) + } +} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spi/CustomRelationProviderTest.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spi/CustomRelationProviderTest.scala deleted file mode 100644 index 8bedc1e6e..000000000 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spi/CustomRelationProviderTest.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright 2018-2020 Kaya Kupferschmidt - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package com.dimajix.flowman.spi - -import org.scalatest.FlatSpec -import org.scalatest.Matchers - -import com.dimajix.flowman.annotation.RelationType -import com.dimajix.flowman.model.Module -import com.dimajix.flowman.spec.relation.NullRelationSpec - - -@RelationType(kind="customRelation") -class CustomRelationSpec extends NullRelationSpec { -} - - -class CustomRelationProviderTest extends FlatSpec with Matchers { - "A plugin" should "be used if present" in { - val spec = - """ - |relations: - | custom: - | kind: customRelation - """.stripMargin - val module = Module.read.string(spec) - module.relations.keys should contain("custom") - val rel = module.relations("custom") - rel shouldBe a[CustomRelationSpec] - } -} diff --git a/flowman-spec/src/test/scala/com/dimajix/flowman/spi/CustomTargetProvider.scala b/flowman-spec/src/test/scala/com/dimajix/flowman/spi/CustomTargetProvider.scala deleted file mode 100644 index cb90707f5..000000000 --- a/flowman-spec/src/test/scala/com/dimajix/flowman/spi/CustomTargetProvider.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright 2018-2020 Kaya Kupferschmidt - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.dimajix.flowman.spi - -import org.scalatest.FlatSpec -import org.scalatest.Matchers - -import com.dimajix.flowman.annotation.TargetType -import com.dimajix.flowman.model.Module -import com.dimajix.flowman.spec.target.NullTargetSpec - - -@TargetType(kind="customTarget") -class CustomTargetSpec extends NullTargetSpec { -} - -class CustomTargetProvider extends FlatSpec with Matchers { - "A plugin" should "be used if present" in { - val spec = - """ - |targets: - | custom: - | kind: customTarget - """.stripMargin - val module = Module.read.string(spec) - module.targets.keys should contain("custom") - val target = module.targets("custom") - target shouldBe a[CustomTargetSpec] - } -} diff --git a/flowman-testing/pom.xml b/flowman-testing/pom.xml index eda736809..62449d0a3 100644 --- a/flowman-testing/pom.xml +++ b/flowman-testing/pom.xml @@ -9,7 +9,7 @@ com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 .. 
@@ -39,17 +39,18 @@ com.dimajix.flowman - flowman-spec + flowman-scalatest-compat + compile com.dimajix.flowman - flowman-dsl + flowman-spec com.dimajix.flowman - flowman-plugin-example + flowman-dsl diff --git a/flowman-testing/src/main/scala/com/dimajix/flowman/testing/Runner.scala b/flowman-testing/src/main/scala/com/dimajix/flowman/testing/Runner.scala index cb108c896..f7acbeb7b 100644 --- a/flowman-testing/src/main/scala/com/dimajix/flowman/testing/Runner.scala +++ b/flowman-testing/src/main/scala/com/dimajix/flowman/testing/Runner.scala @@ -35,9 +35,12 @@ import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.Session import com.dimajix.flowman.execution.Status import com.dimajix.flowman.hadoop.FileSystem +import com.dimajix.flowman.model.Job import com.dimajix.flowman.model.JobIdentifier import com.dimajix.flowman.model.Namespace import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.Test +import com.dimajix.flowman.model.TestIdentifier import com.dimajix.spark.features @@ -65,7 +68,8 @@ object Runner { private var environment:Map[String,String] = Map() private var config:Map[String,String] = Map() private var profiles:Seq[String] = Seq() - private var sparkMaster:String = "local[2]" + private var sparkMaster:String = "local[*]" + private var sparkName:String = "" private lazy val fs = FileSystem(new Configuration(false)) def withNamespace(namespace:URL) : Builder = { @@ -125,13 +129,18 @@ object Runner { this } + def withSparkName(name:String) : Builder = { + this.sparkName = name + this + } + def withSparkMaster(master:String) : Builder = { this.sparkMaster = master this } def build() : Runner = { - new Runner(namespace, project, environment, config, profiles, sparkMaster) + new Runner(namespace, project, environment, config, profiles, sparkMaster, sparkName) } } @@ -145,7 +154,8 @@ class Runner private( environment: Map[String,String], config: Map[String,String], profiles: Seq[String], - sparkMaster:String + sparkMaster:String, + sparkName:String ) { val tempDir : File = createTempDir() @@ -186,16 +196,24 @@ class Runner private( /** * Provides access to the Flowman session */ - val session : Session = Session.builder() - .withSparkSession(conf => createSparkSession(conf)) - .withNamespace(namespace) - .withProject(project) - .withEnvironment(environment) - .withConfig(hiveOverrides) - .withConfig(sparkOverrides) - .withConfig(config) - .withProfiles(profiles) - .build() + val session : Session = { + val builder = Session.builder() + .withSparkSession(conf => createSparkSession(conf)) + .withNamespace(namespace) + .withProject(project) + .withEnvironment(environment) + .withConfig(hiveOverrides) + .withConfig(sparkOverrides) + .withConfig(config) + .withProfiles(profiles) + + if (sparkMaster.nonEmpty) + builder.withSparkMaster(sparkMaster) + if (sparkName.nonEmpty) + builder.withSparkName(sparkName) + + builder.build() + } /** * Run a single job within the project @@ -206,8 +224,12 @@ class Runner private( def runJob(jobName:String, phases:Seq[Phase], args:Map[String,String] = Map()) : Boolean = { val context = session.getContext(project) val job = context.getJob(JobIdentifier(jobName)) + runJob(job, phases, args) + } + + def runJob(job:Job, phases:Seq[Phase], args:Map[String,String]) : Boolean = { val runner = session.runner - val result = runner.executeJob(job, phases, args, true) + val result = runner.executeJob(job, phases, args, force=true) result match { case Status.SUCCESS => true @@ -224,6 +246,48 @@ class Runner private( 
runJob(jobName, phases.asScala, args.asScala.toMap) } + + /** + * Run a single test within the project + * @param testName + * @param args + * @return + */ + def runTest(testName:String) : Boolean = { + val context = session.getContext(project) + val test = context.getTest(TestIdentifier(testName)) + runTest(test) + } + + def runTest(test:Test) : Boolean = { + val runner = session.runner + val result = runner.executeTest(test) + + result match { + case Status.SUCCESS => true + case Status.SKIPPED => true + case _ => false + } + } + + /** + * Runs all non-empty tests in a project. Tests without any assertions will be skipped. + * @return + */ + def runTests() : Boolean = { + val context = session.getContext(project) + + project.tests.keys.toSeq.forall { testName => + val test = context.getTest(TestIdentifier(testName)) + if (test.assertions.nonEmpty) { + runTest(test) + } + else { + true + } + } + } + /** * Releases all resources including the Spark session and temporary directory */ diff --git a/flowman-testing/src/test/scala/com/dimajix/flowman/testing/ResourcesTestSpec.scala b/flowman-testing/src/test/scala/com/dimajix/flowman/testing/ResourcesTestSpec.scala index d6b4727a9..7ffbd5d20 100644 --- a/flowman-testing/src/test/scala/com/dimajix/flowman/testing/ResourcesTestSpec.scala +++ b/flowman-testing/src/test/scala/com/dimajix/flowman/testing/ResourcesTestSpec.scala @@ -17,15 +17,16 @@ package com.dimajix.flowman.testing import com.google.common.io.Resources -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Phase -class ResourcesTestSpec extends FlatSpec with Matchers { +class ResourcesTestSpec extends AnyFlatSpec with Matchers { "Projects as resources" should "be testable" in { val runner = Runner.builder + .withEnvironment("env", "some_value") .withProfile("test") .withProject(Resources.getResource("flows/project.yml")) .build() diff --git a/flowman-tools/pom.xml b/flowman-tools/pom.xml index 68b21d66a..9b64fc302 100644 --- a/flowman-tools/pom.xml +++ b/flowman-tools/pom.xml @@ -9,7 +9,7 @@ com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 .. 
@@ -23,6 +23,75 @@ org.scalatest scalatest-maven-plugin + + + org.apache.maven.plugins + maven-dependency-plugin + + + initialize + + build-classpath + + + runtime + flowman-tools.classpath + false + , + $FLOWMAN_HOME/lib + + json,org.everit.json.schema,velocity-engine-core + + + + + + org.apache.maven.plugins + maven-resources-plugin + + + copy-resources + process-resources + + copy-resources + + + ${project.build.directory}/properties + + + src/main/properties + + **/* + + true + + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + + + attach-artifacts + package + + attach-artifact + + + + + ${project.build.directory}/properties/flowman-tools.properties + properties + properties + + + + + + @@ -37,6 +106,11 @@ flowman-dsl + + com.dimajix.flowman + flowman-scalatest-compat + + org.apache.hadoop hadoop-client diff --git a/flowman-tools/src/main/properties/flowman-tools.properties b/flowman-tools/src/main/properties/flowman-tools.properties new file mode 100644 index 000000000..7c0767626 --- /dev/null +++ b/flowman-tools/src/main/properties/flowman-tools.properties @@ -0,0 +1 @@ +flowman-tools.classpath=${flowman-tools.classpath} diff --git a/flowman-tools/src/main/resources/com/dimajix/flowman/log4j-defaults.properties b/flowman-tools/src/main/resources/com/dimajix/flowman/log4j-defaults.properties index 3d5893182..48013d553 100644 --- a/flowman-tools/src/main/resources/com/dimajix/flowman/log4j-defaults.properties +++ b/flowman-tools/src/main/resources/com/dimajix/flowman/log4j-defaults.properties @@ -19,7 +19,7 @@ log4j.rootCategory=INFO, console log4j.appender.console=org.apache.log4j.ConsoleAppender log4j.appender.console.target=System.err log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n +log4j.appender.console.layout.ConversionPattern=[%p] %m%n # Settings to quiet third party logs that are too verbose diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/StatefulTool.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/StatefulTool.scala index 46a07ef72..2a299be37 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/StatefulTool.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/StatefulTool.scala @@ -6,6 +6,7 @@ import com.dimajix.flowman.execution.Context import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Job import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.Test class StatefulTool( @@ -17,6 +18,7 @@ class StatefulTool( ) extends Tool { private var _project: Project = Project("empty") private var _job: Option[Job] = None + private var _test: Option[Test] = None private var _context: Context = _ private var _session: Session = _ @@ -31,6 +33,8 @@ class StatefulTool( def job: Option[Job] = _job + def test: Option[Test] = _test + def newSession() : Session = { if (_session != null) { _session.shutdown() @@ -64,14 +68,27 @@ class StatefulTool( def enterJob(job: Job, args:Map[String,String]): Unit = { val jargs = job.arguments(args) _context = _session.runner.withJobContext(job,jargs) { (context,args) => context } - _session.executor.cleanup() + _session.execution.cleanup() + _test = None _job = Some(job) } def leaveJob(): Unit = { _context = _session.getContext(project) - _session.executor.cleanup() + _session.execution.cleanup() + _job = None + } + + def enterTest(test: Test): Unit = { + _context = _session.runner.withTestContext(test) { context => context } + 
_session.execution.cleanup() _job = None + _test = Some(test) } + def leaveTest(): Unit = { + _context = _session.getContext(project) + _session.execution.cleanup() + _test = None + } } diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/Tool.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/Tool.scala index d1a05f4b1..ae7f1b932 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/Tool.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/Tool.scala @@ -108,12 +108,13 @@ class Tool { val builder = Session.builder() .withNamespace(namespace) .withProject(project.orNull) - .withSparkName(sparkName) .withConfig(allConfigs) .withEnvironment(additionalEnvironment) .withProfiles(profiles) .withJars(plugins.jars.map(_.toString)) + if (sparkName.nonEmpty) + builder.withSparkName(sparkName) if (sparkMaster.nonEmpty) builder.withSparkMaster(sparkMaster) diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/Arguments.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/Arguments.scala index 757fc968f..fdca6144f 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/Arguments.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/Arguments.scala @@ -34,6 +34,7 @@ import com.dimajix.flowman.tools.exec.model.ModelCommand import com.dimajix.flowman.tools.exec.namespace.NamespaceCommand import com.dimajix.flowman.tools.exec.project.ProjectCommand import com.dimajix.flowman.tools.exec.target.TargetCommand +import com.dimajix.flowman.tools.exec.test.TestCommand class Arguments(args:Array[String]) { @@ -45,6 +46,8 @@ class Arguments(args:Array[String]) { var profiles: Array[String] = Array() @Option(name = "-D", aliases=Array("--env"), usage = "set environment variables which can be accessed inside config", metaVar = "") var environment: Array[String] = Array() + @Option(name = "-B", aliases=Array("--batch-mode"), usage = "Run in non-interactive batch mode. 
Disables output color.") + var batchMode: Boolean = false @Option(name = "--conf", usage = "set a Flowman or Spark config", metaVar = "=") var config: Array[String] = Array() @Option(name = "--info", usage = "dump configuration information") @@ -54,7 +57,7 @@ class Arguments(args:Array[String]) { @Option(name = "--spark-logging", usage = "set the log level for Spark", metaVar = "") var sparkLogging: String = "WARN" @Option(name = "--spark-name", usage = "set the Spark application name", metaVar = "") - var sparkName: String = "flowman" + var sparkName: String = "Flowman Exec" @Argument(required=false,index=0,metaVar="",usage="the object to work with",handler=classOf[SubCommandHandler]) @SubCommands(Array( @@ -65,6 +68,7 @@ class Arguments(args:Array[String]) { new SubCommand(name="mapping",impl=classOf[MappingCommand]), new SubCommand(name="namespace",impl=classOf[NamespaceCommand]), new SubCommand(name="target",impl=classOf[TargetCommand]), + new SubCommand(name="test",impl=classOf[TestCommand]), new SubCommand(name="project",impl=classOf[ProjectCommand]) )) var command:Command = _ diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/Driver.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/Driver.scala index 7ddfa35be..22db45ce0 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/Driver.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/Driver.scala @@ -26,6 +26,7 @@ import org.kohsuke.args4j.CmdLineException import com.dimajix.flowman.spec.splitSettings import com.dimajix.flowman.tools.Logging import com.dimajix.flowman.tools.Tool +import com.dimajix.flowman.util.ConsoleColors object Driver { @@ -74,6 +75,9 @@ class Driver(options:Arguments) extends Tool { * @return */ def run() : Boolean = { + // Disable colors in batch mode + ConsoleColors.disabled = options.batchMode + val command = options.command if (command.help) { command.printHelp(System.out) diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/job/InfoCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/job/InspectCommand.scala similarity index 78% rename from flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/job/InfoCommand.scala rename to flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/job/InspectCommand.scala index 8d4ad6710..e89acdd67 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/job/InfoCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/job/InspectCommand.scala @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package com.dimajix.flowman.tools.shell.job +package com.dimajix.flowman.tools.exec.job import scala.util.control.NonFatal @@ -22,22 +22,24 @@ import org.kohsuke.args4j.Argument import org.slf4j.LoggerFactory import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.NoSuchJobException import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.JobIdentifier import com.dimajix.flowman.model.Project import com.dimajix.flowman.tools.exec.Command -class InfoCommand extends Command { - private val logger = LoggerFactory.getLogger(classOf[InfoCommand]) +class InspectCommand extends Command { + private val logger = LoggerFactory.getLogger(classOf[InspectCommand]) - @Argument(index=0, required=true, usage = "name of job to enter", metaVar = "") + @Argument(index=0, required=true, usage = "name of job to inspect", metaVar = "") var job: String = "" override def execute(session: Session, project:Project, context:Context): Boolean = { try { val job = context.getJob(JobIdentifier(this.job)) println(s"Name: ${job.name}") + println(s"Description: ${job.description}") println("Targets:") job.targets .foreach{ p => println(s" $p") } @@ -53,6 +55,9 @@ class InfoCommand extends Command { true } catch { + case ex:NoSuchJobException => + logger.error(s"Cannot resolve job '${ex.job}'") + false case NonFatal(e) => logger.error(s"Error '$job': ${e.getMessage}") false diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/job/JobCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/job/JobCommand.scala index 0a8c057d0..2b4556726 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/job/JobCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/job/JobCommand.scala @@ -31,6 +31,8 @@ class JobCommand extends NestedCommand { @Argument(required=true,index=0,metaVar="",usage="the subcommand to run",handler=classOf[SubCommandHandler]) @SubCommands(Array( new SubCommand(name="list",impl=classOf[ListCommand]), + new SubCommand(name="inspect",impl=classOf[InspectCommand]), + new SubCommand(name="validate",impl=classOf[ValidateCommand]), new SubCommand(name="create",impl=classOf[CreateCommand]), new SubCommand(name="build",impl=classOf[BuildCommand]), new SubCommand(name="verify",impl=classOf[VerifyCommand]), diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/job/PhaseCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/job/PhaseCommand.scala index 53b24ebeb..d13413609 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/job/PhaseCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/job/PhaseCommand.scala @@ -46,10 +46,14 @@ sealed class PhaseCommand(phase:Phase) extends ActionCommand { var job: String = "" @Argument(index=1, required=false, usage = "specifies job parameters", metaVar = "=") var args: Array[String] = Array() + @Option(name = "-t", aliases=Array("--target"), usage = "only process specific targets, as specified by a regex", metaVar = "") + var targets: Array[String] = Array(".*") @Option(name = "-f", aliases=Array("--force"), usage = "forces execution, even if outputs are already created") var force: Boolean = false @Option(name = "-k", aliases=Array("--keep-going"), usage = "continues execution of job with next target in case of errors") var keepGoing: Boolean = false + @Option(name = "--dry-run", usage = "perform dry run without actually executing build targets") + var dryRun: Boolean = 
false @Option(name = "-nl", aliases=Array("--no-lifecycle"), usage = "only executes the specific phase and not the whole lifecycle") var noLifecycle: Boolean = false @@ -76,7 +80,7 @@ sealed class PhaseCommand(phase:Phase) extends ActionCommand { job.interpolate(args).forall { args => val runner = session.runner - val result = runner.executeJob(job, lifecycle, args, force, keepGoing) + val result = runner.executeJob(job, lifecycle, args, targets.map(_.r), force, keepGoing, dryRun) result match { case Status.SUCCESS => true case Status.SKIPPED => true @@ -86,6 +90,7 @@ sealed class PhaseCommand(phase:Phase) extends ActionCommand { } } +class ValidateCommand extends PhaseCommand(Phase.VALIDATE) class CreateCommand extends PhaseCommand(Phase.CREATE) class BuildCommand extends PhaseCommand(Phase.BUILD) class VerifyCommand extends PhaseCommand(Phase.VERIFY) diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/CountCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/CountCommand.scala index bc2399a67..bce9c59ed 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/CountCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/CountCommand.scala @@ -44,7 +44,7 @@ class CountCommand extends ActionCommand { val task = CountTarget(context, MappingOutputIdentifier(mapping)) Try { - task.execute(session.executor, Phase.BUILD) + task.execute(session.execution, Phase.BUILD) } match { case Success(_) => logger.info("Successfully counted mapping") diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/DependencyTreeCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/DependencyTreeCommand.scala new file mode 100644 index 000000000..9d48f880a --- /dev/null +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/DependencyTreeCommand.scala @@ -0,0 +1,57 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.tools.exec.mapping + +import scala.util.control.NonFatal + +import org.kohsuke.args4j.Argument +import org.slf4j.LoggerFactory + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.NoSuchMappingException +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.graph.GraphBuilder +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.MappingIdentifier +import com.dimajix.flowman.tools.exec.ActionCommand + + +class DependencyTreeCommand extends ActionCommand { + private val logger = LoggerFactory.getLogger(classOf[DependencyTreeCommand]) + + @Argument(required = true, usage = "specifies mapping to inspect", metaVar = "") + var mapping: String = "" + + override protected def executeInternal(session: Session, context: Context, project: Project): Boolean = { + try { + val mapping = context.getMapping(MappingIdentifier(this.mapping)) + val graph = new GraphBuilder(context).addMapping(mapping).build() + val node = graph.mapping(mapping) + println(node.upstreamDependencyTree) + true + } + catch { + case ex:NoSuchMappingException => + logger.error(s"Cannot resolve mapping '${ex.mapping}'") + false + case NonFatal(e) => + logger.error(s"Error '$mapping': ${e.getMessage}") + false + } + + } +} diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/DescribeCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/DescribeCommand.scala index cc92540a3..b08609f83 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/DescribeCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/DescribeCommand.scala @@ -42,10 +42,10 @@ class DescribeCommand extends ActionCommand { var mapping: String = "" override def executeInternal(session: Session, context:Context, project: Project) : Boolean = { - Try { + try { val identifier = MappingOutputIdentifier(this.mapping) val mapping = context.getMapping(identifier.mapping) - val executor = session.executor + val executor = session.execution if (useSpark) { val df = executor.instantiate(mapping, identifier.output) @@ -55,14 +55,13 @@ class DescribeCommand extends ActionCommand { val schema = executor.describe(mapping, identifier.output) schema.printTree() } - } match { - case Success(_) => - logger.info("Successfully finished describing mapping") - true - case Failure(ex:NoSuchMappingException) => + true + } + catch { + case ex:NoSuchMappingException => logger.error(s"Cannot resolve mapping '${ex.mapping}'") false - case Failure(NonFatal(e)) => + case NonFatal(e) => logger.error(s"Caught exception while describing mapping '$mapping'", e) false } diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ExplainCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ExplainCommand.scala index ca0be7ade..281000364 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ExplainCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ExplainCommand.scala @@ -45,20 +45,19 @@ class ExplainCommand extends ActionCommand { override def executeInternal(session: Session, context:Context, project: Project) : Boolean = { logger.info(s"Explaining mapping '$mapping'") - Try { + try { val id = MappingOutputIdentifier(mapping) val instance = context.getMapping(id.mapping) - val executor = session.executor + val executor = session.execution val table = 
executor.instantiate(instance, id.output) table.explain(extended) - } match { - case Success(_) => - logger.info("Successfully finished explaining mapping") - true - case Failure(ex:NoSuchMappingException) => + true + } + catch { + case ex:NoSuchMappingException => logger.error(s"Cannot resolve mapping '${ex.mapping}'") false - case Failure(NonFatal(e)) => + case NonFatal(e) => logger.error(s"Caught exception while explaining mapping '$mapping", e) false } diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ExportSchemaCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ExportSchemaCommand.scala index aca972d9c..9b1506d01 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ExportSchemaCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ExportSchemaCommand.scala @@ -53,7 +53,7 @@ class ExportSchemaCommand extends ActionCommand { Try { val id = MappingOutputIdentifier(mapping) val instance = context.getMapping(id.mapping) - val executor = session.executor + val executor = session.execution val schema = if (useSpark) { val table = executor.instantiate(instance, id.output) diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/MappingCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/MappingCommand.scala index 979607f64..3dd6c2dd3 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/MappingCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/MappingCommand.scala @@ -33,6 +33,7 @@ class MappingCommand extends NestedCommand { new SubCommand(name="count",impl=classOf[CountCommand]), new SubCommand(name="describe",impl=classOf[DescribeCommand]), new SubCommand(name="explain",impl=classOf[ExplainCommand]), + new SubCommand(name="deptree",impl=classOf[DependencyTreeCommand]), new SubCommand(name="list",impl=classOf[ListCommand]), new SubCommand(name="validate",impl=classOf[ValidateCommand]), new SubCommand(name="export-schema",impl=classOf[ExportSchemaCommand]), diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/SaveCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/SaveCommand.scala index db0aeb7e7..ccb4bbe39 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/SaveCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/SaveCommand.scala @@ -52,16 +52,15 @@ class SaveCommand extends ActionCommand { override def executeInternal(session: Session, context:Context, project: Project) : Boolean = { val task = FileTarget(context, MappingOutputIdentifier(mapping), new Path(location), format, splitSettings(options).toMap) - Try { - task.execute(session.executor, Phase.BUILD) - } match { - case Success(_) => - logger.info(s"Successfully saved mapping '$mapping' to '$location'") - true - case Failure(ex:NoSuchMappingException) => + try { + task.execute(session.execution, Phase.BUILD) + true + } + catch { + case ex:NoSuchMappingException => logger.error(s"Cannot resolve mapping '${ex.mapping}'") false - case Failure(NonFatal(e)) => + case NonFatal(e) => logger.error(s"Caught exception while save mapping '$mapping'", e) false } diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ShowCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ShowCommand.scala index 1ebe5a75d..767a140b8 100644 
--- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ShowCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ShowCommand.scala @@ -45,17 +45,20 @@ class ShowCommand extends ActionCommand { var columns: String = "" @Option(name="-n", aliases=Array("--limit"), usage="Specifies maximum number of rows to print", metaVar="", required = false) var limit: Int = 10 + @Option(name="-nh", aliases=Array("--no-header"), usage="Do not print header", required = false) + var noHeader: Boolean = false + @Option(name="-c", aliases=Array("--csv"), usage="Print data as csv", required = false) + var csv: Boolean = false override def executeInternal(session: Session, context:Context, project: Project) : Boolean = { val columns = ParserUtils.parseDelimitedList(this.columns) - val task = ConsoleTarget(context, MappingOutputIdentifier(mapping), limit, columns) + val task = ConsoleTarget(context, MappingOutputIdentifier(mapping), limit, columns, !noHeader, csv) Try { - task.execute(session.executor, Phase.BUILD) + task.execute(session.execution, Phase.BUILD) } match { case Success(_) => - logger.info("Successfully finished dumping mapping") true case Failure(ex:NoSuchMappingException) => logger.error(s"Cannot resolve mapping '${ex.mapping}'") diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ValidateCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ValidateCommand.scala index a591ddb2b..83d38cf62 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ValidateCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/mapping/ValidateCommand.scala @@ -50,7 +50,7 @@ class ValidateCommand extends ActionCommand { project.mappings.keys.toSeq val tables = mappingNames.map(name => context.getMapping(MappingIdentifier(name))) - tables.forall(table => session.executor.instantiate(table) != null) + tables.forall(table => session.execution.instantiate(table) != null) } match { case Success(true) => logger.info("Successfully validated mappings") diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/model/DescribeCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/model/DescribeCommand.scala index 8d294c00b..ff699ab01 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/model/DescribeCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/model/DescribeCommand.scala @@ -26,6 +26,8 @@ import org.kohsuke.args4j.Option import org.slf4j.LoggerFactory import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.NoSuchJobException +import com.dimajix.flowman.execution.NoSuchRelationException import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Project import com.dimajix.flowman.model.RelationIdentifier @@ -41,22 +43,25 @@ class DescribeCommand extends ActionCommand { var relation: String = "" override def executeInternal(session: Session, context:Context, project: Project) : Boolean = { - Try { + try { val identifier = RelationIdentifier(this.relation) val relation = context.getRelation(identifier) if (useSpark) { - val df = relation.read(session.executor, None, Map()) + val df = relation.read(session.execution, None, Map()) df.printSchema() } else { - relation.schema.foreach(_.printTree()) + val execution = session.execution + val schema = relation.describe(execution) + schema.printTree() } - } match { - case Success(_) => -
logger.info("Successfully finished describing relation") - true - case Failure(NonFatal(e)) => + true + } catch { + case ex:NoSuchRelationException => + logger.error(s"Cannot resolve relation '${ex.relation}'") + false + case NonFatal(e) => logger.error(s"Caught exception while describing relation '$relation':", e) false } diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/model/PhaseCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/model/PhaseCommand.scala index 5cf03edc3..8ac8d02cd 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/model/PhaseCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/model/PhaseCommand.scala @@ -40,6 +40,10 @@ class PhaseCommand(phase:Phase) extends ActionCommand { var relations: Array[String] = Array() @Option(name = "-f", aliases=Array("--force"), usage = "forces execution, even if outputs are already created") var force: Boolean = false + @Option(name = "-k", aliases=Array("--keep-going"), usage = "continues execution of job with next target in case of errors") + var keepGoing: Boolean = false + @Option(name = "--dry-run", usage = "perform dry run without actually executing build targets") + var dryRun: Boolean = false @Option(name = "-p", aliases=Array("--partition"), usage = "specify partition to work on, as partition1=value1,partition2=value2") var partition: String = "" @@ -63,7 +67,7 @@ class PhaseCommand(phase:Phase) extends ActionCommand { .build() val runner = session.runner - val result = runner.executeJob(job, Seq(phase), force=force) + val result = runner.executeJob(job, Seq(phase), force=force, keepGoing=keepGoing, dryRun=dryRun) result match { case Status.SUCCESS => true diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/model/ShowCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/model/ShowCommand.scala index 7661b06f6..1525facad 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/model/ShowCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/model/ShowCommand.scala @@ -21,7 +21,6 @@ import scala.util.Success import scala.util.Try import scala.util.control.NonFatal -import com.sun.xml.internal.ws.wsdl.parser.ParserUtil import org.kohsuke.args4j.Argument import org.kohsuke.args4j.Option import org.slf4j.LoggerFactory @@ -46,19 +45,22 @@ class ShowCommand extends ActionCommand { var columns: String = "" @Option(name="-n", aliases=Array("--limit"), usage="Specifies maximum number of rows to print", metaVar="", required=false) var limit: Int = 10 + @Option(name="-nh", aliases=Array("--no-header"), usage="Print header", required = false) + var noHeader: Boolean = false + @Option(name="-c", aliases=Array("--csv"), usage="Print data as csv", required = false) + var csv: Boolean = false @Option(name="-p", aliases=Array("--partition"), usage = "specify partition to work on, as partition1=value1,partition2=value2") var partition: String = "" override def executeInternal(session: Session, context:Context, project: Project) : Boolean = { val columns = ParserUtils.parseDelimitedList(this.columns) val partition = ParserUtils.parseDelimitedKeyValues(this.partition).map { case(k,v) => (k,SingleValue(v)) } - val task = ConsoleTarget(context, RelationIdentifier(relation), limit, columns, partition) + val task = ConsoleTarget(context, RelationIdentifier(relation), limit, columns, partition, !noHeader, csv) Try { - task.execute(session.executor, Phase.BUILD) + 
task.execute(session.execution, Phase.BUILD) } match { case Success(_) => - logger.info("Successfully finished dumping relation") true case Failure(NonFatal(e)) => logger.error(s"Caught exception while dumping relation '$relation'", e) diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/namespace/InfoCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/namespace/InspectCommand.scala similarity index 93% rename from flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/namespace/InfoCommand.scala rename to flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/namespace/InspectCommand.scala index 37b07e407..1c4aaa7fe 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/namespace/InfoCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/namespace/InspectCommand.scala @@ -16,16 +16,13 @@ package com.dimajix.flowman.tools.exec.namespace -import scala.collection.JavaConverters._ - import com.dimajix.flowman.execution.Context import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.Project -import com.dimajix.flowman.tools.ToolConfig import com.dimajix.flowman.tools.exec.Command -class InfoCommand extends Command { +class InspectCommand extends Command { override def execute(session: Session, project:Project, context:Context): Boolean = { session.namespace.foreach { ns => println("Namespace:") diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/namespace/NamespaceCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/namespace/NamespaceCommand.scala index 9cc3ffa63..1efd1e8ed 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/namespace/NamespaceCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/namespace/NamespaceCommand.scala @@ -28,7 +28,7 @@ import com.dimajix.flowman.tools.exec.NestedCommand class NamespaceCommand extends NestedCommand { @Argument(required=true,index=0,metaVar="",usage="the subcommand to run",handler=classOf[SubCommandHandler]) @SubCommands(Array( - new SubCommand(name="info",impl=classOf[InfoCommand]) + new SubCommand(name="inspect",impl=classOf[InspectCommand]) )) override var command:Command = _ } diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/project/InfoCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/project/InspectCommand.scala similarity index 96% rename from flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/project/InfoCommand.scala rename to flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/project/InspectCommand.scala index a83585b58..9f4657585 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/project/InfoCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/project/InspectCommand.scala @@ -14,7 +14,7 @@ * limitations under the License. 
*/ -package com.dimajix.flowman.tools.shell.project +package com.dimajix.flowman.tools.exec.project import com.dimajix.flowman.execution.Context import com.dimajix.flowman.execution.Session @@ -22,7 +22,7 @@ import com.dimajix.flowman.model.Project import com.dimajix.flowman.tools.exec.Command -class InfoCommand extends Command { +class InspectCommand extends Command { override def execute(session: Session, project:Project, context:Context): Boolean = { println("Project:") println(s" name: ${project.name}") diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/project/PhaseCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/project/PhaseCommand.scala index 30c90ec2b..3cdc594b9 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/project/PhaseCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/project/PhaseCommand.scala @@ -44,10 +44,14 @@ sealed class PhaseCommand(phase:Phase) extends ActionCommand { @Argument(index=0, required=false, usage = "specifies job parameters", metaVar = "=") var args: Array[String] = Array() + @Option(name = "-t", aliases=Array("--target"), usage = "only process specific targets, as specified by a regex", metaVar = "") + var targets: Array[String] = Array(".*") @Option(name = "-f", aliases=Array("--force"), usage = "forces execution, even if outputs are already created") var force: Boolean = false @Option(name = "-k", aliases=Array("--keep-going"), usage = "continues execution of job with next target in case of errors") var keepGoing: Boolean = false + @Option(name = "--dry-run", usage = "perform dry run without actually executing build targets") + var dryRun: Boolean = false @Option(name = "-nl", aliases=Array("--no-lifecycle"), usage = "only executes the specific phase and not the whole lifecycle") var noLifecycle: Boolean = false @@ -79,7 +83,7 @@ sealed class PhaseCommand(phase:Phase) extends ActionCommand { job.interpolate(args).forall { args => val runner = session.runner - val result = runner.executeJob(job, lifecycle, args, force, keepGoing) + val result = runner.executeJob(job, lifecycle, args, targets.map(_.r), force, keepGoing, dryRun) result match { case Status.SUCCESS => true case Status.SKIPPED => true @@ -89,12 +93,10 @@ sealed class PhaseCommand(phase:Phase) extends ActionCommand { } } -class CreateCommand extends PhaseCommand(Phase.CREATE) +class ValidateCommand extends PhaseCommand(Phase.VALIDATE) +class CreateCommand extends PhaseCommand(Phase.CREATE) class BuildCommand extends PhaseCommand(Phase.BUILD) - class VerifyCommand extends PhaseCommand(Phase.VERIFY) - class TruncateCommand extends PhaseCommand(Phase.TRUNCATE) - class DestroyCommand extends PhaseCommand(Phase.DESTROY) diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/project/ProjectCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/project/ProjectCommand.scala index c48d27d92..fbf31b1d8 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/project/ProjectCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/project/ProjectCommand.scala @@ -30,6 +30,8 @@ import com.dimajix.flowman.tools.exec.NestedCommand class ProjectCommand extends NestedCommand { @Argument(required=true,index=0,metaVar="",usage="the subcommand to run",handler=classOf[SubCommandHandler]) @SubCommands(Array( + new SubCommand(name="inspect",impl=classOf[InspectCommand]), + new SubCommand(name="validate",impl=classOf[ValidateCommand]), new 
SubCommand(name="create",impl=classOf[CreateCommand]), new SubCommand(name="migrate",impl=classOf[CreateCommand]), new SubCommand(name="build",impl=classOf[BuildCommand]), diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/sql/SqlCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/sql/SqlCommand.scala index ff2cb5f4e..67c7c6439 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/sql/SqlCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/sql/SqlCommand.scala @@ -31,6 +31,7 @@ import com.dimajix.flowman.model.Mapping import com.dimajix.flowman.model.Project import com.dimajix.flowman.spec.mapping.SqlMapping import com.dimajix.flowman.tools.exec.Command +import com.dimajix.flowman.util.ConsoleUtils class SqlCommand extends Command { @@ -49,16 +50,9 @@ class SqlCommand extends Command { sql = Some(statement.mkString(" ")) ) Try { - val executor = session.executor + val executor = session.execution val df = executor.instantiate(mapping, "main") - if (csv) { - val result = df.limit(limit).collect() - println(df.columns.mkString(",")) - result.foreach(record => println(record.mkString(","))) - } - else { - df.show(limit) - } + ConsoleUtils.showDataFrame(df, limit, csv) true } match { case Failure(ex) => diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/DependencyTreeCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/DependencyTreeCommand.scala new file mode 100644 index 000000000..e67eb8ec3 --- /dev/null +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/DependencyTreeCommand.scala @@ -0,0 +1,57 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.tools.exec.target + +import scala.util.control.NonFatal + +import org.kohsuke.args4j.Argument +import org.slf4j.LoggerFactory + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.NoSuchTargetException +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.graph.GraphBuilder +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.TargetIdentifier +import com.dimajix.flowman.tools.exec.ActionCommand + + +class DependencyTreeCommand extends ActionCommand { + private val logger = LoggerFactory.getLogger(classOf[DependencyTreeCommand]) + + @Argument(required = true, usage = "specifies target to inspect", metaVar = "") + var target: String = "" + + override protected def executeInternal(session: Session, context: Context, project: Project): Boolean = { + try { + val target = context.getTarget(TargetIdentifier(this.target)) + val graph = new GraphBuilder(context).addTarget(target).build() + val node = graph.target(target) + println(node.upstreamDependencyTree) + true + } + catch { + case ex:NoSuchTargetException => + logger.error(s"Cannot resolve target '${ex.target}'") + false + case NonFatal(e) => + logger.error(s"Error '$target': ${e.getMessage}") + false + } + + } +} diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/InspectCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/InspectCommand.scala new file mode 100644 index 000000000..9e5a86c47 --- /dev/null +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/InspectCommand.scala @@ -0,0 +1,71 @@ +/* + * Copyright 2021 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.tools.exec.target + +import scala.util.control.NonFatal + +import org.kohsuke.args4j.Argument +import org.slf4j.LoggerFactory + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.NoSuchTargetException +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.TargetIdentifier +import com.dimajix.flowman.tools.exec.ActionCommand + + +class InspectCommand extends ActionCommand { + private val logger = LoggerFactory.getLogger(classOf[InspectCommand]) + + @Argument(required = true, usage = "specifies target to inspect", metaVar = "") + var target: String = "" + + override protected def executeInternal(session: Session, context: Context, project: Project): Boolean = { + try { + val target = context.getTarget(TargetIdentifier(this.target)) + println("Target:") + println(s" name: ${target.name}") + println(s" phases: ${target.phases.mkString(",")}") + println(s" before: ${target.before.mkString(",")}") + println(s" after: ${target.after.mkString(",")}") + target.phases.foreach { phase => + println(s"Phase '$phase':") + println(s" Provides:") + target.provides(phase) + .map(_.toString) + .toSeq.sorted + .foreach{ p => println(s" $p") } + println(s" Requires:") + target.requires(phase) + .map(_.toString) + .toSeq.sorted + .foreach{ p => println(s" $p") } + } + true + } + catch { + case ex:NoSuchTargetException => + logger.error(s"Cannot resolve target '${ex.target}'") + false + case NonFatal(e) => + logger.error(s"Error '$target': ${e.getMessage}") + false + } + + } +} diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/PhaseCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/PhaseCommand.scala index 7f3ea51a6..34bd4d5aa 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/PhaseCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/PhaseCommand.scala @@ -73,6 +73,7 @@ class PhaseCommand(phase:Phase) extends ActionCommand { } +class ValidateCommand extends PhaseCommand(Phase.VALIDATE) class CreateCommand extends PhaseCommand(Phase.CREATE) class BuildCommand extends PhaseCommand(Phase.BUILD) class VerifyCommand extends PhaseCommand(Phase.VERIFY) diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/TargetCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/TargetCommand.scala index 8c3f669fe..e0088a75b 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/TargetCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/TargetCommand.scala @@ -31,6 +31,8 @@ class TargetCommand extends NestedCommand { @Argument(required=true,index=0,metaVar="",usage="the subcommand to run",handler=classOf[SubCommandHandler]) @SubCommands(Array( new SubCommand(name="list",impl=classOf[ListCommand]), + new SubCommand(name="inspect",impl=classOf[InspectCommand]), + new SubCommand(name="deptree",impl=classOf[DependencyTreeCommand]), new SubCommand(name="validate",impl=classOf[ValidateCommand]), new SubCommand(name="create",impl=classOf[CreateCommand]), new SubCommand(name="build",impl=classOf[BuildCommand]), diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/ValidateCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/ValidateCommand.scala deleted file mode 100644 index f3a95fe70..000000000 --- 
a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/target/ValidateCommand.scala +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright 2018 Kaya Kupferschmidt - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.dimajix.flowman.tools.exec.target - -import scala.util.Failure -import scala.util.Success -import scala.util.Try - -import org.kohsuke.args4j.Argument -import org.slf4j.LoggerFactory - -import com.dimajix.flowman.execution.Context -import com.dimajix.flowman.execution.Session -import com.dimajix.flowman.model.Project -import com.dimajix.flowman.model.TargetIdentifier -import com.dimajix.flowman.tools.exec.ActionCommand - - -class ValidateCommand extends ActionCommand { - private val logger = LoggerFactory.getLogger(classOf[ValidateCommand]) - - @Argument(usage = "specifies target to validate", metaVar = "") - var outputs: Array[String] = Array() - - def executeInternal(session: Session, context:Context, project: Project) : Boolean = { - logger.info("Validating targets {}", if (outputs != null) outputs.mkString(",") else "all") - - Try { - val targets = - if (outputs.nonEmpty) - outputs.toSeq - .map(t => context.getTarget(TargetIdentifier(t))) - else - project.targets.keys.toSeq - .map(t => context.getTarget(TargetIdentifier(t))) - - //val tables = targets.flatMap(_).map(mid => context.getMapping(mid.mapping)) - //tables.forall(table => executor.instantiate(table) != null) - true - } match { - case Success(true) => - logger.info("Successfully validated targets") - true - case Success(false) => - logger.error("Validation of targets failed") - false - case Failure(e) => - logger.error("Caught exception while validating targets", e) - false - } - } -} diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/test/ListCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/test/ListCommand.scala new file mode 100644 index 000000000..cc71ad7d6 --- /dev/null +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/test/ListCommand.scala @@ -0,0 +1,35 @@ +/* + * Copyright 2018 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package com.dimajix.flowman.tools.exec.test + +import org.slf4j.LoggerFactory + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.tools.exec.ActionCommand + + +class ListCommand extends ActionCommand { + private val logger = LoggerFactory.getLogger(classOf[ListCommand]) + + override def executeInternal(session: Session, context:Context, project: Project) : Boolean = { + project.tests.keys.toList.sorted.foreach(println) + true + } + +} diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/test/RunCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/test/RunCommand.scala new file mode 100644 index 000000000..e2959cf83 --- /dev/null +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/test/RunCommand.scala @@ -0,0 +1,77 @@ +/* + * Copyright 2018 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.tools.exec.test + +import scala.util.Failure +import scala.util.Success +import scala.util.Try + +import org.kohsuke.args4j.Argument +import org.kohsuke.args4j.Option +import org.slf4j.LoggerFactory + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Lifecycle +import com.dimajix.flowman.execution.Phase +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.execution.Status +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.TargetIdentifier +import com.dimajix.flowman.model.TestIdentifier +import com.dimajix.flowman.tools.exec.ActionCommand + + +class RunCommand extends ActionCommand { + private val logger = LoggerFactory.getLogger(classOf[RunCommand]) + + @Argument(required = false, usage = "specifies test(s) to execute", metaVar = "") + var tests: Array[String] = Array() + @Option(name = "-k", aliases=Array("--keep-going"), usage = "continues execution of all targets in case of errors") + var keepGoing: Boolean = false + + + override def executeInternal(session: Session, context:Context, project: Project) : Boolean = { + Try { + val allTests = if (tests.nonEmpty) { + tests.flatMap(_.split(",")).toSeq.distinct + } + else { + project.tests.keySet.toSeq + } + + Status.ofAll(allTests, true) { test => + val runner = session.runner + val instance = context.getTest(TestIdentifier(test)) + if (instance.assertions.nonEmpty) { + runner.executeTest(instance, keepGoing = keepGoing) + } + else { + logger.info(s"Skipping test '$test' which does not provide any assertions") + Status.SUCCESS + } + } + } + match { + case Success(Status.SUCCESS) => true + case Success(Status.SKIPPED) => true + case Success(_) => false + case Failure(e) => + logger.error(e.getMessage) + false + } + } +} diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/test/TestCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/test/TestCommand.scala new file mode 100644 index
000000000..c85b53170 --- /dev/null +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/exec/test/TestCommand.scala @@ -0,0 +1,35 @@ +/* + * Copyright 2018 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.tools.exec.test + +import org.kohsuke.args4j.Argument +import org.kohsuke.args4j.spi.SubCommand +import org.kohsuke.args4j.spi.SubCommandHandler +import org.kohsuke.args4j.spi.SubCommands + +import com.dimajix.flowman.tools.exec.Command +import com.dimajix.flowman.tools.exec.NestedCommand + + +class TestCommand extends NestedCommand { + @Argument(required=true,index=0,metaVar="",usage="the subcommand to run",handler=classOf[SubCommandHandler]) + @SubCommands(Array( + new SubCommand(name="list",impl=classOf[ListCommand]), + new SubCommand(name="run",impl=classOf[RunCommand]) + )) + override var command:Command = _ +} diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/main/Driver.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/main/Driver.scala index 814a739b6..2c35c92e0 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/main/Driver.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/main/Driver.scala @@ -82,7 +82,7 @@ class Driver(options:Arguments) extends Tool { profiles = options.profiles ) - val executor = session.executor + val executor = session.execution val context = session.getContext(project) //val bundle = context.getJob() @@ -90,7 +90,7 @@ class Driver(options:Arguments) extends Tool { //val bundleArgs = options.arguments.map(kv => kv._1 + "=" + kv._2).mkString(", ") //logger.info(s"Executing job '${bundle.name}' $bundleDescription with args $bundleArgs") - // val result = executeInternal(executor, context, project) + // val result = executeInternal(execution, context, project) // Cleanup caches, but after printing error message. 
Otherwise it looks confusing when the error occured diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/Arguments.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/Arguments.scala index 85f84799d..ea80839c1 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/Arguments.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/Arguments.scala @@ -53,7 +53,7 @@ class Arguments(args:Array[String]) { @Option(name = "--spark-logging", usage = "set the log level for Spark", metaVar = "") var sparkLogging: String = "WARN" @Option(name = "--spark-name", usage = "set the Spark application name", metaVar = "") - var sparkName: String = "flowman" + var sparkName: String = "Flowman Shell" /** * Returns true if a help message is requested diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/CommandCompleter.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/CommandCompleter.scala index 076baecfd..f99458509 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/CommandCompleter.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/CommandCompleter.scala @@ -62,6 +62,8 @@ class CommandCompleter extends Completer { Shell.instance.project.mappings.keys.toList.sorted case a:Argument if a.metaVar() == "" => Shell.instance.project.jobs.keys.toList.sorted + case a:Argument if a.metaVar() == "" => + Shell.instance.project.tests.keys.toList.sorted case a:Argument if a.metaVar() == "" => Shell.instance.project.targets.keys.toList.sorted case a:Argument if a.metaVar() == "" => diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/ParsedCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/ParsedCommand.scala index fd67ce63f..e8755c307 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/ParsedCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/ParsedCommand.scala @@ -21,8 +21,6 @@ import org.kohsuke.args4j.spi.SubCommand import org.kohsuke.args4j.spi.SubCommandHandler import org.kohsuke.args4j.spi.SubCommands -import com.dimajix.flowman.tools.shell.job.JobCommand -import com.dimajix.flowman.tools.shell.project.ProjectCommand import com.dimajix.flowman.tools.exec.Command import com.dimajix.flowman.tools.exec.info.InfoCommand import com.dimajix.flowman.tools.exec.mapping.MappingCommand @@ -30,6 +28,9 @@ import com.dimajix.flowman.tools.exec.model.ModelCommand import com.dimajix.flowman.tools.exec.namespace.NamespaceCommand import com.dimajix.flowman.tools.exec.sql.SqlCommand import com.dimajix.flowman.tools.exec.target.TargetCommand +import com.dimajix.flowman.tools.shell.job.JobCommand +import com.dimajix.flowman.tools.shell.project.ProjectCommand +import com.dimajix.flowman.tools.shell.test.TestCommand import com.dimajix.flowman.tools.shell.history.HistoryCommand @@ -48,6 +49,7 @@ class ParsedCommand { new SubCommand(name="namespace",impl=classOf[NamespaceCommand]), new SubCommand(name="relation",impl=classOf[ModelCommand]), new SubCommand(name="target",impl=classOf[TargetCommand]), + new SubCommand(name="test",impl=classOf[TestCommand]), new SubCommand(name="project",impl=classOf[ProjectCommand]) )) var command:Command = _ diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/Shell.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/Shell.scala index 2779b54e7..8d5455204 100644 --- 
a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/Shell.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/Shell.scala @@ -124,7 +124,8 @@ class Shell(args:Arguments) extends StatefulTool( try { System.err.flush() System.out.flush() - val prompt = "flowman:" + project.name + job.map("/" + _.name).getOrElse("") + "> " + val context = job.map(_.name).orElse(test.map(_.name)) + val prompt = "flowman:" + project.name + context.map("/" + _).getOrElse("") + "> " console.readLine(prompt) val args = console.getParsedLine.words().asScala.filter(_.trim.nonEmpty) if (args.nonEmpty) { diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/history/SearchJobHistoryCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/history/SearchJobHistoryCommand.scala index 74131b303..3e2bbbbb5 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/history/SearchJobHistoryCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/history/SearchJobHistoryCommand.scala @@ -26,8 +26,8 @@ import com.dimajix.flowman.history.JobOrder import com.dimajix.flowman.history.JobQuery import com.dimajix.flowman.model.Project import com.dimajix.flowman.spec.splitSettings -import com.dimajix.flowman.tools.ConsoleUtils import com.dimajix.flowman.tools.exec.Command +import com.dimajix.flowman.util.ConsoleUtils class SearchJobHistoryCommand extends Command { diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/history/SearchTargetHistoryCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/history/SearchTargetHistoryCommand.scala index 7d4102a40..3bc0efe36 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/history/SearchTargetHistoryCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/history/SearchTargetHistoryCommand.scala @@ -25,8 +25,8 @@ import com.dimajix.flowman.execution.Status import com.dimajix.flowman.history.TargetOrder import com.dimajix.flowman.history.TargetQuery import com.dimajix.flowman.model.Project -import com.dimajix.flowman.tools.ConsoleUtils import com.dimajix.flowman.tools.exec.Command +import com.dimajix.flowman.util.ConsoleUtils class SearchTargetHistoryCommand extends Command { diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/job/EnterCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/job/EnterCommand.scala index 3b5f209e5..b2dc635a9 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/job/EnterCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/job/EnterCommand.scala @@ -22,6 +22,7 @@ import org.kohsuke.args4j.Argument import org.slf4j.LoggerFactory import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.NoSuchJobException import com.dimajix.flowman.execution.Session import com.dimajix.flowman.model.JobIdentifier import com.dimajix.flowman.model.Project @@ -46,6 +47,9 @@ class EnterCommand extends Command { true } catch { + case ex:NoSuchJobException => + logger.error(s"Cannot resolve job '${ex.job}'") + false case NonFatal(e) => logger.error(s"Error entering job '$job': ${e.getMessage}") false diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/job/JobCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/job/JobCommand.scala index 6b7066dad..06ebbab41 100644 --- 
a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/job/JobCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/job/JobCommand.scala @@ -26,8 +26,10 @@ import com.dimajix.flowman.tools.exec.NestedCommand import com.dimajix.flowman.tools.exec.job.BuildCommand import com.dimajix.flowman.tools.exec.job.CreateCommand import com.dimajix.flowman.tools.exec.job.DestroyCommand +import com.dimajix.flowman.tools.exec.job.InspectCommand import com.dimajix.flowman.tools.exec.job.ListCommand import com.dimajix.flowman.tools.exec.job.TruncateCommand +import com.dimajix.flowman.tools.exec.job.ValidateCommand import com.dimajix.flowman.tools.exec.job.VerifyCommand @@ -36,11 +38,12 @@ class JobCommand extends NestedCommand { @SubCommands(Array( new SubCommand(name="list",impl=classOf[ListCommand]), new SubCommand(name="create",impl=classOf[CreateCommand]), + new SubCommand(name="validate",impl=classOf[ValidateCommand]), new SubCommand(name="build",impl=classOf[BuildCommand]), new SubCommand(name="verify",impl=classOf[VerifyCommand]), new SubCommand(name="truncate",impl=classOf[TruncateCommand]), new SubCommand(name="destroy",impl=classOf[DestroyCommand]), - new SubCommand(name="info",impl=classOf[InfoCommand]), + new SubCommand(name="inspect",impl=classOf[InspectCommand]), new SubCommand(name="enter",impl=classOf[EnterCommand]), new SubCommand(name="leave",impl=classOf[LeaveCommand]) )) diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/project/ProjectCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/project/ProjectCommand.scala index 8a6f1e61a..b699b8902 100644 --- a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/project/ProjectCommand.scala +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/project/ProjectCommand.scala @@ -26,20 +26,23 @@ import com.dimajix.flowman.tools.exec.NestedCommand import com.dimajix.flowman.tools.exec.project.BuildCommand import com.dimajix.flowman.tools.exec.project.CreateCommand import com.dimajix.flowman.tools.exec.project.DestroyCommand +import com.dimajix.flowman.tools.exec.project.InspectCommand import com.dimajix.flowman.tools.exec.project.TruncateCommand +import com.dimajix.flowman.tools.exec.project.ValidateCommand import com.dimajix.flowman.tools.exec.project.VerifyCommand class ProjectCommand extends NestedCommand { @Argument(required=true,index=0,metaVar="",usage="the subcommand to run",handler=classOf[SubCommandHandler]) @SubCommands(Array( + new SubCommand(name="validate",impl=classOf[ValidateCommand]), new SubCommand(name="create",impl=classOf[CreateCommand]), new SubCommand(name="migrate",impl=classOf[CreateCommand]), new SubCommand(name="build",impl=classOf[BuildCommand]), new SubCommand(name="verify",impl=classOf[VerifyCommand]), new SubCommand(name="truncate",impl=classOf[TruncateCommand]), new SubCommand(name="destroy",impl=classOf[DestroyCommand]), - new SubCommand(name="info",impl=classOf[InfoCommand]), + new SubCommand(name="inspect",impl=classOf[InspectCommand]), new SubCommand(name="load",impl=classOf[LoadCommand]), new SubCommand(name="reload",impl=classOf[ReloadCommand]) )) diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/test/EnterCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/test/EnterCommand.scala new file mode 100644 index 000000000..f8334fa47 --- /dev/null +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/test/EnterCommand.scala @@ -0,0 +1,54 @@ +/* + * 
Copyright 2018-2019 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.tools.shell.test + +import scala.util.control.NonFatal + +import org.kohsuke.args4j.Argument +import org.slf4j.LoggerFactory + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.NoSuchTestException +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.TestIdentifier +import com.dimajix.flowman.tools.exec.Command +import com.dimajix.flowman.tools.shell.Shell + + +class EnterCommand extends Command { + private val logger = LoggerFactory.getLogger(classOf[EnterCommand]) + + @Argument(index=0, required=true, usage = "name of test to enter", metaVar = "<test>") + var test: String = "" + + override def execute(session: Session, project:Project, context:Context): Boolean = { + try { + val test = context.getTest(TestIdentifier(this.test)) + Shell.instance.enterTest(test) + true + } + catch { + case ex:NoSuchTestException => + logger.error(s"Cannot resolve test '${ex.test}'") + false + case NonFatal(e) => + logger.error(s"Error entering test '$test': ${e.getMessage}") + false + } + } +} diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/test/InspectCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/test/InspectCommand.scala new file mode 100644 index 000000000..d542cde8c --- /dev/null +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/test/InspectCommand.scala @@ -0,0 +1,83 @@ +/* + * Copyright 2020 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package com.dimajix.flowman.tools.shell.test + +import scala.util.control.NonFatal + +import org.kohsuke.args4j.Argument +import org.slf4j.LoggerFactory + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.NoSuchJobException +import com.dimajix.flowman.execution.NoSuchTestException +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.model.TestIdentifier +import com.dimajix.flowman.tools.exec.Command + + +class InspectCommand extends Command { + private val logger = LoggerFactory.getLogger(classOf[InspectCommand]) + + @Argument(index=0, required=true, usage = "name of test to inspect", metaVar = "<test>") + var test: String = "" + + override def execute(session: Session, project:Project, context:Context): Boolean = { + try { + val test = context.getTest(TestIdentifier(this.test)) + println(s"Name: ${test.name}") + println(s"Description: ${test.description}") + println("Environment:") + test.environment + .toSeq + .sortBy(_._1) + .foreach{ case(k,v) => println(s" $k=$v") } + println("Mapping Overrides:") + test.overrideMappings.keySet + .toSeq + .sorted + .foreach{ p => println(s" $p") } + println("Relation Overrides:") + test.overrideRelations.keySet + .toSeq + .sorted + .foreach{ p => println(s" $p") } + println("Fixture Targets:") + test.fixtures.keySet + .toSeq + .sorted + .foreach{ p => println(s" $p") } + println("Build Targets:") + test.targets + .foreach{ p => println(s" $p") } + println("Assertions:") + test.assertions.keySet + .toSeq + .sorted + .foreach{ p => println(s" $p") } + true + } + catch { + case ex:NoSuchTestException => + logger.error(s"Cannot resolve test '${ex.test}'") + false + case NonFatal(e) => + logger.error(s"Error inspecting test '$test': ${e.getMessage}") + false + } + } +} diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/test/LeaveCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/test/LeaveCommand.scala new file mode 100644 index 000000000..fe88daf5a --- /dev/null +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/test/LeaveCommand.scala @@ -0,0 +1,31 @@ +/* + * Copyright 2018-2019 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package com.dimajix.flowman.tools.shell.test + +import com.dimajix.flowman.execution.Context +import com.dimajix.flowman.execution.Session +import com.dimajix.flowman.model.Project +import com.dimajix.flowman.tools.exec.Command +import com.dimajix.flowman.tools.shell.Shell + + +class LeaveCommand extends Command { + override def execute(session: Session, project:Project, context:Context): Boolean = { + Shell.instance.leaveTest() + true + } +} diff --git a/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/test/TestCommand.scala b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/test/TestCommand.scala new file mode 100644 index 000000000..31c8968ee --- /dev/null +++ b/flowman-tools/src/main/scala/com/dimajix/flowman/tools/shell/test/TestCommand.scala @@ -0,0 +1,40 @@ +/* + * Copyright 2018 Kaya Kupferschmidt + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.dimajix.flowman.tools.shell.test + +import org.kohsuke.args4j.Argument +import org.kohsuke.args4j.spi.SubCommand +import org.kohsuke.args4j.spi.SubCommandHandler +import org.kohsuke.args4j.spi.SubCommands + +import com.dimajix.flowman.tools.exec.Command +import com.dimajix.flowman.tools.exec.NestedCommand +import com.dimajix.flowman.tools.exec.test.ListCommand +import com.dimajix.flowman.tools.exec.test.RunCommand + + +class TestCommand extends NestedCommand { + @Argument(required=true,index=0,metaVar="<command>",usage="the subcommand to run",handler=classOf[SubCommandHandler]) + @SubCommands(Array( + new SubCommand(name="list",impl=classOf[ListCommand]), + new SubCommand(name="run",impl=classOf[RunCommand]), + new SubCommand(name="inspect",impl=classOf[InspectCommand]), + new SubCommand(name="enter",impl=classOf[EnterCommand]), + new SubCommand(name="leave",impl=classOf[LeaveCommand]) + )) + override var command:Command = _ +} diff --git a/flowman-tools/src/test/scala/com/dimajix/flowman/tools/ConsoleUtilsTest.scala b/flowman-tools/src/test/scala/com/dimajix/flowman/tools/ConsoleUtilsTest.scala index 8c4ed5e76..db535d9fb 100644 --- a/flowman-tools/src/test/scala/com/dimajix/flowman/tools/ConsoleUtilsTest.scala +++ b/flowman-tools/src/test/scala/com/dimajix/flowman/tools/ConsoleUtilsTest.scala @@ -16,15 +16,16 @@ package com.dimajix.flowman.tools -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers import com.dimajix.flowman.execution.Phase import com.dimajix.flowman.execution.Status import com.dimajix.flowman.history.JobState +import com.dimajix.flowman.util.ConsoleUtils -class ConsoleUtilsTest extends FlatSpec with Matchers { +class ConsoleUtilsTest extends AnyFlatSpec with Matchers { "ConsoleUtils" should "correctly format a list of JobStates" in { val columns = Seq("id", "namespace", "project", "job", "phase", "args", "status", "start_dt", "end_dt") val records = Seq(JobState("123", "default", "p1", "some_job", Phase.BUILD, Map("arg1" -> "val1"), Status.SUCCESS, None, None)) diff --git
a/flowman-tools/src/test/scala/com/dimajix/flowman/tools/exec/DriverTest.scala b/flowman-tools/src/test/scala/com/dimajix/flowman/tools/exec/DriverTest.scala index 56c6d5a46..9b28b1ca8 100644 --- a/flowman-tools/src/test/scala/com/dimajix/flowman/tools/exec/DriverTest.scala +++ b/flowman-tools/src/test/scala/com/dimajix/flowman/tools/exec/DriverTest.scala @@ -17,11 +17,11 @@ package com.dimajix.flowman.tools.exec import org.kohsuke.args4j.CmdLineException -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class DriverTest extends FlatSpec with Matchers { +class DriverTest extends AnyFlatSpec with Matchers { "The Driver" should "fail with an exception on wrong arguments" in { Driver.run() should be (true) diff --git a/flowman-tools/src/test/scala/com/dimajix/flowman/tools/shell/CommandCompleterTest.scala b/flowman-tools/src/test/scala/com/dimajix/flowman/tools/shell/CommandCompleterTest.scala index 0e2ece979..55f171c17 100644 --- a/flowman-tools/src/test/scala/com/dimajix/flowman/tools/shell/CommandCompleterTest.scala +++ b/flowman-tools/src/test/scala/com/dimajix/flowman/tools/shell/CommandCompleterTest.scala @@ -16,14 +16,12 @@ package com.dimajix.flowman.tools.shell -import scala.collection.JavaConverters._ - import org.jline.reader.Candidate -import org.scalatest.FlatSpec -import org.scalatest.Matchers +import org.scalatest.flatspec.AnyFlatSpec +import org.scalatest.matchers.should.Matchers -class CommandCompleterTest extends FlatSpec with Matchers { +class CommandCompleterTest extends AnyFlatSpec with Matchers { "The CommandCompleter" should "work" in { val completer = new CommandCompleter() val candidates = new java.util.LinkedList[Candidate]() diff --git a/flowman-ui/package-lock.json b/flowman-ui/package-lock.json index ebb3c7686..d28c9bcca 100644 --- a/flowman-ui/package-lock.json +++ b/flowman-ui/package-lock.json @@ -5,866 +5,1063 @@ "requires": true, "dependencies": { "@babel/code-frame": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.0.0.tgz", - "integrity": "sha512-OfC2uemaknXr87bdLUkWog7nYuliM9Ij5HUcajsVcMCpQrcLmtxRbVFTIqmcSkSeYRBFBRxs2FiUqFJDLdiebA==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.13.tgz", + "integrity": "sha512-HV1Cm0Q3ZrpCR93tkWOYiuYIgLxZXZFVG2VgK+MBWjUqZTundupbfx2aXarXuw5Ko5aMcjtJgbSs4vUGBS5v6g==", "dev": true, "requires": { - "@babel/highlight": "^7.0.0" + "@babel/highlight": "^7.12.13" } }, + "@babel/compat-data": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.12.13.tgz", + "integrity": "sha512-U/hshG5R+SIoW7HVWIdmy1cB7s3ki+r3FpyEZiCgpi4tFgPnX/vynY80ZGSASOIrUM6O7VxOgCZgdt7h97bUGg==", + "dev": true + }, "@babel/core": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.5.4.tgz", - "integrity": "sha512-+DaeBEpYq6b2+ZmHx3tHspC+ZRflrvLqwfv8E3hNr5LVQoyBnL8RPKSBCg+rK2W2My9PWlujBiqd0ZPsR9Q6zQ==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.0.0", - "@babel/generator": "^7.5.0", - "@babel/helpers": "^7.5.4", - "@babel/parser": "^7.5.0", - "@babel/template": "^7.4.4", - "@babel/traverse": "^7.5.0", - "@babel/types": "^7.5.0", - "convert-source-map": "^1.1.0", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.13.tgz", + "integrity": 
"sha512-BQKE9kXkPlXHPeqissfxo0lySWJcYdEP0hdtJOH/iJfDdhOCcgtNCjftCJg3qqauB4h+lz2N6ixM++b9DN1Tcw==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.12.13", + "@babel/generator": "^7.12.13", + "@babel/helper-module-transforms": "^7.12.13", + "@babel/helpers": "^7.12.13", + "@babel/parser": "^7.12.13", + "@babel/template": "^7.12.13", + "@babel/traverse": "^7.12.13", + "@babel/types": "^7.12.13", + "convert-source-map": "^1.7.0", "debug": "^4.1.0", - "json5": "^2.1.0", - "lodash": "^4.17.11", - "resolve": "^1.3.2", + "gensync": "^1.0.0-beta.1", + "json5": "^2.1.2", + "lodash": "^4.17.19", "semver": "^5.4.1", "source-map": "^0.5.0" } }, "@babel/generator": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.5.0.tgz", - "integrity": "sha512-1TTVrt7J9rcG5PMjvO7VEG3FrEoEJNHxumRq66GemPmzboLWtIjjcJgk8rokuAS7IiRSpgVSu5Vb9lc99iJkOA==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.12.13.tgz", + "integrity": "sha512-9qQ8Fgo8HaSvHEt6A5+BATP7XktD/AdAnObUeTRz5/e2y3kbrxZgz32qUJJsdmwUvBJzF4AeV21nGTNwv05Mpw==", "dev": true, "requires": { - "@babel/types": "^7.5.0", + "@babel/types": "^7.12.13", "jsesc": "^2.5.1", - "lodash": "^4.17.11", - "source-map": "^0.5.0", - "trim-right": "^1.0.1" + "source-map": "^0.5.0" } }, "@babel/helper-annotate-as-pure": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.0.0.tgz", - "integrity": "sha512-3UYcJUj9kvSLbLbUIfQTqzcy5VX7GRZ/CCDrnOaZorFFM01aXp1+GJwuFGV4NDDoAS+mOUyHcO6UD/RfqOks3Q==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.12.13.tgz", + "integrity": "sha512-7YXfX5wQ5aYM/BOlbSccHDbuXXFPxeoUmfWtz8le2yTkTZc+BxsiEnENFoi2SlmA8ewDkG2LgIMIVzzn2h8kfw==", "dev": true, "requires": { - "@babel/types": "^7.0.0" + "@babel/types": "^7.12.13" } }, "@babel/helper-builder-binary-assignment-operator-visitor": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.1.0.tgz", - "integrity": "sha512-qNSR4jrmJ8M1VMM9tibvyRAHXQs2PmaksQF7c1CGJNipfe3D8p+wgNwgso/P2A2r2mdgBWAXljNWR0QRZAMW8w==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.12.13.tgz", + "integrity": "sha512-CZOv9tGphhDRlVjVkAgm8Nhklm9RzSmWpX2my+t7Ua/KT616pEzXsQCjinzvkRvHWJ9itO4f296efroX23XCMA==", "dev": true, "requires": { - "@babel/helper-explode-assignable-expression": "^7.1.0", - "@babel/types": "^7.0.0" + "@babel/helper-explode-assignable-expression": "^7.12.13", + "@babel/types": "^7.12.13" } }, - "@babel/helper-call-delegate": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-call-delegate/-/helper-call-delegate-7.4.4.tgz", - "integrity": "sha512-l79boDFJ8S1c5hvQvG+rc+wHw6IuH7YldmRKsYtpbawsxURu/paVy57FZMomGK22/JckepaikOkY0MoAmdyOlQ==", + "@babel/helper-compilation-targets": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.12.13.tgz", + "integrity": "sha512-dXof20y/6wB5HnLOGyLh/gobsMvDNoekcC+8MCV2iaTd5JemhFkPD73QB+tK3iFC9P0xJC73B6MvKkyUfS9cCw==", "dev": true, "requires": { - "@babel/helper-hoist-variables": "^7.4.4", - "@babel/traverse": "^7.4.4", - "@babel/types": "^7.4.4" + "@babel/compat-data": 
"^7.12.13", + "@babel/helper-validator-option": "^7.12.11", + "browserslist": "^4.14.5", + "semver": "^5.5.0" } }, "@babel/helper-create-class-features-plugin": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.5.0.tgz", - "integrity": "sha512-EAoMc3hE5vE5LNhMqDOwB1usHvmRjCDAnH8CD4PVkX9/Yr3W/tcz8xE8QvdZxfsFBDICwZnF2UTHIqslRpvxmA==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.12.13.tgz", + "integrity": "sha512-Vs/e9wv7rakKYeywsmEBSRC9KtmE7Px+YBlESekLeJOF0zbGUicGfXSNi3o+tfXSNS48U/7K9mIOOCR79Cl3+Q==", "dev": true, "requires": { - "@babel/helper-function-name": "^7.1.0", - "@babel/helper-member-expression-to-functions": "^7.0.0", - "@babel/helper-optimise-call-expression": "^7.0.0", - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-replace-supers": "^7.4.4", - "@babel/helper-split-export-declaration": "^7.4.4" + "@babel/helper-function-name": "^7.12.13", + "@babel/helper-member-expression-to-functions": "^7.12.13", + "@babel/helper-optimise-call-expression": "^7.12.13", + "@babel/helper-replace-supers": "^7.12.13", + "@babel/helper-split-export-declaration": "^7.12.13" } }, - "@babel/helper-define-map": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-define-map/-/helper-define-map-7.4.4.tgz", - "integrity": "sha512-IX3Ln8gLhZpSuqHJSnTNBWGDE9kdkTEWl21A/K7PQ00tseBwbqCHTvNLHSBd9M0R5rER4h5Rsvj9vw0R5SieBg==", + "@babel/helper-create-regexp-features-plugin": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.12.13.tgz", + "integrity": "sha512-XC+kiA0J3at6E85dL5UnCYfVOcIZ834QcAY0TIpgUVnz0zDzg+0TtvZTnJ4g9L1dPRGe30Qi03XCIS4tYCLtqw==", "dev": true, "requires": { - "@babel/helper-function-name": "^7.1.0", - "@babel/types": "^7.4.4", - "lodash": "^4.17.11" + "@babel/helper-annotate-as-pure": "^7.12.13", + "regexpu-core": "^4.7.1" } }, "@babel/helper-explode-assignable-expression": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.1.0.tgz", - "integrity": "sha512-NRQpfHrJ1msCHtKjbzs9YcMmJZOg6mQMmGRB+hbamEdG5PNpaSm95275VD92DvJKuyl0s2sFiDmMZ+EnnvufqA==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.12.13.tgz", + "integrity": "sha512-5loeRNvMo9mx1dA/d6yNi+YiKziJZFylZnCo1nmFF4qPU4yJ14abhWESuSMQSlQxWdxdOFzxXjk/PpfudTtYyw==", "dev": true, "requires": { - "@babel/traverse": "^7.1.0", - "@babel/types": "^7.0.0" + "@babel/types": "^7.12.13" } }, "@babel/helper-function-name": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.1.0.tgz", - "integrity": "sha512-A95XEoCpb3TO+KZzJ4S/5uW5fNe26DjBGqf1o9ucyLyCmi1dXq/B3c8iaWTfBk3VvetUxl16e8tIrd5teOCfGw==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.12.13.tgz", + "integrity": "sha512-TZvmPn0UOqmvi5G4vvw0qZTpVptGkB1GL61R6lKvrSdIxGm5Pky7Q3fpKiIkQCAtRCBUwB0PaThlx9vebCDSwA==", "dev": true, "requires": { - "@babel/helper-get-function-arity": "^7.0.0", - "@babel/template": "^7.1.0", - "@babel/types": "^7.0.0" + "@babel/helper-get-function-arity": "^7.12.13", + "@babel/template": "^7.12.13", + 
"@babel/types": "^7.12.13" } }, "@babel/helper-get-function-arity": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.0.0.tgz", - "integrity": "sha512-r2DbJeg4svYvt3HOS74U4eWKsUAMRH01Z1ds1zx8KNTPtpTL5JAsdFv8BNyOpVqdFhHkkRDIg5B4AsxmkjAlmQ==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.12.13.tgz", + "integrity": "sha512-DjEVzQNz5LICkzN0REdpD5prGoidvbdYk1BVgRUOINaWJP2t6avB27X1guXK1kXNrX0WMfsrm1A/ZBthYuIMQg==", "dev": true, "requires": { - "@babel/types": "^7.0.0" + "@babel/types": "^7.12.13" } }, "@babel/helper-hoist-variables": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.4.4.tgz", - "integrity": "sha512-VYk2/H/BnYbZDDg39hr3t2kKyifAm1W6zHRfhx8jGjIHpQEBv9dry7oQ2f3+J703TLu69nYdxsovl0XYfcnK4w==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.12.13.tgz", + "integrity": "sha512-KSC5XSj5HreRhYQtZ3cnSnQwDzgnbdUDEFsxkN0m6Q3WrCRt72xrnZ8+h+pX7YxM7hr87zIO3a/v5p/H3TrnVw==", "dev": true, "requires": { - "@babel/types": "^7.4.4" + "@babel/types": "^7.12.13" } }, "@babel/helper-member-expression-to-functions": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.0.0.tgz", - "integrity": "sha512-avo+lm/QmZlv27Zsi0xEor2fKcqWG56D5ae9dzklpIaY7cQMK5N8VSpaNVPPagiqmy7LrEjK1IWdGMOqPu5csg==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.12.13.tgz", + "integrity": "sha512-B+7nN0gIL8FZ8SvMcF+EPyB21KnCcZHQZFczCxbiNGV/O0rsrSBlWGLzmtBJ3GMjSVMIm4lpFhR+VdVBuIsUcQ==", "dev": true, "requires": { - "@babel/types": "^7.0.0" + "@babel/types": "^7.12.13" } }, "@babel/helper-module-imports": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.0.0.tgz", - "integrity": "sha512-aP/hlLq01DWNEiDg4Jn23i+CXxW/owM4WpDLFUbpjxe4NS3BhLVZQ5i7E0ZrxuQ/vwekIeciyamgB1UIYxxM6A==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.12.13.tgz", + "integrity": "sha512-NGmfvRp9Rqxy0uHSSVP+SRIW1q31a7Ji10cLBcqSDUngGentY4FRiHOFZFE1CLU5eiL0oE8reH7Tg1y99TDM/g==", "dev": true, "requires": { - "@babel/types": "^7.0.0" + "@babel/types": "^7.12.13" } }, "@babel/helper-module-transforms": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.4.4.tgz", - "integrity": "sha512-3Z1yp8TVQf+B4ynN7WoHPKS8EkdTbgAEy0nU0rs/1Kw4pDgmvYH3rz3aI11KgxKCba2cn7N+tqzV1mY2HMN96w==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.12.13.tgz", + "integrity": "sha512-acKF7EjqOR67ASIlDTupwkKM1eUisNAjaSduo5Cz+793ikfnpe7p4Q7B7EWU2PCoSTPWsQkR7hRUWEIZPiVLGA==", "dev": true, "requires": { - "@babel/helper-module-imports": "^7.0.0", - "@babel/helper-simple-access": "^7.1.0", - "@babel/helper-split-export-declaration": "^7.4.4", - "@babel/template": "^7.4.4", - "@babel/types": "^7.4.4", - "lodash": "^4.17.11" + "@babel/helper-module-imports": "^7.12.13", + "@babel/helper-replace-supers": "^7.12.13", + "@babel/helper-simple-access": "^7.12.13", + "@babel/helper-split-export-declaration": 
"^7.12.13", + "@babel/helper-validator-identifier": "^7.12.11", + "@babel/template": "^7.12.13", + "@babel/traverse": "^7.12.13", + "@babel/types": "^7.12.13", + "lodash": "^4.17.19" } }, "@babel/helper-optimise-call-expression": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.0.0.tgz", - "integrity": "sha512-u8nd9NQePYNQV8iPWu/pLLYBqZBa4ZaY1YWRFMuxrid94wKI1QNt67NEZ7GAe5Kc/0LLScbim05xZFWkAdrj9g==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.12.13.tgz", + "integrity": "sha512-BdWQhoVJkp6nVjB7nkFWcn43dkprYauqtk++Py2eaf/GRDFm5BxRqEIZCiHlZUGAVmtwKcsVL1dC68WmzeFmiA==", "dev": true, "requires": { - "@babel/types": "^7.0.0" + "@babel/types": "^7.12.13" } }, "@babel/helper-plugin-utils": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.0.0.tgz", - "integrity": "sha512-CYAOUCARwExnEixLdB6sDm2dIJ/YgEAKDM1MOeMeZu9Ld/bDgVo8aiWrXwcY7OBh+1Ea2uUcVRcxKk0GJvW7QA==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.12.13.tgz", + "integrity": "sha512-C+10MXCXJLiR6IeG9+Wiejt9jmtFpxUc3MQqCmPY8hfCjyUGl9kT+B2okzEZrtykiwrc4dbCPdDoz0A/HQbDaA==", "dev": true }, - "@babel/helper-regex": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-regex/-/helper-regex-7.4.4.tgz", - "integrity": "sha512-Y5nuB/kESmR3tKjU8Nkn1wMGEx1tjJX076HBMeL3XLQCu6vA/YRzuTW0bbb+qRnXvQGn+d6Rx953yffl8vEy7Q==", + "@babel/helper-remap-async-to-generator": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.12.13.tgz", + "integrity": "sha512-Qa6PU9vNcj1NZacZZI1Mvwt+gXDH6CTfgAkSjeRMLE8HxtDK76+YDId6NQR+z7Rgd5arhD2cIbS74r0SxD6PDA==", "dev": true, "requires": { - "lodash": "^4.17.11" + "@babel/helper-annotate-as-pure": "^7.12.13", + "@babel/helper-wrap-function": "^7.12.13", + "@babel/types": "^7.12.13" } }, - "@babel/helper-remap-async-to-generator": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.1.0.tgz", - "integrity": "sha512-3fOK0L+Fdlg8S5al8u/hWE6vhufGSn0bN09xm2LXMy//REAF8kDCrYoOBKYmA8m5Nom+sV9LyLCwrFynA8/slg==", + "@babel/helper-replace-supers": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.12.13.tgz", + "integrity": "sha512-pctAOIAMVStI2TMLhozPKbf5yTEXc0OJa0eENheb4w09SrgOWEs+P4nTOZYJQCqs8JlErGLDPDJTiGIp3ygbLg==", "dev": true, "requires": { - "@babel/helper-annotate-as-pure": "^7.0.0", - "@babel/helper-wrap-function": "^7.1.0", - "@babel/template": "^7.1.0", - "@babel/traverse": "^7.1.0", - "@babel/types": "^7.0.0" + "@babel/helper-member-expression-to-functions": "^7.12.13", + "@babel/helper-optimise-call-expression": "^7.12.13", + "@babel/traverse": "^7.12.13", + "@babel/types": "^7.12.13" } }, - "@babel/helper-replace-supers": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.4.4.tgz", - "integrity": "sha512-04xGEnd+s01nY1l15EuMS1rfKktNF+1CkKmHoErDppjAAZL+IUBZpzT748x262HF7fibaQPhbvWUl5HeSt1EXg==", + "@babel/helper-simple-access": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.12.13.tgz", + 
"integrity": "sha512-0ski5dyYIHEfwpWGx5GPWhH35j342JaflmCeQmsPWcrOQDtCN6C1zKAVRFVbK53lPW2c9TsuLLSUDf0tIGJ5hA==", "dev": true, "requires": { - "@babel/helper-member-expression-to-functions": "^7.0.0", - "@babel/helper-optimise-call-expression": "^7.0.0", - "@babel/traverse": "^7.4.4", - "@babel/types": "^7.4.4" + "@babel/types": "^7.12.13" } }, - "@babel/helper-simple-access": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.1.0.tgz", - "integrity": "sha512-Vk+78hNjRbsiu49zAPALxTb+JUQCz1aolpd8osOF16BGnLtseD21nbHgLPGUwrXEurZgiCOUmvs3ExTu4F5x6w==", + "@babel/helper-skip-transparent-expression-wrappers": { + "version": "7.12.1", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.12.1.tgz", + "integrity": "sha512-Mf5AUuhG1/OCChOJ/HcADmvcHM42WJockombn8ATJG3OnyiSxBK/Mm5x78BQWvmtXZKHgbjdGL2kin/HOLlZGA==", "dev": true, "requires": { - "@babel/template": "^7.1.0", - "@babel/types": "^7.0.0" + "@babel/types": "^7.12.1" } }, "@babel/helper-split-export-declaration": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.4.4.tgz", - "integrity": "sha512-Ro/XkzLf3JFITkW6b+hNxzZ1n5OQ80NvIUdmHspih1XAhtN3vPTuUFT4eQnela+2MaZ5ulH+iyP513KJrxbN7Q==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.12.13.tgz", + "integrity": "sha512-tCJDltF83htUtXx5NLcaDqRmknv652ZWCHyoTETf1CXYJdPC7nohZohjUgieXhv0hTJdRf2FjDueFehdNucpzg==", "dev": true, "requires": { - "@babel/types": "^7.4.4" + "@babel/types": "^7.12.13" } }, + "@babel/helper-validator-identifier": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz", + "integrity": "sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==", + "dev": true + }, + "@babel/helper-validator-option": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.12.11.tgz", + "integrity": "sha512-TBFCyj939mFSdeX7U7DDj32WtzYY7fDcalgq8v3fBZMNOJQNn7nOYzMaUCiPxPYfCup69mtIpqlKgMZLvQ8Xhw==", + "dev": true + }, "@babel/helper-wrap-function": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.2.0.tgz", - "integrity": "sha512-o9fP1BZLLSrYlxYEYyl2aS+Flun5gtjTIG8iln+XuEzQTs0PLagAGSXUcqruJwD5fM48jzIEggCKpIfWTcR7pQ==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.12.13.tgz", + "integrity": "sha512-t0aZFEmBJ1LojdtJnhOaQEVejnzYhyjWHSsNSNo8vOYRbAJNh6r6GQF7pd36SqG7OKGbn+AewVQ/0IfYfIuGdw==", "dev": true, "requires": { - "@babel/helper-function-name": "^7.1.0", - "@babel/template": "^7.1.0", - "@babel/traverse": "^7.1.0", - "@babel/types": "^7.2.0" + "@babel/helper-function-name": "^7.12.13", + "@babel/template": "^7.12.13", + "@babel/traverse": "^7.12.13", + "@babel/types": "^7.12.13" } }, "@babel/helpers": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.5.4.tgz", - "integrity": "sha512-6LJ6xwUEJP51w0sIgKyfvFMJvIb9mWAfohJp0+m6eHJigkFdcH8duZ1sfhn0ltJRzwUIT/yqqhdSfRpCpL7oow==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.12.13.tgz", + "integrity": 
"sha512-oohVzLRZ3GQEk4Cjhfs9YkJA4TdIDTObdBEZGrd6F/T0GPSnuV6l22eMcxlvcvzVIPH3VTtxbseudM1zIE+rPQ==", "dev": true, "requires": { - "@babel/template": "^7.4.4", - "@babel/traverse": "^7.5.0", - "@babel/types": "^7.5.0" + "@babel/template": "^7.12.13", + "@babel/traverse": "^7.12.13", + "@babel/types": "^7.12.13" } }, "@babel/highlight": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.5.0.tgz", - "integrity": "sha512-7dV4eu9gBxoM0dAnj/BCFDW9LFU0zvTrkq0ugM7pnHEgguOEeOz1so2ZghEdzviYzQEED0r4EAgpsBChKy1TRQ==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.12.13.tgz", + "integrity": "sha512-kocDQvIbgMKlWxXe9fof3TQ+gkIPOUSEYhJjqUjvKMez3krV7vbzYCDq39Oj11UAVK7JqPVGQPlgE85dPNlQww==", "dev": true, "requires": { + "@babel/helper-validator-identifier": "^7.12.11", "chalk": "^2.0.0", - "esutils": "^2.0.2", "js-tokens": "^4.0.0" } }, "@babel/parser": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.5.0.tgz", - "integrity": "sha512-I5nW8AhGpOXGCCNYGc+p7ExQIBxRFnS2fd/d862bNOKvmoEPjYPcfIjsfdy0ujagYOIYPczKgD9l3FsgTkAzKA==", + "version": "7.12.14", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.12.14.tgz", + "integrity": "sha512-xcfxDq3OrBnDsA/Z8eK5/2iPcLD8qbOaSSfOw4RA6jp4i7e6dEQ7+wTwxItEwzcXPQcsry5nZk96gmVPKletjQ==", "dev": true }, "@babel/plugin-proposal-async-generator-functions": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.2.0.tgz", - "integrity": "sha512-+Dfo/SCQqrwx48ptLVGLdE39YtWRuKc/Y9I5Fy0P1DDBB9lsAHpjcEJQt+4IifuSOSTLBKJObJqMvaO1pIE8LQ==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.12.13.tgz", + "integrity": "sha512-1KH46Hx4WqP77f978+5Ye/VUbuwQld2hph70yaw2hXS2v7ER2f3nlpNMu909HO2rbvP0NKLlMVDPh9KXklVMhA==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-remap-async-to-generator": "^7.1.0", - "@babel/plugin-syntax-async-generators": "^7.2.0" + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/helper-remap-async-to-generator": "^7.12.13", + "@babel/plugin-syntax-async-generators": "^7.8.0" } }, "@babel/plugin-proposal-class-properties": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.5.0.tgz", - "integrity": "sha512-9L/JfPCT+kShiiTTzcnBJ8cOwdKVmlC1RcCf9F0F9tERVrM4iWtWnXtjWCRqNm2la2BxO1MPArWNsU9zsSJWSQ==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.12.13.tgz", + "integrity": "sha512-8SCJ0Ddrpwv4T7Gwb33EmW1V9PY5lggTO+A8WjyIwxrSHDUyBw4MtF96ifn1n8H806YlxbVCoKXbbmzD6RD+cA==", "dev": true, "requires": { - "@babel/helper-create-class-features-plugin": "^7.5.0", - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-create-class-features-plugin": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-proposal-decorators": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.4.4.tgz", - "integrity": "sha512-z7MpQz3XC/iQJWXH9y+MaWcLPNSMY9RQSthrLzak8R8hCj0fuyNk+Dzi9kfNe/JxxlWQ2g7wkABbgWjW36MTcw==", + "version": "7.12.13", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-proposal-decorators/-/plugin-proposal-decorators-7.12.13.tgz", + "integrity": "sha512-x2aOr5w4ARJoYHFKoG2iEUL/Xe99JAJXjAasHijXp3/KgaetJXGE62SmHgsW3Tia/XUT5AxF2YC0F+JyhPY/0Q==", "dev": true, "requires": { - "@babel/helper-create-class-features-plugin": "^7.4.4", - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/plugin-syntax-decorators": "^7.2.0" + "@babel/helper-create-class-features-plugin": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/plugin-syntax-decorators": "^7.12.13" + } + }, + "@babel/plugin-proposal-dynamic-import": { + "version": "7.12.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.12.1.tgz", + "integrity": "sha512-a4rhUSZFuq5W8/OO8H7BL5zspjnc1FLd9hlOxIK/f7qG4a0qsqk8uvF/ywgBA8/OmjsapjpvaEOYItfGG1qIvQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.10.4", + "@babel/plugin-syntax-dynamic-import": "^7.8.0" + } + }, + "@babel/plugin-proposal-export-namespace-from": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.12.13.tgz", + "integrity": "sha512-INAgtFo4OnLN3Y/j0VwAgw3HDXcDtX+C/erMvWzuV9v71r7urb6iyMXu7eM9IgLr1ElLlOkaHjJ0SbCmdOQ3Iw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3" } }, "@babel/plugin-proposal-json-strings": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.2.0.tgz", - "integrity": "sha512-MAFV1CA/YVmYwZG0fBQyXhmj0BHCB5egZHCKWIFVv/XCxAeVGIHfos3SwDck4LvCllENIAg7xMKOG5kH0dzyUg==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.12.13.tgz", + "integrity": "sha512-v9eEi4GiORDg8x+Dmi5r8ibOe0VXoKDeNPYcTTxdGN4eOWikrJfDJCJrr1l5gKGvsNyGJbrfMftC2dTL6oz7pg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/plugin-syntax-json-strings": "^7.2.0" + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/plugin-syntax-json-strings": "^7.8.0" + } + }, + "@babel/plugin-proposal-logical-assignment-operators": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.12.13.tgz", + "integrity": "sha512-fqmiD3Lz7jVdK6kabeSr1PZlWSUVqSitmHEe3Z00dtGTKieWnX9beafvavc32kjORa5Bai4QNHgFDwWJP+WtSQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" + } + }, + "@babel/plugin-proposal-nullish-coalescing-operator": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.12.13.tgz", + "integrity": "sha512-Qoxpy+OxhDBI5kRqliJFAl4uWXk3Bn24WeFstPH0iLymFehSAUR8MHpqU7njyXv/qbo7oN6yTy5bfCmXdKpo1Q==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.0" + } + }, + "@babel/plugin-proposal-numeric-separator": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.12.13.tgz", + "integrity": "sha512-O1jFia9R8BUCl3ZGB7eitaAPu62TXJRHn7rh+ojNERCFyqRwJMTmhz+tJ+k0CwI6CLjX/ee4qW74FSqlq9I35w==", 
+ "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/plugin-syntax-numeric-separator": "^7.10.4" } }, "@babel/plugin-proposal-object-rest-spread": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.5.4.tgz", - "integrity": "sha512-KCx0z3y7y8ipZUMAEEJOyNi11lMb/FOPUjjB113tfowgw0c16EGYos7worCKBcUAh2oG+OBnoUhsnTSoLpV9uA==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.12.13.tgz", + "integrity": "sha512-WvA1okB/0OS/N3Ldb3sziSrXg6sRphsBgqiccfcQq7woEn5wQLNX82Oc4PlaFcdwcWHuQXAtb8ftbS8Fbsg/sg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/plugin-syntax-object-rest-spread": "^7.2.0" + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/plugin-syntax-object-rest-spread": "^7.8.0", + "@babel/plugin-transform-parameters": "^7.12.13" } }, "@babel/plugin-proposal-optional-catch-binding": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.2.0.tgz", - "integrity": "sha512-mgYj3jCcxug6KUcX4OBoOJz3CMrwRfQELPQ5560F70YQUBZB7uac9fqaWamKR1iWUzGiK2t0ygzjTScZnVz75g==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.12.13.tgz", + "integrity": "sha512-9+MIm6msl9sHWg58NvqpNpLtuFbmpFYk37x8kgnGzAHvX35E1FyAwSUt5hIkSoWJFSAH+iwU8bJ4fcD1zKXOzg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/plugin-syntax-optional-catch-binding": "^7.2.0" + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.0" + } + }, + "@babel/plugin-proposal-optional-chaining": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.12.13.tgz", + "integrity": "sha512-0ZwjGfTcnZqyV3y9DSD1Yk3ebp+sIUpT2YDqP8hovzaNZnQq2Kd7PEqa6iOIUDBXBt7Jl3P7YAcEIL5Pz8u09Q==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/helper-skip-transparent-expression-wrappers": "^7.12.1", + "@babel/plugin-syntax-optional-chaining": "^7.8.0" + } + }, + "@babel/plugin-proposal-private-methods": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.12.13.tgz", + "integrity": "sha512-sV0V57uUwpauixvR7s2o75LmwJI6JECwm5oPUY5beZB1nBl2i37hc7CJGqB5G+58fur5Y6ugvl3LRONk5x34rg==", + "dev": true, + "requires": { + "@babel/helper-create-class-features-plugin": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-proposal-unicode-property-regex": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.4.4.tgz", - "integrity": "sha512-j1NwnOqMG9mFUOH58JTFsA/+ZYzQLUZ/drqWUqxCYLGeu2JFZL8YrNC9hBxKmWtAuOCHPcRpgv7fhap09Fb4kA==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.12.13.tgz", + "integrity": "sha512-XyJmZidNfofEkqFV5VC/bLabGmO5QzenPO/YOfGuEbgU+2sSwMmio3YLb4WtBgcmmdwZHyVyv8on77IUjQ5Gvg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-regex": "^7.4.4", - "regexpu-core": 
"^4.5.4" + "@babel/helper-create-regexp-features-plugin": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-syntax-async-generators": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.2.0.tgz", - "integrity": "sha512-1ZrIRBv2t0GSlcwVoQ6VgSLpLgiN/FVQUzt9znxo7v2Ov4jJrs8RY8tv0wvDmFN3qIdMKWrmMMW6yZ0G19MfGg==", + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-syntax-decorators": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.2.0.tgz", - "integrity": "sha512-38QdqVoXdHUQfTpZo3rQwqQdWtCn5tMv4uV6r2RMfTqNBuv4ZBhz79SfaQWKTVmxHjeFv/DnXVC/+agHCklYWA==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-decorators/-/plugin-syntax-decorators-7.12.13.tgz", + "integrity": "sha512-Rw6aIXGuqDLr6/LoBBYE57nKOzQpz/aDkKlMqEwH+Vp0MXbG6H/TfRjaY343LKxzAKAMXIHsQ8JzaZKuDZ9MwA==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-syntax-dynamic-import": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.2.0.tgz", - "integrity": "sha512-mVxuJ0YroI/h/tbFTPGZR8cv6ai+STMKNBq0f8hFxsxWjl94qqhsb+wXbpNMDPU3cfR1TIsVFzU3nXyZMqyK4w==", + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", + "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-export-namespace-from": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", + "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.8.3" } }, "@babel/plugin-syntax-json-strings": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.2.0.tgz", - "integrity": "sha512-5UGYnMSLRE1dqqZwug+1LISpA403HzlSfsg6P9VXU6TBjcSHeNlw4DxDx7LgpF+iKZoOG/+uzqoRHTdcUpiZNg==", + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + 
"@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-jsx": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.2.0.tgz", - "integrity": "sha512-VyN4QANJkRW6lDBmENzRszvZf3/4AXaj9YR7GwrWeeN9tEBPuXbmDYVU9bYBN0D70zCWVwUy0HWq2553VCb6Hw==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.12.13.tgz", + "integrity": "sha512-d4HM23Q1K7oq/SLNmG6mRt85l2csmQ0cHRaxRXjKW0YFdEXqlZ5kzFQKH5Uc3rDJECgu+yCRgPkG04Mm98R/1g==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.12.13" + } + }, + "@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.10.4" + } + }, + "@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.10.4" } }, "@babel/plugin-syntax-object-rest-spread": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.2.0.tgz", - "integrity": "sha512-t0JKGgqk2We+9may3t0xDdmneaXmyxq0xieYcKHxIsrJO64n1OiMWNUtc5gQK1PA0NpdCRrtZp4z+IUaKugrSA==", + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.8.0" } }, "@babel/plugin-syntax-optional-catch-binding": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.2.0.tgz", - "integrity": "sha512-bDe4xKNhb0LI7IvZHiA13kff0KEfaGX/Hv4lMA9+7TEc63hMNvfKo6ZFpXhKuEp+II/q35Gc4NoMeDZyaUbj9w==", + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": 
"sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.8.0" + } + }, + "@babel/plugin-syntax-top-level-await": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.12.13.tgz", + "integrity": "sha512-A81F9pDwyS7yM//KwbCSDqy3Uj4NMIurtplxphWxoYtNPov7cJsDkAFNNyVlIZ3jwGycVsurZ+LtOA8gZ376iQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-arrow-functions": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.2.0.tgz", - "integrity": "sha512-ER77Cax1+8/8jCB9fo4Ud161OZzWN5qawi4GusDuRLcDbDG+bIGYY20zb2dfAFdTRGzrfq2xZPvF0R64EHnimg==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.12.13.tgz", + "integrity": "sha512-tBtuN6qtCTd+iHzVZVOMNp+L04iIJBpqkdY42tWbmjIT5wvR2kx7gxMBsyhQtFzHwBbyGi9h8J8r9HgnOpQHxg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-async-to-generator": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.5.0.tgz", - "integrity": "sha512-mqvkzwIGkq0bEF1zLRRiTdjfomZJDV33AH3oQzHVGkI2VzEmXLpKKOBvEVaFZBJdN0XTyH38s9j/Kiqr68dggg==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.12.13.tgz", + "integrity": "sha512-psM9QHcHaDr+HZpRuJcE1PXESuGWSCcbiGFFhhwfzdbTxaGDVzuVtdNYliAwcRo3GFg0Bc8MmI+AvIGYIJG04A==", "dev": true, "requires": { - "@babel/helper-module-imports": "^7.0.0", - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-remap-async-to-generator": "^7.1.0" + "@babel/helper-module-imports": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/helper-remap-async-to-generator": "^7.12.13" } }, "@babel/plugin-transform-block-scoped-functions": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.2.0.tgz", - "integrity": "sha512-ntQPR6q1/NKuphly49+QiQiTN0O63uOwjdD6dhIjSWBI5xlrbUFh720TIpzBhpnrLfv2tNH/BXvLIab1+BAI0w==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.12.13.tgz", + "integrity": "sha512-zNyFqbc3kI/fVpqwfqkg6RvBgFpC4J18aKKMmv7KdQ/1GgREapSJAykLMVNwfRGO3BtHj3YQZl8kxCXPcVMVeg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-block-scoping": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.4.4.tgz", - "integrity": "sha512-jkTUyWZcTrwxu5DD4rWz6rDB5Cjdmgz6z7M7RLXOJyCUkFBawssDGcGh8M/0FTSB87avyJI1HsTwUXp9nKA1PA==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.12.13.tgz", + "integrity": "sha512-Pxwe0iqWJX4fOOM2kEZeUuAxHMWb9nK+9oh5d11bsLoB0xMg+mkDpt0eYuDZB7ETrY9bbcVlKUGTOGWy7BHsMQ==", "dev": true, "requires": { - 
"@babel/helper-plugin-utils": "^7.0.0", - "lodash": "^4.17.11" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-classes": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.4.4.tgz", - "integrity": "sha512-/e44eFLImEGIpL9qPxSRat13I5QNRgBLu2hOQJCF7VLy/otSM/sypV1+XaIw5+502RX/+6YaSAPmldk+nhHDPw==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.12.13.tgz", + "integrity": "sha512-cqZlMlhCC1rVnxE5ZGMtIb896ijL90xppMiuWXcwcOAuFczynpd3KYemb91XFFPi3wJSe/OcrX9lXoowatkkxA==", "dev": true, "requires": { - "@babel/helper-annotate-as-pure": "^7.0.0", - "@babel/helper-define-map": "^7.4.4", - "@babel/helper-function-name": "^7.1.0", - "@babel/helper-optimise-call-expression": "^7.0.0", - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-replace-supers": "^7.4.4", - "@babel/helper-split-export-declaration": "^7.4.4", + "@babel/helper-annotate-as-pure": "^7.12.13", + "@babel/helper-function-name": "^7.12.13", + "@babel/helper-optimise-call-expression": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/helper-replace-supers": "^7.12.13", + "@babel/helper-split-export-declaration": "^7.12.13", "globals": "^11.1.0" } }, "@babel/plugin-transform-computed-properties": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.2.0.tgz", - "integrity": "sha512-kP/drqTxY6Xt3NNpKiMomfgkNn4o7+vKxK2DDKcBG9sHj51vHqMBGy8wbDS/J4lMxnqs153/T3+DmCEAkC5cpA==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.12.13.tgz", + "integrity": "sha512-dDfuROUPGK1mTtLKyDPUavmj2b6kFu82SmgpztBFEO974KMjJT+Ytj3/oWsTUMBmgPcp9J5Pc1SlcAYRpJ2hRA==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-destructuring": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.5.0.tgz", - "integrity": "sha512-YbYgbd3TryYYLGyC7ZR+Tq8H/+bCmwoaxHfJHupom5ECstzbRLTch6gOQbhEY9Z4hiCNHEURgq06ykFv9JZ/QQ==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.12.13.tgz", + "integrity": "sha512-Dn83KykIFzjhA3FDPA1z4N+yfF3btDGhjnJwxIj0T43tP0flCujnU8fKgEkf0C1biIpSv9NZegPBQ1J6jYkwvQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-dotall-regex": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.4.4.tgz", - "integrity": "sha512-P05YEhRc2h53lZDjRPk/OektxCVevFzZs2Gfjd545Wde3k+yFDbXORgl2e0xpbq8mLcKJ7Idss4fAg0zORN/zg==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.12.13.tgz", + "integrity": "sha512-foDrozE65ZFdUC2OfgeOCrEPTxdB3yjqxpXh8CH+ipd9CHd4s/iq81kcUpyH8ACGNEPdFqbtzfgzbT/ZGlbDeQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-regex": "^7.4.4", - "regexpu-core": "^4.5.4" + "@babel/helper-create-regexp-features-plugin": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13" } }, 
"@babel/plugin-transform-duplicate-keys": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.5.0.tgz", - "integrity": "sha512-igcziksHizyQPlX9gfSjHkE2wmoCH3evvD2qR5w29/Dk0SMKE/eOI7f1HhBdNhR/zxJDqrgpoDTq5YSLH/XMsQ==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.12.13.tgz", + "integrity": "sha512-NfADJiiHdhLBW3pulJlJI2NB0t4cci4WTZ8FtdIuNc2+8pslXdPtRRAEWqUY+m9kNOk2eRYbTAOipAxlrOcwwQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-exponentiation-operator": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.2.0.tgz", - "integrity": "sha512-umh4hR6N7mu4Elq9GG8TOu9M0bakvlsREEC+ialrQN6ABS4oDQ69qJv1VtR3uxlKMCQMCvzk7vr17RHKcjx68A==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.12.13.tgz", + "integrity": "sha512-fbUelkM1apvqez/yYx1/oICVnGo2KM5s63mhGylrmXUxK/IAXSIf87QIxVfZldWf4QsOafY6vV3bX8aMHSvNrA==", "dev": true, "requires": { - "@babel/helper-builder-binary-assignment-operator-visitor": "^7.1.0", - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-builder-binary-assignment-operator-visitor": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-for-of": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.4.4.tgz", - "integrity": "sha512-9T/5Dlr14Z9TIEXLXkt8T1DU7F24cbhwhMNUziN3hB1AXoZcdzPcTiKGRn/6iOymDqtTKWnr/BtRKN9JwbKtdQ==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.12.13.tgz", + "integrity": "sha512-xCbdgSzXYmHGyVX3+BsQjcd4hv4vA/FDy7Kc8eOpzKmBBPEOTurt0w5fCRQaGl+GSBORKgJdstQ1rHl4jbNseQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-function-name": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.4.4.tgz", - "integrity": "sha512-iU9pv7U+2jC9ANQkKeNF6DrPy4GBa4NWQtl6dHB4Pb3izX2JOEvDTFarlNsBj/63ZEzNNIAMs3Qw4fNCcSOXJA==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.12.13.tgz", + "integrity": "sha512-6K7gZycG0cmIwwF7uMK/ZqeCikCGVBdyP2J5SKNCXO5EOHcqi+z7Jwf8AmyDNcBgxET8DrEtCt/mPKPyAzXyqQ==", "dev": true, "requires": { - "@babel/helper-function-name": "^7.1.0", - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-function-name": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-literals": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.2.0.tgz", - "integrity": "sha512-2ThDhm4lI4oV7fVQ6pNNK+sx+c/GM5/SaML0w/r4ZB7sAneD/piDJtwdKlNckXeyGK7wlwg2E2w33C/Hh+VFCg==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.12.13.tgz", + "integrity": "sha512-FW+WPjSR7hiUxMcKqyNjP05tQ2kmBCdpEpZHY1ARm96tGQCCBvXKnpjILtDplUnJ/eHZ0lALLM+d2lMFSpYJrQ==", + "dev": true, + 
"requires": { + "@babel/helper-plugin-utils": "^7.12.13" + } + }, + "@babel/plugin-transform-member-expression-literals": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.12.13.tgz", + "integrity": "sha512-kxLkOsg8yir4YeEPHLuO2tXP9R/gTjpuTOjshqSpELUN3ZAg2jfDnKUvzzJxObun38sw3wm4Uu69sX/zA7iRvg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-modules-amd": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.5.0.tgz", - "integrity": "sha512-n20UsQMKnWrltocZZm24cRURxQnWIvsABPJlw/fvoy9c6AgHZzoelAIzajDHAQrDpuKFFPPcFGd7ChsYuIUMpg==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.12.13.tgz", + "integrity": "sha512-JHLOU0o81m5UqG0Ulz/fPC68/v+UTuGTWaZBUwpEk1fYQ1D9LfKV6MPn4ttJKqRo5Lm460fkzjLTL4EHvCprvA==", "dev": true, "requires": { - "@babel/helper-module-transforms": "^7.1.0", - "@babel/helper-plugin-utils": "^7.0.0", - "babel-plugin-dynamic-import-node": "^2.3.0" + "@babel/helper-module-transforms": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13", + "babel-plugin-dynamic-import-node": "^2.3.3" } }, "@babel/plugin-transform-modules-commonjs": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.5.0.tgz", - "integrity": "sha512-xmHq0B+ytyrWJvQTc5OWAC4ii6Dhr0s22STOoydokG51JjWhyYo5mRPXoi+ZmtHQhZZwuXNN+GG5jy5UZZJxIQ==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.12.13.tgz", + "integrity": "sha512-OGQoeVXVi1259HjuoDnsQMlMkT9UkZT9TpXAsqWplS/M0N1g3TJAn/ByOCeQu7mfjc5WpSsRU+jV1Hd89ts0kQ==", "dev": true, "requires": { - "@babel/helper-module-transforms": "^7.4.4", - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-simple-access": "^7.1.0", - "babel-plugin-dynamic-import-node": "^2.3.0" + "@babel/helper-module-transforms": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/helper-simple-access": "^7.12.13", + "babel-plugin-dynamic-import-node": "^2.3.3" } }, "@babel/plugin-transform-modules-systemjs": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.5.0.tgz", - "integrity": "sha512-Q2m56tyoQWmuNGxEtUyeEkm6qJYFqs4c+XyXH5RAuYxObRNz9Zgj/1g2GMnjYp2EUyEy7YTrxliGCXzecl/vJg==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.12.13.tgz", + "integrity": "sha512-aHfVjhZ8QekaNF/5aNdStCGzwTbU7SI5hUybBKlMzqIMC7w7Ho8hx5a4R/DkTHfRfLwHGGxSpFt9BfxKCoXKoA==", "dev": true, "requires": { - "@babel/helper-hoist-variables": "^7.4.4", - "@babel/helper-plugin-utils": "^7.0.0", - "babel-plugin-dynamic-import-node": "^2.3.0" + "@babel/helper-hoist-variables": "^7.12.13", + "@babel/helper-module-transforms": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/helper-validator-identifier": "^7.12.11", + "babel-plugin-dynamic-import-node": "^2.3.3" } }, "@babel/plugin-transform-modules-umd": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.2.0.tgz", - 
"integrity": "sha512-BV3bw6MyUH1iIsGhXlOK6sXhmSarZjtJ/vMiD9dNmpY8QXFFQTj+6v92pcfy1iqa8DeAfJFwoxcrS/TUZda6sw==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.12.13.tgz", + "integrity": "sha512-BgZndyABRML4z6ibpi7Z98m4EVLFI9tVsZDADC14AElFaNHHBcJIovflJ6wtCqFxwy2YJ1tJhGRsr0yLPKoN+w==", "dev": true, "requires": { - "@babel/helper-module-transforms": "^7.1.0", - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-module-transforms": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-named-capturing-groups-regex": { - "version": "7.4.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.4.5.tgz", - "integrity": "sha512-z7+2IsWafTBbjNsOxU/Iv5CvTJlr5w4+HGu1HovKYTtgJ362f7kBcQglkfmlspKKZ3bgrbSGvLfNx++ZJgCWsg==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.12.13.tgz", + "integrity": "sha512-Xsm8P2hr5hAxyYblrfACXpQKdQbx4m2df9/ZZSQ8MAhsadw06+jW7s9zsSw6he+mJZXRlVMyEnVktJo4zjk1WA==", "dev": true, "requires": { - "regexp-tree": "^0.1.6" + "@babel/helper-create-regexp-features-plugin": "^7.12.13" } }, "@babel/plugin-transform-new-target": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.4.4.tgz", - "integrity": "sha512-r1z3T2DNGQwwe2vPGZMBNjioT2scgWzK9BCnDEh+46z8EEwXBq24uRzd65I7pjtugzPSj921aM15RpESgzsSuA==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.12.13.tgz", + "integrity": "sha512-/KY2hbLxrG5GTQ9zzZSc3xWiOy379pIETEhbtzwZcw9rvuaVV4Fqy7BYGYOWZnaoXIQYbbJ0ziXLa/sKcGCYEQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-object-super": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.2.0.tgz", - "integrity": "sha512-VMyhPYZISFZAqAPVkiYb7dUe2AsVi2/wCT5+wZdsNO31FojQJa9ns40hzZ6U9f50Jlq4w6qwzdBB2uwqZ00ebg==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.12.13.tgz", + "integrity": "sha512-JzYIcj3XtYspZDV8j9ulnoMPZZnF/Cj0LUxPOjR89BdBVx+zYJI9MdMIlUZjbXDX+6YVeS6I3e8op+qQ3BYBoQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-replace-supers": "^7.1.0" + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/helper-replace-supers": "^7.12.13" } }, "@babel/plugin-transform-parameters": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.4.4.tgz", - "integrity": "sha512-oMh5DUO1V63nZcu/ZVLQFqiihBGo4OpxJxR1otF50GMeCLiRx5nUdtokd+u9SuVJrvvuIh9OosRFPP4pIPnwmw==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.12.13.tgz", + "integrity": "sha512-e7QqwZalNiBRHCpJg/P8s/VJeSRYgmtWySs1JwvfwPqhBbiWfOcHDKdeAi6oAyIimoKWBlwc8oTgbZHdhCoVZA==", + "dev": true, + "requires": { + "@babel/helper-plugin-utils": "^7.12.13" + } + }, + "@babel/plugin-transform-property-literals": { + "version": "7.12.13", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.12.13.tgz", + "integrity": "sha512-nqVigwVan+lR+g8Fj8Exl0UQX2kymtjcWfMOYM1vTYEKujeyv2SkMgazf2qNcK7l4SDiKyTA/nHCPqL4e2zo1A==", "dev": true, "requires": { - "@babel/helper-call-delegate": "^7.4.4", - "@babel/helper-get-function-arity": "^7.0.0", - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-regenerator": { - "version": "7.4.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.4.5.tgz", - "integrity": "sha512-gBKRh5qAaCWntnd09S8QC7r3auLCqq5DI6O0DlfoyDjslSBVqBibrMdsqO+Uhmx3+BlOmE/Kw1HFxmGbv0N9dA==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.12.13.tgz", + "integrity": "sha512-lxb2ZAvSLyJ2PEe47hoGWPmW22v7CtSl9jW8mingV4H2sEX/JOcrAj2nPuGWi56ERUm2bUpjKzONAuT6HCn2EA==", + "dev": true, + "requires": { + "regenerator-transform": "^0.14.2" + } + }, + "@babel/plugin-transform-reserved-words": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.12.13.tgz", + "integrity": "sha512-xhUPzDXxZN1QfiOy/I5tyye+TRz6lA7z6xaT4CLOjPRMVg1ldRf0LHw0TDBpYL4vG78556WuHdyO9oi5UmzZBg==", "dev": true, "requires": { - "regenerator-transform": "^0.14.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-runtime": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.5.0.tgz", - "integrity": "sha512-LmPIZOAgTLl+86gR9KjLXex6P/lRz1fWEjTz6V6QZMmKie51ja3tvzdwORqhHc4RWR8TcZ5pClpRWs0mlaA2ng==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.12.13.tgz", + "integrity": "sha512-ho1CV2lm8qn2AxD3JdvPgtLVHCYLDaOszlf0gosdHcJAIfgNizag76WI+FoibrvfT+h117fgf8h+wgvo4O2qbA==", "dev": true, "requires": { - "@babel/helper-module-imports": "^7.0.0", - "@babel/helper-plugin-utils": "^7.0.0", - "resolve": "^1.8.1", + "@babel/helper-module-imports": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13", "semver": "^5.5.1" } }, "@babel/plugin-transform-shorthand-properties": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.2.0.tgz", - "integrity": "sha512-QP4eUM83ha9zmYtpbnyjTLAGKQritA5XW/iG9cjtuOI8s1RuL/3V6a3DeSHfKutJQ+ayUfeZJPcnCYEQzaPQqg==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.12.13.tgz", + "integrity": "sha512-xpL49pqPnLtf0tVluuqvzWIgLEhuPpZzvs2yabUHSKRNlN7ScYU7aMlmavOeyXJZKgZKQRBlh8rHbKiJDraTSw==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-spread": { - "version": "7.2.2", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.2.2.tgz", - "integrity": "sha512-KWfky/58vubwtS0hLqEnrWJjsMGaOeSBn90Ezn5Jeg9Z8KKHmELbP1yGylMlm5N6TPKeY9A2+UaSYLdxahg01w==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.12.13.tgz", + "integrity": "sha512-dUCrqPIowjqk5pXsx1zPftSq4sT0aCeZVAxhdgs3AMgyaDmoUT0G+5h3Dzja27t76aUEIJWlFgPJqJ/d4dbTtg==", "dev": 
true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/helper-skip-transparent-expression-wrappers": "^7.12.1" } }, "@babel/plugin-transform-sticky-regex": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.2.0.tgz", - "integrity": "sha512-KKYCoGaRAf+ckH8gEL3JHUaFVyNHKe3ASNsZ+AlktgHevvxGigoIttrEJb8iKN03Q7Eazlv1s6cx2B2cQ3Jabw==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.12.13.tgz", + "integrity": "sha512-Jc3JSaaWT8+fr7GRvQP02fKDsYk4K/lYwWq38r/UGfaxo89ajud321NH28KRQ7xy1Ybc0VUE5Pz8psjNNDUglg==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-regex": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-template-literals": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.4.4.tgz", - "integrity": "sha512-mQrEC4TWkhLN0z8ygIvEL9ZEToPhG5K7KDW3pzGqOfIGZ28Jb0POUkeWcoz8HnHvhFy6dwAT1j8OzqN8s804+g==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.12.13.tgz", + "integrity": "sha512-arIKlWYUgmNsF28EyfmiQHJLJFlAJNYkuQO10jL46ggjBpeb2re1P9K9YGxNJB45BqTbaslVysXDYm/g3sN/Qg==", "dev": true, "requires": { - "@babel/helper-annotate-as-pure": "^7.0.0", - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, "@babel/plugin-transform-typeof-symbol": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.2.0.tgz", - "integrity": "sha512-2LNhETWYxiYysBtrBTqL8+La0jIoQQnIScUJc74OYvUGRmkskNY4EzLCnjHBzdmb38wqtTaixpo1NctEcvMDZw==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.12.13.tgz", + "integrity": "sha512-eKv/LmUJpMnu4npgfvs3LiHhJua5fo/CysENxa45YCQXZwKnGCQKAg87bvoqSW1fFT+HA32l03Qxsm8ouTY3ZQ==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0" + "@babel/helper-plugin-utils": "^7.12.13" } }, - "@babel/plugin-transform-unicode-regex": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.4.4.tgz", - "integrity": "sha512-il+/XdNw01i93+M9J9u4T7/e/Ue/vWfNZE4IRUQjplu2Mqb/AFTDimkw2tdEdSH50wuQXZAbXSql0UphQke+vA==", + "@babel/plugin-transform-unicode-escapes": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.12.13.tgz", + "integrity": "sha512-0bHEkdwJ/sN/ikBHfSmOXPypN/beiGqjo+o4/5K+vxEFNPRPdImhviPakMKG4x96l85emoa0Z6cDflsdBusZbw==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/helper-regex": "^7.4.4", - "regexpu-core": "^4.5.4" + "@babel/helper-plugin-utils": "^7.12.13" } }, - "@babel/preset-env": { - "version": "7.3.4", - "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.3.4.tgz", - "integrity": "sha512-2mwqfYMK8weA0g0uBKOt4FE3iEodiHy9/CW0b+nWXcbL+pGzLx8ESYc+j9IIxr6LTDHWKgPm71i9smo02bw+gA==", + "@babel/plugin-transform-unicode-regex": { + "version": "7.12.13", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.12.13.tgz", + "integrity": "sha512-mDRzSNY7/zopwisPZ5kM9XKCfhchqIYwAKRERtEnhYscZB79VRekuRSoYbN0+KVe3y8+q1h6A4svXtP7N+UoCA==", "dev": true, "requires": { - "@babel/helper-module-imports": "^7.0.0", - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/plugin-proposal-async-generator-functions": "^7.2.0", - "@babel/plugin-proposal-json-strings": "^7.2.0", - "@babel/plugin-proposal-object-rest-spread": "^7.3.4", - "@babel/plugin-proposal-optional-catch-binding": "^7.2.0", - "@babel/plugin-proposal-unicode-property-regex": "^7.2.0", - "@babel/plugin-syntax-async-generators": "^7.2.0", - "@babel/plugin-syntax-json-strings": "^7.2.0", - "@babel/plugin-syntax-object-rest-spread": "^7.2.0", - "@babel/plugin-syntax-optional-catch-binding": "^7.2.0", - "@babel/plugin-transform-arrow-functions": "^7.2.0", - "@babel/plugin-transform-async-to-generator": "^7.3.4", - "@babel/plugin-transform-block-scoped-functions": "^7.2.0", - "@babel/plugin-transform-block-scoping": "^7.3.4", - "@babel/plugin-transform-classes": "^7.3.4", - "@babel/plugin-transform-computed-properties": "^7.2.0", - "@babel/plugin-transform-destructuring": "^7.2.0", - "@babel/plugin-transform-dotall-regex": "^7.2.0", - "@babel/plugin-transform-duplicate-keys": "^7.2.0", - "@babel/plugin-transform-exponentiation-operator": "^7.2.0", - "@babel/plugin-transform-for-of": "^7.2.0", - "@babel/plugin-transform-function-name": "^7.2.0", - "@babel/plugin-transform-literals": "^7.2.0", - "@babel/plugin-transform-modules-amd": "^7.2.0", - "@babel/plugin-transform-modules-commonjs": "^7.2.0", - "@babel/plugin-transform-modules-systemjs": "^7.3.4", - "@babel/plugin-transform-modules-umd": "^7.2.0", - "@babel/plugin-transform-named-capturing-groups-regex": "^7.3.0", - "@babel/plugin-transform-new-target": "^7.0.0", - "@babel/plugin-transform-object-super": "^7.2.0", - "@babel/plugin-transform-parameters": "^7.2.0", - "@babel/plugin-transform-regenerator": "^7.3.4", - "@babel/plugin-transform-shorthand-properties": "^7.2.0", - "@babel/plugin-transform-spread": "^7.2.0", - "@babel/plugin-transform-sticky-regex": "^7.2.0", - "@babel/plugin-transform-template-literals": "^7.2.0", - "@babel/plugin-transform-typeof-symbol": "^7.2.0", - "@babel/plugin-transform-unicode-regex": "^7.2.0", - "browserslist": "^4.3.4", - "invariant": "^2.2.2", - "js-levenshtein": "^1.1.3", - "semver": "^5.3.0" + "@babel/helper-create-regexp-features-plugin": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13" } }, - "@babel/runtime": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.5.4.tgz", - "integrity": "sha512-Na84uwyImZZc3FKf4aUF1tysApzwf3p2yuFBIyBfbzT5glzKTdvYI4KVW4kcgjrzoGUjC7w3YyCHcJKaRxsr2Q==", + "@babel/preset-env": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.12.13.tgz", + "integrity": "sha512-JUVlizG8SoFTz4LmVUL8++aVwzwxcvey3N0j1tRbMAXVEy95uQ/cnEkmEKHN00Bwq4voAV3imQGnQvpkLAxsrw==", + "dev": true, + "requires": { + "@babel/compat-data": "^7.12.13", + "@babel/helper-compilation-targets": "^7.12.13", + "@babel/helper-module-imports": "^7.12.13", + "@babel/helper-plugin-utils": "^7.12.13", + "@babel/helper-validator-option": "^7.12.11", + "@babel/plugin-proposal-async-generator-functions": "^7.12.13", + "@babel/plugin-proposal-class-properties": "^7.12.13", + "@babel/plugin-proposal-dynamic-import": "^7.12.1", + "@babel/plugin-proposal-export-namespace-from": "^7.12.13", 
+ "@babel/plugin-proposal-json-strings": "^7.12.13", + "@babel/plugin-proposal-logical-assignment-operators": "^7.12.13", + "@babel/plugin-proposal-nullish-coalescing-operator": "^7.12.13", + "@babel/plugin-proposal-numeric-separator": "^7.12.13", + "@babel/plugin-proposal-object-rest-spread": "^7.12.13", + "@babel/plugin-proposal-optional-catch-binding": "^7.12.13", + "@babel/plugin-proposal-optional-chaining": "^7.12.13", + "@babel/plugin-proposal-private-methods": "^7.12.13", + "@babel/plugin-proposal-unicode-property-regex": "^7.12.13", + "@babel/plugin-syntax-async-generators": "^7.8.0", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-dynamic-import": "^7.8.0", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3", + "@babel/plugin-syntax-json-strings": "^7.8.0", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.0", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.0", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.0", + "@babel/plugin-syntax-optional-chaining": "^7.8.0", + "@babel/plugin-syntax-top-level-await": "^7.12.13", + "@babel/plugin-transform-arrow-functions": "^7.12.13", + "@babel/plugin-transform-async-to-generator": "^7.12.13", + "@babel/plugin-transform-block-scoped-functions": "^7.12.13", + "@babel/plugin-transform-block-scoping": "^7.12.13", + "@babel/plugin-transform-classes": "^7.12.13", + "@babel/plugin-transform-computed-properties": "^7.12.13", + "@babel/plugin-transform-destructuring": "^7.12.13", + "@babel/plugin-transform-dotall-regex": "^7.12.13", + "@babel/plugin-transform-duplicate-keys": "^7.12.13", + "@babel/plugin-transform-exponentiation-operator": "^7.12.13", + "@babel/plugin-transform-for-of": "^7.12.13", + "@babel/plugin-transform-function-name": "^7.12.13", + "@babel/plugin-transform-literals": "^7.12.13", + "@babel/plugin-transform-member-expression-literals": "^7.12.13", + "@babel/plugin-transform-modules-amd": "^7.12.13", + "@babel/plugin-transform-modules-commonjs": "^7.12.13", + "@babel/plugin-transform-modules-systemjs": "^7.12.13", + "@babel/plugin-transform-modules-umd": "^7.12.13", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.12.13", + "@babel/plugin-transform-new-target": "^7.12.13", + "@babel/plugin-transform-object-super": "^7.12.13", + "@babel/plugin-transform-parameters": "^7.12.13", + "@babel/plugin-transform-property-literals": "^7.12.13", + "@babel/plugin-transform-regenerator": "^7.12.13", + "@babel/plugin-transform-reserved-words": "^7.12.13", + "@babel/plugin-transform-shorthand-properties": "^7.12.13", + "@babel/plugin-transform-spread": "^7.12.13", + "@babel/plugin-transform-sticky-regex": "^7.12.13", + "@babel/plugin-transform-template-literals": "^7.12.13", + "@babel/plugin-transform-typeof-symbol": "^7.12.13", + "@babel/plugin-transform-unicode-escapes": "^7.12.13", + "@babel/plugin-transform-unicode-regex": "^7.12.13", + "@babel/preset-modules": "^0.1.3", + "@babel/types": "^7.12.13", + "core-js-compat": "^3.8.0", + "semver": "^5.5.0" + } + }, + "@babel/preset-modules": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.4.tgz", + "integrity": "sha512-J36NhwnfdzpmH41M1DrnkkgAqhZaqr/NBdPfQ677mLzlaXo+oDiv1deyCDtgAhz8p328otdob0Du7+xgHGZbKg==", "dev": true, "requires": { - "regenerator-runtime": "^0.13.2" + "@babel/helper-plugin-utils": "^7.0.0", + 
"@babel/plugin-proposal-unicode-property-regex": "^7.4.4", + "@babel/plugin-transform-dotall-regex": "^7.4.4", + "@babel/types": "^7.4.4", + "esutils": "^2.0.2" } }, - "@babel/runtime-corejs2": { - "version": "7.5.4", - "resolved": "https://registry.npmjs.org/@babel/runtime-corejs2/-/runtime-corejs2-7.5.4.tgz", - "integrity": "sha512-sHv74OzyZ18d6tjHU0HmlVES3+l+lydkOMTiKsJSTGWcTBpIMfXLEgduahlJrQjknW9RCQAqLIEdLOHjBmq/hg==", + "@babel/runtime": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.12.13.tgz", + "integrity": "sha512-8+3UMPBrjFa/6TtKi/7sehPKqfAm4g6K+YQjyyFOLUTxzOngcRZTlAVY8sc2CORJYqdHQY8gRPHmn+qo15rCBw==", "dev": true, "requires": { - "core-js": "^2.6.5", - "regenerator-runtime": "^0.13.2" + "regenerator-runtime": "^0.13.4" } }, "@babel/template": { - "version": "7.4.4", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.4.4.tgz", - "integrity": "sha512-CiGzLN9KgAvgZsnivND7rkA+AeJ9JB0ciPOD4U59GKbQP2iQl+olF1l76kJOupqidozfZ32ghwBEJDhnk9MEcw==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.12.13.tgz", + "integrity": "sha512-/7xxiGA57xMo/P2GVvdEumr8ONhFOhfgq2ihK3h1e6THqzTAkHbkXgB0xI9yeTfIUoH3+oAeHhqm/I43OTbbjA==", "dev": true, "requires": { - "@babel/code-frame": "^7.0.0", - "@babel/parser": "^7.4.4", - "@babel/types": "^7.4.4" + "@babel/code-frame": "^7.12.13", + "@babel/parser": "^7.12.13", + "@babel/types": "^7.12.13" } }, "@babel/traverse": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.5.0.tgz", - "integrity": "sha512-SnA9aLbyOCcnnbQEGwdfBggnc142h/rbqqsXcaATj2hZcegCl903pUD/lfpsNBlBSuWow/YDfRyJuWi2EPR5cg==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.12.13.tgz", + "integrity": "sha512-3Zb4w7eE/OslI0fTp8c7b286/cQps3+vdLW3UcwC8VSJC6GbKn55aeVVu2QJNuCDoeKyptLOFrPq8WqZZBodyA==", "dev": true, "requires": { - "@babel/code-frame": "^7.0.0", - "@babel/generator": "^7.5.0", - "@babel/helper-function-name": "^7.1.0", - "@babel/helper-split-export-declaration": "^7.4.4", - "@babel/parser": "^7.5.0", - "@babel/types": "^7.5.0", + "@babel/code-frame": "^7.12.13", + "@babel/generator": "^7.12.13", + "@babel/helper-function-name": "^7.12.13", + "@babel/helper-split-export-declaration": "^7.12.13", + "@babel/parser": "^7.12.13", + "@babel/types": "^7.12.13", "debug": "^4.1.0", "globals": "^11.1.0", - "lodash": "^4.17.11" + "lodash": "^4.17.19" } }, "@babel/types": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.5.0.tgz", - "integrity": "sha512-UFpDVqRABKsW01bvw7/wSUe56uy6RXM5+VJibVVAybDGxEW25jdwiFJEf7ASvSaC7sN7rbE/l3cLp2izav+CtQ==", + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.12.13.tgz", + "integrity": "sha512-oKrdZTld2im1z8bDwTOQvUbxKwE+854zc16qWZQlcTqMN00pWxHQ4ZeOq0yDMnisOpRykH2/5Qqcrk/OlbAjiQ==", "dev": true, "requires": { - "esutils": "^2.0.2", - "lodash": "^4.17.11", + "@babel/helper-validator-identifier": "^7.12.11", + "lodash": "^4.17.19", "to-fast-properties": "^2.0.0" } }, "@hapi/address": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@hapi/address/-/address-2.0.0.tgz", - "integrity": "sha512-mV6T0IYqb0xL1UALPFplXYQmR0twnXG0M6jUswpquqT2sD12BOiCiLy3EvMp/Fy7s3DZElC4/aPjEjo2jeZpvw==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@hapi/address/-/address-2.1.4.tgz", + "integrity": 
"sha512-QD1PhQk+s31P1ixsX0H0Suoupp3VMXzIVMSwobR3F3MSUO2YCV0B7xqLcUw/Bh8yuvd3LhpyqLQWTNcRmp6IdQ==", + "dev": true + }, + "@hapi/bourne": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-1.3.2.tgz", + "integrity": "sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA==", "dev": true }, "@hapi/hoek": { - "version": "6.2.4", - "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-6.2.4.tgz", - "integrity": "sha512-HOJ20Kc93DkDVvjwHyHawPwPkX44sIrbXazAUDiUXaY2R9JwQGo2PhFfnQtdrsIe4igjG2fPgMra7NYw7qhy0A==", + "version": "8.5.1", + "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-8.5.1.tgz", + "integrity": "sha512-yN7kbciD87WzLGc5539Tn0sApjyiGHAJgKvG9W8C7O+6c7qmoQMfVs0W4bX17eqz6C78QJqqFrtgdK5EWf6Qow==", "dev": true }, "@hapi/joi": { - "version": "15.1.0", - "resolved": "https://registry.npmjs.org/@hapi/joi/-/joi-15.1.0.tgz", - "integrity": "sha512-n6kaRQO8S+kepUTbXL9O/UOL788Odqs38/VOfoCrATDtTvyfiO3fgjlSRaNkHabpTLgM7qru9ifqXlXbXk8SeQ==", + "version": "15.1.1", + "resolved": "https://registry.npmjs.org/@hapi/joi/-/joi-15.1.1.tgz", + "integrity": "sha512-entf8ZMOK8sc+8YfeOlM8pCfg3b5+WZIKBfUaaJT8UsjAAPjartzxIYm3TIbjvA4u+u++KbcXD38k682nVHDAQ==", "dev": true, "requires": { "@hapi/address": "2.x.x", - "@hapi/hoek": "6.x.x", - "@hapi/marker": "1.x.x", + "@hapi/bourne": "1.x.x", + "@hapi/hoek": "8.x.x", "@hapi/topo": "3.x.x" } }, - "@hapi/marker": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@hapi/marker/-/marker-1.0.0.tgz", - "integrity": "sha512-JOfdekTXnJexfE8PyhZFyHvHjt81rBFSAbTIRAhF2vv/2Y1JzoKsGqxH/GpZJoF7aEfYok8JVcAHmSz1gkBieA==", - "dev": true - }, "@hapi/topo": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-3.1.2.tgz", - "integrity": "sha512-r+aumOqJ5QbD6aLPJWqVjMAPsx5pZKz+F5yPqXZ/WWG9JTtHbQqlzrJoknJ0iJxLj9vlXtmpSdjlkszseeG8OA==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-3.1.6.tgz", + "integrity": "sha512-tAag0jEcjwH+P2quUfipd7liWCNX2F8NvYjQp2wtInsZxnMlypdw0FtAOLxtvvkO+GSRRbmNi8m/5y42PQJYCQ==", "dev": true, "requires": { - "@hapi/hoek": "8.x.x" - }, - "dependencies": { - "@hapi/hoek": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-8.0.2.tgz", - "integrity": "sha512-O6o6mrV4P65vVccxymuruucb+GhP2zl9NLCG8OdoFRS8BEGw3vwpPp20wpAtpbQQxz1CEUtmxJGgWhjq1XA3qw==", - "dev": true - } + "@hapi/hoek": "^8.3.0" } }, "@intervolga/optimize-cssnano-plugin": { @@ -895,85 +1092,191 @@ "dev": true }, "@soda/friendly-errors-webpack-plugin": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/@soda/friendly-errors-webpack-plugin/-/friendly-errors-webpack-plugin-1.7.1.tgz", - "integrity": "sha512-cWKrGaFX+rfbMrAxVv56DzhPNqOJPZuNIS2HGMELtgGzb+vsMzyig9mml5gZ/hr2BGtSLV+dP2LUEuAL8aG2mQ==", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@soda/friendly-errors-webpack-plugin/-/friendly-errors-webpack-plugin-1.8.0.tgz", + "integrity": "sha512-RLotfx6k1+nfLacwNCenj7VnTMPxVwYKoGOcffMFoJDKM8tXzBiCN0hMHFJNnoAojduYAsxuiMm0EOMixgiRow==", "dev": true, "requires": { - "chalk": "^1.1.3", - "error-stack-parser": "^2.0.0", - "string-width": "^2.0.0" + "chalk": "^2.4.2", + "error-stack-parser": "^2.0.2", + "string-width": "^2.0.0", + "strip-ansi": "^5" }, "dependencies": { "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", "dev": true }, - "ansi-styles": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", - "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", "dev": true }, - "chalk": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", + "string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", "dev": true, "requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + }, + "dependencies": { + "strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "requires": { + "ansi-regex": "^3.0.0" + } + } } }, "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", "dev": true, "requires": { - "ansi-regex": "^2.0.0" + "ansi-regex": "^4.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + } } - }, - "supports-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", - "dev": true } } }, - "@types/events": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@types/events/-/events-3.0.0.tgz", - "integrity": "sha512-EaObqwIvayI5a8dCzhFrjKzVwKLxjoG9T6Ppd5CEo07LRKfQ8Yokw54r5+Wq7FaBQ+yXRvQAYPrHwya1/UFt9g==", + "@soda/get-current-script": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@soda/get-current-script/-/get-current-script-1.0.2.tgz", + "integrity": "sha512-T7VNNlYVM1SgQ+VsMYhnDkcGmWhQdL0bDyGm5TlQ3GBXnJscEClUUOKduWTmm2zCnvNLC1hc3JpuXjs/nFOc5w==", + "dev": true + }, + "@types/anymatch": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/@types/anymatch/-/anymatch-1.3.1.tgz", + "integrity": "sha512-/+CRPXpBDpo2RK9C68N3b2cOvO0Cf5B9aPijHsoDQTHivnGSObdOF2BRQOYjojWTDy6nQvMjmqRXIxH55VjxxA==", "dev": true }, + "@types/body-parser": { + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.0.tgz", + "integrity": "sha512-W98JrE0j2K78swW4ukqMleo8R7h/pFETjM2DQ90MF6XK2i4LO4W3gQ71Lt4w3bfm2EvVSyWHplECvB5sK22yFQ==", + "dev": true, + "requires": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "@types/connect": { + "version": "3.4.34", + "resolved": 
"https://registry.npmjs.org/@types/connect/-/connect-3.4.34.tgz", + "integrity": "sha512-ePPA/JuI+X0vb+gSWlPKOY0NdNAie/rPUqX2GUPpbZwiKTkSPhjXWuee47E4MtE54QVzGCQMQkAL6JhV2E1+cQ==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/connect-history-api-fallback": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/@types/connect-history-api-fallback/-/connect-history-api-fallback-1.3.3.tgz", + "integrity": "sha512-7SxFCd+FLlxCfwVwbyPxbR4khL9aNikJhrorw8nUIOqeuooc9gifBuDQOJw5kzN7i6i3vLn9G8Wde/4QDihpYw==", + "dev": true, + "requires": { + "@types/express-serve-static-core": "*", + "@types/node": "*" + } + }, + "@types/express": { + "version": "4.17.11", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.11.tgz", + "integrity": "sha512-no+R6rW60JEc59977wIxreQVsIEOAYwgCqldrA/vkpCnbD7MqTefO97lmoBe4WE0F156bC4uLSP1XHDOySnChg==", + "dev": true, + "requires": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.18", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "@types/express-serve-static-core": { + "version": "4.17.18", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.18.tgz", + "integrity": "sha512-m4JTwx5RUBNZvky/JJ8swEJPKFd8si08pPF2PfizYjGZOKr/svUWPcoUmLow6MmPzhasphB7gSTINY67xn3JNA==", + "dev": true, + "requires": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*" + } + }, "@types/glob": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.1.1.tgz", - "integrity": "sha512-1Bh06cbWJUHMC97acuD6UMG29nMt0Aqz1vF3guLfG+kHHJhy3AyohZFFxYk2f7Q1SQIrNwvncxAE0N/9s70F2w==", + "version": "7.1.3", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.1.3.tgz", + "integrity": "sha512-SEYeGAIQIQX8NN6LDKprLjbrd5dARM5EXsd8GI/A5l0apYI1fGMWgPHSe4ZKL4eozlAyI+doUE9XbYS4xCkQ1w==", "dev": true, "requires": { - "@types/events": "*", "@types/minimatch": "*", "@types/node": "*" } }, + "@types/http-proxy": { + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/@types/http-proxy/-/http-proxy-1.17.5.tgz", + "integrity": "sha512-GNkDE7bTv6Sf8JbV2GksknKOsk7OznNYHSdrtvPJXO0qJ9odZig6IZKUi5RFGi6d1bf6dgIAe4uXi3DBc7069Q==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/http-proxy-middleware": { + "version": "0.19.3", + "resolved": "https://registry.npmjs.org/@types/http-proxy-middleware/-/http-proxy-middleware-0.19.3.tgz", + "integrity": "sha512-lnBTx6HCOUeIJMLbI/LaL5EmdKLhczJY5oeXZpX/cXE4rRqb3RmV7VcMpiEfYkmTjipv3h7IAyIINe4plEv7cA==", + "dev": true, + "requires": { + "@types/connect": "*", + "@types/http-proxy": "*", + "@types/node": "*" + } + }, + "@types/json-schema": { + "version": "7.0.7", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.7.tgz", + "integrity": "sha512-cxWFQVseBm6O9Gbw1IWb8r6OS4OhSt3hPZLkFApLjM8TEXROBuQGLAH2i2gZpcXdLBIrpXuTDhH7Vbm1iXmNGA==", + "dev": true + }, + "@types/mime": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", + "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==", + "dev": true + }, "@types/minimatch": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.3.tgz", "integrity": "sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==", "dev": true }, + "@types/minimist": { + "version": "1.2.1", + "resolved": 
"https://registry.npmjs.org/@types/minimist/-/minimist-1.2.1.tgz", + "integrity": "sha512-fZQQafSREFyuZcdWFAExYjBiCL7AUCdgsk80iO0q4yihYYdcIiH28CcuPTGFgLOCC8RlW49GSQxdHwZP+I7CNg==", + "dev": true + }, "@types/node": { - "version": "12.6.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.6.2.tgz", - "integrity": "sha512-gojym4tX0FWeV2gsW4Xmzo5wxGjXGm550oVUII7f7G5o4BV6c7DBdiG1RRQd+y1bvqRyYtPfMK85UM95vsapqQ==", + "version": "14.14.25", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.25.tgz", + "integrity": "sha512-EPpXLOVqDvisVxtlbvzfyqSsFeQxltFbluZNRndIb8tr9KiBnYNLzrc1N3pyKUCww2RNrfHDViqDWWE1LCJQtQ==", "dev": true }, "@types/normalize-package-data": { @@ -983,408 +1286,590 @@ "dev": true }, "@types/q": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/@types/q/-/q-1.5.2.tgz", - "integrity": "sha512-ce5d3q03Ex0sy4R14722Rmt6MT07Ua+k4FwDfdcToYJcMKNtRVQvJ6JCAPdAmAnbRb6CsX6aYb9m96NGod9uTw==", + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/@types/q/-/q-1.5.4.tgz", + "integrity": "sha512-1HcDas8SEj4z1Wc696tH56G8OlRaH/sqZOynNNB+HF0WOeXPaxTtbYzJY2oEfiUxjSKjhCKr+MvR7dCHcEelug==", "dev": true }, + "@types/qs": { + "version": "6.9.5", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.5.tgz", + "integrity": "sha512-/JHkVHtx/REVG0VVToGRGH2+23hsYLHdyG+GrvoUGlGAd0ErauXDyvHtRI/7H7mzLm+tBCKA7pfcpkQ1lf58iQ==", + "dev": true + }, + "@types/range-parser": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.3.tgz", + "integrity": "sha512-ewFXqrQHlFsgc09MK5jP5iR7vumV/BYayNC6PgJO2LPe8vrnNFyjQjSppfEngITi0qvfKtzFvgKymGheFM9UOA==", + "dev": true + }, + "@types/serve-static": { + "version": "1.13.9", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.9.tgz", + "integrity": "sha512-ZFqF6qa48XsPdjXV5Gsz0Zqmux2PerNd3a/ktL45mHpa19cuMi/cL8tcxdAx497yRh+QtYPuofjT9oWw9P7nkA==", + "dev": true, + "requires": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "@types/source-list-map": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@types/source-list-map/-/source-list-map-0.1.2.tgz", + "integrity": "sha512-K5K+yml8LTo9bWJI/rECfIPrGgxdpeNbj+d53lwN4QjW1MCwlkhUms+gtdzigTeUyBr09+u8BwOIY3MXvHdcsA==", + "dev": true + }, + "@types/tapable": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/tapable/-/tapable-1.0.6.tgz", + "integrity": "sha512-W+bw9ds02rAQaMvaLYxAbJ6cvguW/iJXNT6lTssS1ps6QdrMKttqEAMEG/b5CR8TZl3/L7/lH0ZV5nNR1LXikA==", + "dev": true + }, + "@types/uglify-js": { + "version": "3.11.1", + "resolved": "https://registry.npmjs.org/@types/uglify-js/-/uglify-js-3.11.1.tgz", + "integrity": "sha512-7npvPKV+jINLu1SpSYVWG8KvyJBhBa8tmzMMdDoVc2pWUYHN8KIXlPJhjJ4LT97c4dXJA2SHL/q6ADbDriZN+Q==", + "dev": true, + "requires": { + "source-map": "^0.6.1" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } + } + }, + "@types/webpack": { + "version": "4.41.26", + "resolved": "https://registry.npmjs.org/@types/webpack/-/webpack-4.41.26.tgz", + "integrity": "sha512-7ZyTfxjCRwexh+EJFwRUM+CDB2XvgHl4vfuqf1ZKrgGvcS5BrNvPQqJh3tsZ0P6h6Aa1qClVHaJZszLPzpqHeA==", + "dev": true, + "requires": { + "@types/anymatch": "*", + "@types/node": "*", + "@types/tapable": "*", + "@types/uglify-js": "*", + "@types/webpack-sources": "*", + 
"source-map": "^0.6.0" + }, + "dependencies": { + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true + } + } + }, + "@types/webpack-dev-server": { + "version": "3.11.1", + "resolved": "https://registry.npmjs.org/@types/webpack-dev-server/-/webpack-dev-server-3.11.1.tgz", + "integrity": "sha512-rIb+LtUkKnh7+oIJm3WiMJONd71Q0lZuqGLcSqhZ5qjN9gV/CNmZe7Bai+brnBPZ/KVYOsr+4bFLiNZwjBicLw==", + "dev": true, + "requires": { + "@types/connect-history-api-fallback": "*", + "@types/express": "*", + "@types/http-proxy-middleware": "*", + "@types/serve-static": "*", + "@types/webpack": "*" + } + }, + "@types/webpack-sources": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/webpack-sources/-/webpack-sources-2.1.0.tgz", + "integrity": "sha512-LXn/oYIpBeucgP1EIJbKQ2/4ZmpvRl+dlrFdX7+94SKRUV3Evy3FsfMZY318vGhkWUS5MPhtOM3w1/hCOAOXcg==", + "dev": true, + "requires": { + "@types/node": "*", + "@types/source-list-map": "*", + "source-map": "^0.7.3" + }, + "dependencies": { + "source-map": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", + "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==", + "dev": true + } + } + }, "@vue/babel-helper-vue-jsx-merge-props": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@vue/babel-helper-vue-jsx-merge-props/-/babel-helper-vue-jsx-merge-props-1.0.0.tgz", - "integrity": "sha512-6tyf5Cqm4m6v7buITuwS+jHzPlIPxbFzEhXR5JGZpbrvOcp1hiQKckd305/3C7C36wFekNTQSxAtgeM0j0yoUw==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@vue/babel-helper-vue-jsx-merge-props/-/babel-helper-vue-jsx-merge-props-1.2.1.tgz", + "integrity": "sha512-QOi5OW45e2R20VygMSNhyQHvpdUwQZqGPc748JLGCYEy+yp8fNFNdbNIGAgZmi9e+2JHPd6i6idRuqivyicIkA==", + "dev": true + }, + "@vue/babel-helper-vue-transform-on": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@vue/babel-helper-vue-transform-on/-/babel-helper-vue-transform-on-1.0.2.tgz", + "integrity": "sha512-hz4R8tS5jMn8lDq6iD+yWL6XNB699pGIVLk7WSJnn1dbpjaazsjZQkieJoRX6gW5zpYSCFqQ7jUquPNY65tQYA==", "dev": true }, + "@vue/babel-plugin-jsx": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@vue/babel-plugin-jsx/-/babel-plugin-jsx-1.0.2.tgz", + "integrity": "sha512-1uZlQCLCeuqJgDYLCmg3qfsvTVtOQiXh278ES4bvPTYYbv2Bi/rElLETK6AdjI9xxzyTUf5n1QEiH8Xxz0eZrg==", + "dev": true, + "requires": { + "@babel/helper-module-imports": "^7.0.0", + "@babel/plugin-syntax-jsx": "^7.0.0", + "@babel/template": "^7.0.0", + "@babel/traverse": "^7.0.0", + "@babel/types": "^7.0.0", + "@vue/babel-helper-vue-transform-on": "^1.0.2", + "camelcase": "^6.0.0", + "html-tags": "^3.1.0", + "svg-tags": "^1.0.0" + } + }, "@vue/babel-plugin-transform-vue-jsx": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@vue/babel-plugin-transform-vue-jsx/-/babel-plugin-transform-vue-jsx-1.0.0.tgz", - "integrity": "sha512-U+JNwVQSmaLKjO3lzCUC3cNXxprgezV1N+jOdqbP4xWNaqtWUCJnkjTVcgECM18A/AinDKPcUUeoyhU7yxUxXQ==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@vue/babel-plugin-transform-vue-jsx/-/babel-plugin-transform-vue-jsx-1.2.1.tgz", + "integrity": "sha512-HJuqwACYehQwh1fNT8f4kyzqlNMpBuUK4rSiSES5D4QsYncv5fxFsLyrxFPG2ksO7t5WP+Vgix6tt6yKClwPzA==", "dev": true, "requires": { 
"@babel/helper-module-imports": "^7.0.0", "@babel/plugin-syntax-jsx": "^7.2.0", - "@vue/babel-helper-vue-jsx-merge-props": "^1.0.0", + "@vue/babel-helper-vue-jsx-merge-props": "^1.2.1", "html-tags": "^2.0.0", "lodash.kebabcase": "^4.1.1", "svg-tags": "^1.0.0" + }, + "dependencies": { + "html-tags": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-2.0.0.tgz", + "integrity": "sha1-ELMKOGCF9Dzt41PMj6fLDe7qZos=", + "dev": true + } } }, "@vue/babel-preset-app": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@vue/babel-preset-app/-/babel-preset-app-3.9.2.tgz", - "integrity": "sha512-0suuCbu4jkVcVYBjPmuKxeDbrhwThYZHu3DUmtsVuOzFEGeXmco60VmXveniL/bnDUdZyknSuYP4FxgS34gw9w==", + "version": "4.5.11", + "resolved": "https://registry.npmjs.org/@vue/babel-preset-app/-/babel-preset-app-4.5.11.tgz", + "integrity": "sha512-9VoFlm/9vhynKNGM+HA7qBsoQSUEnuG5i5kcFI9vTLLrh8A0fxrwUyVLLppO6T1sAZ6vrKdQFnEkjL+RkRAwWQ==", + "dev": true, + "requires": { + "@babel/core": "^7.11.0", + "@babel/helper-compilation-targets": "^7.9.6", + "@babel/helper-module-imports": "^7.8.3", + "@babel/plugin-proposal-class-properties": "^7.8.3", + "@babel/plugin-proposal-decorators": "^7.8.3", + "@babel/plugin-syntax-dynamic-import": "^7.8.3", + "@babel/plugin-syntax-jsx": "^7.8.3", + "@babel/plugin-transform-runtime": "^7.11.0", + "@babel/preset-env": "^7.11.0", + "@babel/runtime": "^7.11.0", + "@vue/babel-plugin-jsx": "^1.0.0-0", + "@vue/babel-preset-jsx": "^1.1.2", + "babel-plugin-dynamic-import-node": "^2.3.3", + "core-js": "^3.6.5", + "core-js-compat": "^3.6.5", + "semver": "^6.1.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } + } + }, + "@vue/babel-preset-jsx": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@vue/babel-preset-jsx/-/babel-preset-jsx-1.2.4.tgz", + "integrity": "sha512-oRVnmN2a77bYDJzeGSt92AuHXbkIxbf/XXSE3klINnh9AXBmVS1DGa1f0d+dDYpLfsAKElMnqKTQfKn7obcL4w==", "dev": true, "requires": { - "@babel/helper-module-imports": "^7.0.0", - "@babel/plugin-proposal-class-properties": "^7.0.0", - "@babel/plugin-proposal-decorators": "^7.1.0", - "@babel/plugin-syntax-dynamic-import": "^7.0.0", - "@babel/plugin-syntax-jsx": "^7.0.0", - "@babel/plugin-transform-runtime": "^7.4.0", - "@babel/preset-env": "^7.0.0 < 7.4.0", - "@babel/runtime": "^7.0.0", - "@babel/runtime-corejs2": "^7.2.0", - "@vue/babel-preset-jsx": "^1.0.0", - "babel-plugin-dynamic-import-node": "^2.2.0", - "babel-plugin-module-resolver": "3.2.0", - "core-js": "^2.6.5" + "@vue/babel-helper-vue-jsx-merge-props": "^1.2.1", + "@vue/babel-plugin-transform-vue-jsx": "^1.2.1", + "@vue/babel-sugar-composition-api-inject-h": "^1.2.1", + "@vue/babel-sugar-composition-api-render-instance": "^1.2.4", + "@vue/babel-sugar-functional-vue": "^1.2.2", + "@vue/babel-sugar-inject-h": "^1.2.2", + "@vue/babel-sugar-v-model": "^1.2.3", + "@vue/babel-sugar-v-on": "^1.2.3" } }, - "@vue/babel-preset-jsx": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@vue/babel-preset-jsx/-/babel-preset-jsx-1.0.0.tgz", - "integrity": "sha512-5CbDu/QHS+TtQNw5aYAffiMxBBB2Eo9+RJpS8X+6FJbdG5Rvc4TVipEqkrg0pJviWadNg7TEy0Uz4o7VNXeIZw==", + "@vue/babel-sugar-composition-api-inject-h": { + "version": "1.2.1", + "resolved": 
"https://registry.npmjs.org/@vue/babel-sugar-composition-api-inject-h/-/babel-sugar-composition-api-inject-h-1.2.1.tgz", + "integrity": "sha512-4B3L5Z2G+7s+9Bwbf+zPIifkFNcKth7fQwekVbnOA3cr3Pq71q71goWr97sk4/yyzH8phfe5ODVzEjX7HU7ItQ==", + "dev": true, + "requires": { + "@babel/plugin-syntax-jsx": "^7.2.0" + } + }, + "@vue/babel-sugar-composition-api-render-instance": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@vue/babel-sugar-composition-api-render-instance/-/babel-sugar-composition-api-render-instance-1.2.4.tgz", + "integrity": "sha512-joha4PZznQMsxQYXtR3MnTgCASC9u3zt9KfBxIeuI5g2gscpTsSKRDzWQt4aqNIpx6cv8On7/m6zmmovlNsG7Q==", "dev": true, "requires": { - "@vue/babel-helper-vue-jsx-merge-props": "^1.0.0", - "@vue/babel-plugin-transform-vue-jsx": "^1.0.0", - "@vue/babel-sugar-functional-vue": "^1.0.0", - "@vue/babel-sugar-inject-h": "^1.0.0", - "@vue/babel-sugar-v-model": "^1.0.0", - "@vue/babel-sugar-v-on": "^1.0.0" + "@babel/plugin-syntax-jsx": "^7.2.0" } }, "@vue/babel-sugar-functional-vue": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@vue/babel-sugar-functional-vue/-/babel-sugar-functional-vue-1.0.0.tgz", - "integrity": "sha512-XE/jNaaorTuhWayCz+QClk5AB9OV5HzrwbzEC6sIUY0J60A28ONQKeTwxfidW42egOkqNH/UU6eE3KLfmiDj0Q==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@vue/babel-sugar-functional-vue/-/babel-sugar-functional-vue-1.2.2.tgz", + "integrity": "sha512-JvbgGn1bjCLByIAU1VOoepHQ1vFsroSA/QkzdiSs657V79q6OwEWLCQtQnEXD/rLTA8rRit4rMOhFpbjRFm82w==", "dev": true, "requires": { "@babel/plugin-syntax-jsx": "^7.2.0" } }, "@vue/babel-sugar-inject-h": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@vue/babel-sugar-inject-h/-/babel-sugar-inject-h-1.0.0.tgz", - "integrity": "sha512-NxWU+DqtbZgfGvd25GPoFMj+rvyQ8ZA1pHj8vIeqRij+vx3sXoKkObjA9ulZunvWw5F6uG9xYy4ytpxab/X+Hg==", + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/@vue/babel-sugar-inject-h/-/babel-sugar-inject-h-1.2.2.tgz", + "integrity": "sha512-y8vTo00oRkzQTgufeotjCLPAvlhnpSkcHFEp60+LJUwygGcd5Chrpn5480AQp/thrxVm8m2ifAk0LyFel9oCnw==", "dev": true, "requires": { "@babel/plugin-syntax-jsx": "^7.2.0" } }, "@vue/babel-sugar-v-model": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@vue/babel-sugar-v-model/-/babel-sugar-v-model-1.0.0.tgz", - "integrity": "sha512-Pfg2Al0io66P1eO6zUbRIgpyKCU2qTnumiE0lao/wA/uNdb7Dx5Tfd1W6tO5SsByETPnEs8i8+gawRIXX40rFw==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@vue/babel-sugar-v-model/-/babel-sugar-v-model-1.2.3.tgz", + "integrity": "sha512-A2jxx87mySr/ulAsSSyYE8un6SIH0NWHiLaCWpodPCVOlQVODCaSpiR4+IMsmBr73haG+oeCuSvMOM+ttWUqRQ==", "dev": true, "requires": { "@babel/plugin-syntax-jsx": "^7.2.0", - "@vue/babel-helper-vue-jsx-merge-props": "^1.0.0", - "@vue/babel-plugin-transform-vue-jsx": "^1.0.0", + "@vue/babel-helper-vue-jsx-merge-props": "^1.2.1", + "@vue/babel-plugin-transform-vue-jsx": "^1.2.1", "camelcase": "^5.0.0", "html-tags": "^2.0.0", "svg-tags": "^1.0.0" + }, + "dependencies": { + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "html-tags": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-2.0.0.tgz", + "integrity": "sha1-ELMKOGCF9Dzt41PMj6fLDe7qZos=", + "dev": true + } } }, "@vue/babel-sugar-v-on": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/@vue/babel-sugar-v-on/-/babel-sugar-v-on-1.0.0.tgz", - "integrity": "sha512-2aqJaDLKdSSGlxZU+GjFERaSNUaa6DQreV+V/K4W/6Lxj8520/r1lChWEa/zuAoPD2Vhy0D2QrqqO+I0D6CkKw==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/@vue/babel-sugar-v-on/-/babel-sugar-v-on-1.2.3.tgz", + "integrity": "sha512-kt12VJdz/37D3N3eglBywV8GStKNUhNrsxChXIV+o0MwVXORYuhDTHJRKPgLJRb/EY3vM2aRFQdxJBp9CLikjw==", "dev": true, "requires": { "@babel/plugin-syntax-jsx": "^7.2.0", - "@vue/babel-plugin-transform-vue-jsx": "^1.0.0", + "@vue/babel-plugin-transform-vue-jsx": "^1.2.1", "camelcase": "^5.0.0" + }, + "dependencies": { + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + } } }, "@vue/cli-overlay": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/@vue/cli-overlay/-/cli-overlay-3.9.0.tgz", - "integrity": "sha512-QfyvpJl2ChehBT2qzb5EvW921JxW94uFL3+lHa6VT42ImH8awrvkTGZmxTQWhHvATa7r0LKy7M7ZRMyo547esg==", + "version": "4.5.11", + "resolved": "https://registry.npmjs.org/@vue/cli-overlay/-/cli-overlay-4.5.11.tgz", + "integrity": "sha512-aDQNw+oGk5+KR0vL9TocjfzyYHTJxR2lS8iPbcL4lRglCs2dudOE7QWXypj5dM4rQus0jJ5fxJTS55o9uy9fcQ==", "dev": true }, "@vue/cli-plugin-babel": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@vue/cli-plugin-babel/-/cli-plugin-babel-3.9.2.tgz", - "integrity": "sha512-XqfmGjUGnnJ3NA+HC31F6nkBvB9pFDhk4Lxeao8ZNJcEjKNEBYjlmHunJQdIe/jEXXum6U+U/ZE6DjDStHTIMw==", + "version": "4.5.11", + "resolved": "https://registry.npmjs.org/@vue/cli-plugin-babel/-/cli-plugin-babel-4.5.11.tgz", + "integrity": "sha512-ogUMeO2waDtghIWwmuAzMJAnnPdmqRdJlwJDca9u6BK9jX1bxNThBSFS/MN2VmlYzulOnqH4zAC87jTWNg/czg==", "dev": true, "requires": { - "@babel/core": "^7.0.0", - "@vue/babel-preset-app": "^3.9.2", - "@vue/cli-shared-utils": "^3.9.0", - "babel-loader": "^8.0.5", - "webpack": ">=4 < 4.29" + "@babel/core": "^7.11.0", + "@vue/babel-preset-app": "^4.5.11", + "@vue/cli-shared-utils": "^4.5.11", + "babel-loader": "^8.1.0", + "cache-loader": "^4.1.0", + "thread-loader": "^2.1.3", + "webpack": "^4.0.0" } }, "@vue/cli-plugin-eslint": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@vue/cli-plugin-eslint/-/cli-plugin-eslint-3.9.2.tgz", - "integrity": "sha512-AdvWJN+4Px2r3hbTDM2/rCtTcS6VyI7XuRljbfr2V9nF9cJiH4qsXFrTCRj3OgupbXJ14fUGKrLxmznLZIm1jA==", + "version": "4.5.11", + "resolved": "https://registry.npmjs.org/@vue/cli-plugin-eslint/-/cli-plugin-eslint-4.5.11.tgz", + "integrity": "sha512-6XrF3A3ryjtqoPMYL0ltZaP0631HS2a68Ye34KIkz111EKXtC5ip+gz6bSPWrH5SbhinU3R8cstA8xVASz9kwg==", "dev": true, "requires": { - "@vue/cli-shared-utils": "^3.9.0", - "babel-eslint": "^10.0.1", - "eslint": "^4.19.1", - "eslint-loader": "^2.1.2", - "eslint-plugin-vue": "^4.7.1", + "@vue/cli-shared-utils": "^4.5.11", + "eslint-loader": "^2.2.1", "globby": "^9.2.0", - "webpack": ">=4 < 4.29", + "inquirer": "^7.1.0", + "webpack": "^4.0.0", "yorkie": "^2.0.0" + } + }, + "@vue/cli-plugin-router": { + "version": "4.5.11", + "resolved": "https://registry.npmjs.org/@vue/cli-plugin-router/-/cli-plugin-router-4.5.11.tgz", + "integrity": "sha512-09tzw3faOs48IUPwLutYaNC7eoyyL140fKruTwdFdXuBLDdSQVida57Brx0zj2UKXc5qF8hk4GoGrOshN0KfNg==", + "dev": true, + "requires": { + "@vue/cli-shared-utils": "^4.5.11" + } + }, + "@vue/cli-plugin-vuex": { + "version": "4.5.11", + "resolved": 
"https://registry.npmjs.org/@vue/cli-plugin-vuex/-/cli-plugin-vuex-4.5.11.tgz", + "integrity": "sha512-JBPeZLubiSHbRkEKDj0tnLiU43AJ3vt6JULn4IKWH1XWZ6MFC8vElaP5/AA4O3Zko5caamDDBq3TRyxdA2ncUQ==", + "dev": true + }, + "@vue/cli-service": { + "version": "4.5.11", + "resolved": "https://registry.npmjs.org/@vue/cli-service/-/cli-service-4.5.11.tgz", + "integrity": "sha512-FXeJh2o6B8q/njv2Ebhe9EsLXt9sPMXGDY5zVvcV5jgj9wkoej9yLfnmwWCau5kegNClP6bcM+BEHuMYxJ+ubQ==", + "dev": true, + "requires": { + "@intervolga/optimize-cssnano-plugin": "^1.0.5", + "@soda/friendly-errors-webpack-plugin": "^1.7.1", + "@soda/get-current-script": "^1.0.0", + "@types/minimist": "^1.2.0", + "@types/webpack": "^4.0.0", + "@types/webpack-dev-server": "^3.11.0", + "@vue/cli-overlay": "^4.5.11", + "@vue/cli-plugin-router": "^4.5.11", + "@vue/cli-plugin-vuex": "^4.5.11", + "@vue/cli-shared-utils": "^4.5.11", + "@vue/component-compiler-utils": "^3.1.2", + "@vue/preload-webpack-plugin": "^1.1.0", + "@vue/web-component-wrapper": "^1.2.0", + "acorn": "^7.4.0", + "acorn-walk": "^7.1.1", + "address": "^1.1.2", + "autoprefixer": "^9.8.6", + "browserslist": "^4.12.0", + "cache-loader": "^4.1.0", + "case-sensitive-paths-webpack-plugin": "^2.3.0", + "cli-highlight": "^2.1.4", + "clipboardy": "^2.3.0", + "cliui": "^6.0.0", + "copy-webpack-plugin": "^5.1.1", + "css-loader": "^3.5.3", + "cssnano": "^4.1.10", + "debug": "^4.1.1", + "default-gateway": "^5.0.5", + "dotenv": "^8.2.0", + "dotenv-expand": "^5.1.0", + "file-loader": "^4.2.0", + "fs-extra": "^7.0.1", + "globby": "^9.2.0", + "hash-sum": "^2.0.0", + "html-webpack-plugin": "^3.2.0", + "launch-editor-middleware": "^2.2.1", + "lodash.defaultsdeep": "^4.6.1", + "lodash.mapvalues": "^4.6.0", + "lodash.transform": "^4.6.0", + "mini-css-extract-plugin": "^0.9.0", + "minimist": "^1.2.5", + "pnp-webpack-plugin": "^1.6.4", + "portfinder": "^1.0.26", + "postcss-loader": "^3.0.0", + "ssri": "^7.1.0", + "terser-webpack-plugin": "^2.3.6", + "thread-loader": "^2.1.3", + "url-loader": "^2.2.0", + "vue-loader": "^15.9.2", + "vue-loader-v16": "npm:vue-loader@^16.1.0", + "vue-style-loader": "^4.1.2", + "webpack": "^4.0.0", + "webpack-bundle-analyzer": "^3.8.0", + "webpack-chain": "^6.4.0", + "webpack-dev-server": "^3.11.0", + "webpack-merge": "^4.2.2" }, "dependencies": { - "ajv": { - "version": "5.5.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", - "integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=", + "acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "optional": true, "requires": { - "co": "^4.6.0", - "fast-deep-equal": "^1.0.0", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.3.0" + "color-convert": "^2.0.1" } }, - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true, - "optional": true + "cacache": { + "version": "13.0.1", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-13.0.1.tgz", + "integrity": "sha512-5ZvAxd05HDDU+y9BVvcqYu2LLXmPnQ0hW62h32g4xBTgL/MppR4/04NHfj/ycM2y6lmTnbw6HVi+1eN0Psba6w==", 
+ "dev": true, + "requires": { + "chownr": "^1.1.2", + "figgy-pudding": "^3.5.1", + "fs-minipass": "^2.0.0", + "glob": "^7.1.4", + "graceful-fs": "^4.2.2", + "infer-owner": "^1.0.4", + "lru-cache": "^5.1.1", + "minipass": "^3.0.0", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.2", + "mkdirp": "^0.5.1", + "move-concurrently": "^1.0.1", + "p-map": "^3.0.0", + "promise-inflight": "^1.0.1", + "rimraf": "^2.7.1", + "ssri": "^7.0.0", + "unique-filename": "^1.1.1" + } }, - "cross-spawn": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", - "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", + "chalk": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", + "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", "dev": true, "optional": true, "requires": { - "lru-cache": "^4.0.1", - "shebang-command": "^1.2.0", - "which": "^1.2.9" + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" } }, - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "optional": true, "requires": { - "ms": "^2.1.1" + "color-name": "~1.1.4" } }, - "eslint": { - "version": "4.19.1", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-4.19.1.tgz", - "integrity": "sha512-bT3/1x1EbZB7phzYu7vCr1v3ONuzDtX8WjuM9c0iYxe+cq+pwcKEoQjl7zd3RpC6YOLgnSy3cTN58M2jcoPDIQ==", + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true, - "optional": true, - "requires": { - "ajv": "^5.3.0", - "babel-code-frame": "^6.22.0", - "chalk": "^2.1.0", - "concat-stream": "^1.6.0", - "cross-spawn": "^5.1.0", - "debug": "^3.1.0", - "doctrine": "^2.1.0", - "eslint-scope": "^3.7.1", - "eslint-visitor-keys": "^1.0.0", - "espree": "^3.5.4", - "esquery": "^1.0.0", - "esutils": "^2.0.2", - "file-entry-cache": "^2.0.0", - "functional-red-black-tree": "^1.0.1", - "glob": "^7.1.2", - "globals": "^11.0.1", - "ignore": "^3.3.3", - "imurmurhash": "^0.1.4", - "inquirer": "^3.0.6", - "is-resolvable": "^1.0.0", - "js-yaml": "^3.9.1", - "json-stable-stringify-without-jsonify": "^1.0.1", - "levn": "^0.3.0", - "lodash": "^4.17.4", - "minimatch": "^3.0.2", - "mkdirp": "^0.5.1", - "natural-compare": "^1.4.0", - "optionator": "^0.8.2", - "path-is-inside": "^1.0.2", - "pluralize": "^7.0.0", - "progress": "^2.0.0", - "regexpp": "^1.0.1", - "require-uncached": "^1.0.3", - "semver": "^5.3.0", - "strip-ansi": "^4.0.0", - "strip-json-comments": "~2.0.1", - "table": "4.0.2", - "text-table": "~0.2.0" - } - }, - "eslint-plugin-vue": { - "version": "4.7.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-vue/-/eslint-plugin-vue-4.7.1.tgz", - "integrity": "sha512-esETKhVMI7Vdli70Wt4bvAwnZBJeM0pxVX9Yb0wWKxdCJc2EADalVYK/q2FzMw8oKN0wPMdqVCKS8kmR89recA==", + "optional": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + 
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true, - "optional": true, - "requires": { - "vue-eslint-parser": "^2.0.3" - } + "optional": true }, - "eslint-scope": { - "version": "3.7.3", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-3.7.3.tgz", - "integrity": "sha512-W+B0SvF4gamyCTmUc+uITPY0989iXVfKvhwtmJocTaYoc/3khEHmEmvfY/Gn9HA9VV75jrQECsHizkNw1b68FA==", + "loader-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.0.tgz", + "integrity": "sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ==", "dev": true, "optional": true, "requires": { - "esrecurse": "^4.1.0", - "estraverse": "^4.1.1" + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^2.1.2" } }, - "fast-deep-equal": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", - "integrity": "sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ=", - "dev": true, - "optional": true + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true }, - "json-schema-traverse": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", - "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A=", + "ssri": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-7.1.0.tgz", + "integrity": "sha512-77/WrDZUWocK0mvA5NTRQyveUf+wsrIc6vyrxpS8tVvYBcX215QbafrJR3KtkpskIzoFLqqNuuYQvxaMjXJ/0g==", "dev": true, - "optional": true + "requires": { + "figgy-pudding": "^3.5.1", + "minipass": "^3.1.1" + } }, - "lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "optional": true, "requires": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" + "has-flag": "^4.0.0" } }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "terser-webpack-plugin": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-2.3.8.tgz", + "integrity": "sha512-/fKw3R+hWyHfYx7Bv6oPqmk4HGQcrWLtV3X6ggvPuwPNHSnzvVV51z6OaaCOus4YLjutYGOz3pEpbhe6Up2s1w==", "dev": true, - "optional": true, "requires": { - "ansi-regex": "^3.0.0" + "cacache": "^13.0.1", + "find-cache-dir": "^3.3.1", + "jest-worker": "^25.4.0", + "p-limit": "^2.3.0", + "schema-utils": "^2.6.6", + "serialize-javascript": "^4.0.0", + "source-map": "^0.6.1", + "terser": "^4.6.12", + "webpack-sources": "^1.4.3" } }, - "yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", + "vue-loader-v16": { + "version": "npm:vue-loader@16.1.2", + "resolved": "https://registry.npmjs.org/vue-loader/-/vue-loader-16.1.2.tgz", + "integrity": 
"sha512-8QTxh+Fd+HB6fiL52iEVLKqE9N1JSlMXLR92Ijm6g8PZrwIxckgpqjPDWRP5TWxdiPaHR+alUWsnu1ShQOwt+Q==", "dev": true, - "optional": true - } - } - }, - "@vue/cli-service": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@vue/cli-service/-/cli-service-3.9.2.tgz", - "integrity": "sha512-R4L9tCMpJ4DzLgu/aU9CEtl5QYsj/FXRrtEgXSKm+71OVtA/o2rkLTC8SLB2Bu7wHP/HCYbaoy4NZqSEQzTuLw==", - "dev": true, - "requires": { - "@intervolga/optimize-cssnano-plugin": "^1.0.5", - "@soda/friendly-errors-webpack-plugin": "^1.7.1", - "@vue/cli-overlay": "^3.9.0", - "@vue/cli-shared-utils": "^3.9.0", - "@vue/component-compiler-utils": "^2.6.0", - "@vue/preload-webpack-plugin": "^1.1.0", - "@vue/web-component-wrapper": "^1.2.0", - "acorn": "^6.1.1", - "acorn-walk": "^6.1.1", - "address": "^1.0.3", - "autoprefixer": "^9.5.1", - "browserslist": "^4.5.4", - "cache-loader": "^2.0.1", - "case-sensitive-paths-webpack-plugin": "^2.2.0", - "chalk": "^2.4.2", - "cli-highlight": "^2.1.0", - "clipboardy": "^2.0.0", - "cliui": "^5.0.0", - "copy-webpack-plugin": "^4.6.0", - "css-loader": "^1.0.1", - "cssnano": "^4.1.10", - "current-script-polyfill": "^1.0.0", - "debug": "^4.1.1", - "default-gateway": "^5.0.2", - "dotenv": "^7.0.0", - "dotenv-expand": "^5.1.0", - "escape-string-regexp": "^1.0.5", - "file-loader": "^3.0.1", - "fs-extra": "^7.0.1", - "globby": "^9.2.0", - "hash-sum": "^1.0.2", - "html-webpack-plugin": "^3.2.0", - "launch-editor-middleware": "^2.2.1", - "lodash.defaultsdeep": "^4.6.0", - "lodash.mapvalues": "^4.6.0", - "lodash.transform": "^4.6.0", - "mini-css-extract-plugin": "^0.6.0", - "minimist": "^1.2.0", - "ora": "^3.4.0", - "portfinder": "^1.0.20", - "postcss-loader": "^3.0.0", - "read-pkg": "^5.0.0", - "semver": "^6.0.0", - "slash": "^2.0.0", - "source-map-url": "^0.4.0", - "ssri": "^6.0.1", - "string.prototype.padend": "^3.0.0", - "terser-webpack-plugin": "^1.2.3", - "thread-loader": "^2.1.2", - "url-loader": "^1.1.2", - "vue-loader": "^15.7.0", - "webpack": ">=4 < 4.29", - "webpack-bundle-analyzer": "^3.3.0", - "webpack-chain": "^4.11.0", - "webpack-dev-server": "^3.4.1", - "webpack-merge": "^4.2.1" - }, - "dependencies": { - "acorn": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.2.0.tgz", - "integrity": "sha512-8oe72N3WPMjA+2zVG71Ia0nXZ8DpQH+QyyHO+p06jT8eg8FGG3FbcUIi8KziHlAfheJQZeoqbvq1mQSQHXKYLw==", - "dev": true - }, - "semver": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.2.0.tgz", - "integrity": "sha512-jdFC1VdUGT/2Scgbimf7FSx9iJLXoqfglSF+gJeuNWVpiE37OIbc1jywR/GJyFdz3mnkz2/id0L0J/cr0izR5A==", - "dev": true + "optional": true, + "requires": { + "chalk": "^4.1.0", + "hash-sum": "^2.0.0", + "loader-utils": "^2.0.0" + } } } }, "@vue/cli-shared-utils": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/@vue/cli-shared-utils/-/cli-shared-utils-3.9.0.tgz", - "integrity": "sha512-wumeMZTz5aQ+1Y6uxTKegIsgOXEWT3hT8f9sW2mj5SwNDVyQ+AHZTgSynYExTUJg3dH81uKgFDUpPdAvGxzh8g==", + "version": "4.5.11", + "resolved": "https://registry.npmjs.org/@vue/cli-shared-utils/-/cli-shared-utils-4.5.11.tgz", + "integrity": "sha512-+aaQ+ThQG3+WMexfSWNl0y6f43edqVqRNbguE53F3TIH81I7saS5S750ayqXhZs2r6STJJyqorQnKtAWfHo29A==", "dev": true, "requires": { "@hapi/joi": "^15.0.1", - "chalk": "^2.4.1", + "chalk": "^2.4.2", "execa": "^1.0.0", "launch-editor": "^2.2.1", "lru-cache": "^5.1.1", "node-ipc": "^9.1.1", "open": "^6.3.0", "ora": "^3.4.0", - "request": "^2.87.0", - "request-promise-native": "^1.0.7", - "semver": "^6.0.0", - 
"string.prototype.padstart": "^3.0.0" + "read-pkg": "^5.1.1", + "request": "^2.88.2", + "semver": "^6.1.0", + "strip-ansi": "^6.0.0" }, "dependencies": { "semver": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.2.0.tgz", - "integrity": "sha512-jdFC1VdUGT/2Scgbimf7FSx9iJLXoqfglSF+gJeuNWVpiE37OIbc1jywR/GJyFdz3mnkz2/id0L0J/cr0izR5A==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "dev": true } } }, "@vue/component-compiler-utils": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/@vue/component-compiler-utils/-/component-compiler-utils-2.6.0.tgz", - "integrity": "sha512-IHjxt7LsOFYc0DkTncB7OXJL7UzwOLPPQCfEUNyxL2qt+tF12THV+EO33O1G2Uk4feMSWua3iD39Itszx0f0bw==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/@vue/component-compiler-utils/-/component-compiler-utils-3.2.0.tgz", + "integrity": "sha512-lejBLa7xAMsfiZfNp7Kv51zOzifnb29FwdnMLa96z26kXErPFioSf9BMcePVIQ6/Gc6/mC0UrPpxAWIHyae0vw==", "dev": true, "requires": { "consolidate": "^0.15.1", @@ -1392,12 +1877,18 @@ "lru-cache": "^4.1.2", "merge-source-map": "^1.1.0", "postcss": "^7.0.14", - "postcss-selector-parser": "^5.0.0", - "prettier": "1.16.3", + "postcss-selector-parser": "^6.0.2", + "prettier": "^1.18.2", "source-map": "~0.6.1", "vue-template-es2015-compiler": "^1.9.0" }, "dependencies": { + "hash-sum": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/hash-sum/-/hash-sum-1.0.2.tgz", + "integrity": "sha1-M7QHd3VMZDJXPBIMw4CLvRDUfwQ=", + "dev": true + }, "lru-cache": { "version": "4.1.5", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", @@ -1423,9 +1914,9 @@ } }, "@vue/preload-webpack-plugin": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@vue/preload-webpack-plugin/-/preload-webpack-plugin-1.1.0.tgz", - "integrity": "sha512-rcn2KhSHESBFMPj5vc5X2pI9bcBNQQixvJXhD5gZ4rN2iym/uH2qfDSQfUS5+qwiz0a85TCkeUs6w6jxFDudbw==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@vue/preload-webpack-plugin/-/preload-webpack-plugin-1.1.2.tgz", + "integrity": "sha512-LIZMuJk38pk9U9Ur4YzHjlIyMuxPlACdBIHH9/nGYVTsaGKOSnSuELiE8vS9wa+dJpIYspYUOqk+L1Q4pgHQHQ==", "dev": true }, "@vue/web-component-wrapper": { @@ -1435,175 +1926,178 @@ "dev": true }, "@webassemblyjs/ast": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.7.11.tgz", - "integrity": "sha512-ZEzy4vjvTzScC+SH8RBssQUawpaInUdMTYwYYLh54/s8TuT0gBLuyUnppKsVyZEi876VmmStKsUs28UxPgdvrA==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.9.0.tgz", + "integrity": "sha512-C6wW5L+b7ogSDVqymbkkvuW9kruN//YisMED04xzeBBqjHa2FYnmvOlS6Xj68xWQRgWvI9cIglsjFowH/RJyEA==", "dev": true, "requires": { - "@webassemblyjs/helper-module-context": "1.7.11", - "@webassemblyjs/helper-wasm-bytecode": "1.7.11", - "@webassemblyjs/wast-parser": "1.7.11" + "@webassemblyjs/helper-module-context": "1.9.0", + "@webassemblyjs/helper-wasm-bytecode": "1.9.0", + "@webassemblyjs/wast-parser": "1.9.0" } }, "@webassemblyjs/floating-point-hex-parser": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.7.11.tgz", - "integrity": "sha512-zY8dSNyYcgzNRNT666/zOoAyImshm3ycKdoLsyDw/Bwo6+/uktb7p4xyApuef1dwEBo/U/SYQzbGBvV+nru2Xg==", + "version": "1.9.0", + "resolved": 
"https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.9.0.tgz", + "integrity": "sha512-TG5qcFsS8QB4g4MhrxK5TqfdNe7Ey/7YL/xN+36rRjl/BlGE/NcBvJcqsRgCP6Z92mRE+7N50pRIi8SmKUbcQA==", "dev": true }, "@webassemblyjs/helper-api-error": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.7.11.tgz", - "integrity": "sha512-7r1qXLmiglC+wPNkGuXCvkmalyEstKVwcueZRP2GNC2PAvxbLYwLLPr14rcdJaE4UtHxQKfFkuDFuv91ipqvXg==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.9.0.tgz", + "integrity": "sha512-NcMLjoFMXpsASZFxJ5h2HZRcEhDkvnNFOAKneP5RbKRzaWJN36NC4jqQHKwStIhGXu5mUWlUUk7ygdtrO8lbmw==", "dev": true }, "@webassemblyjs/helper-buffer": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.7.11.tgz", - "integrity": "sha512-MynuervdylPPh3ix+mKZloTcL06P8tenNH3sx6s0qE8SLR6DdwnfgA7Hc9NSYeob2jrW5Vql6GVlsQzKQCa13w==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.9.0.tgz", + "integrity": "sha512-qZol43oqhq6yBPx7YM3m9Bv7WMV9Eevj6kMi6InKOuZxhw+q9hOkvq5e/PpKSiLfyetpaBnogSbNCfBwyB00CA==", "dev": true }, "@webassemblyjs/helper-code-frame": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.7.11.tgz", - "integrity": "sha512-T8ESC9KMXFTXA5urJcyor5cn6qWeZ4/zLPyWeEXZ03hj/x9weSokGNkVCdnhSabKGYWxElSdgJ+sFa9G/RdHNw==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-code-frame/-/helper-code-frame-1.9.0.tgz", + "integrity": "sha512-ERCYdJBkD9Vu4vtjUYe8LZruWuNIToYq/ME22igL+2vj2dQ2OOujIZr3MEFvfEaqKoVqpsFKAGsRdBSBjrIvZA==", "dev": true, "requires": { - "@webassemblyjs/wast-printer": "1.7.11" + "@webassemblyjs/wast-printer": "1.9.0" } }, "@webassemblyjs/helper-fsm": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-fsm/-/helper-fsm-1.7.11.tgz", - "integrity": "sha512-nsAQWNP1+8Z6tkzdYlXT0kxfa2Z1tRTARd8wYnc/e3Zv3VydVVnaeePgqUzFrpkGUyhUUxOl5ML7f1NuT+gC0A==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-fsm/-/helper-fsm-1.9.0.tgz", + "integrity": "sha512-OPRowhGbshCb5PxJ8LocpdX9Kl0uB4XsAjl6jH/dWKlk/mzsANvhwbiULsaiqT5GZGT9qinTICdj6PLuM5gslw==", "dev": true }, "@webassemblyjs/helper-module-context": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-module-context/-/helper-module-context-1.7.11.tgz", - "integrity": "sha512-JxfD5DX8Ygq4PvXDucq0M+sbUFA7BJAv/GGl9ITovqE+idGX+J3QSzJYz+LwQmL7fC3Rs+utvWoJxDb6pmC0qg==", - "dev": true + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-module-context/-/helper-module-context-1.9.0.tgz", + "integrity": "sha512-MJCW8iGC08tMk2enck1aPW+BE5Cw8/7ph/VGZxwyvGbJwjktKkDK7vy7gAmMDx88D7mhDTCNKAW5tED+gZ0W8g==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.9.0" + } }, "@webassemblyjs/helper-wasm-bytecode": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.7.11.tgz", - "integrity": "sha512-cMXeVS9rhoXsI9LLL4tJxBgVD/KMOKXuFqYb5oCJ/opScWpkCMEz9EJtkonaNcnLv2R3K5jIeS4TRj/drde1JQ==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.9.0.tgz", + "integrity": 
"sha512-R7FStIzyNcd7xKxCZH5lE0Bqy+hGTwS3LJjuv1ZVxd9O7eHCedSdrId/hMOd20I+v8wDXEn+bjfKDLzTepoaUw==", "dev": true }, "@webassemblyjs/helper-wasm-section": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.7.11.tgz", - "integrity": "sha512-8ZRY5iZbZdtNFE5UFunB8mmBEAbSI3guwbrsCl4fWdfRiAcvqQpeqd5KHhSWLL5wuxo53zcaGZDBU64qgn4I4Q==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.9.0.tgz", + "integrity": "sha512-XnMB8l3ek4tvrKUUku+IVaXNHz2YsJyOOmz+MMkZvh8h1uSJpSen6vYnw3IoQ7WwEuAhL8Efjms1ZWjqh2agvw==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/helper-buffer": "1.7.11", - "@webassemblyjs/helper-wasm-bytecode": "1.7.11", - "@webassemblyjs/wasm-gen": "1.7.11" + "@webassemblyjs/ast": "1.9.0", + "@webassemblyjs/helper-buffer": "1.9.0", + "@webassemblyjs/helper-wasm-bytecode": "1.9.0", + "@webassemblyjs/wasm-gen": "1.9.0" } }, "@webassemblyjs/ieee754": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.7.11.tgz", - "integrity": "sha512-Mmqx/cS68K1tSrvRLtaV/Lp3NZWzXtOHUW2IvDvl2sihAwJh4ACE0eL6A8FvMyDG9abes3saB6dMimLOs+HMoQ==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.9.0.tgz", + "integrity": "sha512-dcX8JuYU/gvymzIHc9DgxTzUUTLexWwt8uCTWP3otys596io0L5aW02Gb1RjYpx2+0Jus1h4ZFqjla7umFniTg==", "dev": true, "requires": { "@xtuc/ieee754": "^1.2.0" } }, "@webassemblyjs/leb128": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.7.11.tgz", - "integrity": "sha512-vuGmgZjjp3zjcerQg+JA+tGOncOnJLWVkt8Aze5eWQLwTQGNgVLcyOTqgSCxWTR4J42ijHbBxnuRaL1Rv7XMdw==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.9.0.tgz", + "integrity": "sha512-ENVzM5VwV1ojs9jam6vPys97B/S65YQtv/aanqnU7D8aSoHFX8GyhGg0CMfyKNIHBuAVjy3tlzd5QMMINa7wpw==", "dev": true, "requires": { - "@xtuc/long": "4.2.1" + "@xtuc/long": "4.2.2" } }, "@webassemblyjs/utf8": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.7.11.tgz", - "integrity": "sha512-C6GFkc7aErQIAH+BMrIdVSmW+6HSe20wg57HEC1uqJP8E/xpMjXqQUxkQw07MhNDSDcGpxI9G5JSNOQCqJk4sA==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.9.0.tgz", + "integrity": "sha512-GZbQlWtopBTP0u7cHrEx+73yZKrQoBMpwkGEIqlacljhXCkVM1kMQge/Mf+csMJAjEdSwhOyLAS0AoR3AG5P8w==", "dev": true }, "@webassemblyjs/wasm-edit": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.7.11.tgz", - "integrity": "sha512-FUd97guNGsCZQgeTPKdgxJhBXkUbMTY6hFPf2Y4OedXd48H97J+sOY2Ltaq6WGVpIH8o/TGOVNiVz/SbpEMJGg==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.9.0.tgz", + "integrity": "sha512-FgHzBm80uwz5M8WKnMTn6j/sVbqilPdQXTWraSjBwFXSYGirpkSWE2R9Qvz9tNiTKQvoKILpCuTjBKzOIm0nxw==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/helper-buffer": "1.7.11", - "@webassemblyjs/helper-wasm-bytecode": "1.7.11", - "@webassemblyjs/helper-wasm-section": "1.7.11", - "@webassemblyjs/wasm-gen": "1.7.11", - "@webassemblyjs/wasm-opt": "1.7.11", - "@webassemblyjs/wasm-parser": "1.7.11", - "@webassemblyjs/wast-printer": "1.7.11" + "@webassemblyjs/ast": "1.9.0", + "@webassemblyjs/helper-buffer": "1.9.0", + "@webassemblyjs/helper-wasm-bytecode": "1.9.0", + 
"@webassemblyjs/helper-wasm-section": "1.9.0", + "@webassemblyjs/wasm-gen": "1.9.0", + "@webassemblyjs/wasm-opt": "1.9.0", + "@webassemblyjs/wasm-parser": "1.9.0", + "@webassemblyjs/wast-printer": "1.9.0" } }, "@webassemblyjs/wasm-gen": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.7.11.tgz", - "integrity": "sha512-U/KDYp7fgAZX5KPfq4NOupK/BmhDc5Kjy2GIqstMhvvdJRcER/kUsMThpWeRP8BMn4LXaKhSTggIJPOeYHwISA==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.9.0.tgz", + "integrity": "sha512-cPE3o44YzOOHvlsb4+E9qSqjc9Qf9Na1OO/BHFy4OI91XDE14MjFN4lTMezzaIWdPqHnsTodGGNP+iRSYfGkjA==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/helper-wasm-bytecode": "1.7.11", - "@webassemblyjs/ieee754": "1.7.11", - "@webassemblyjs/leb128": "1.7.11", - "@webassemblyjs/utf8": "1.7.11" + "@webassemblyjs/ast": "1.9.0", + "@webassemblyjs/helper-wasm-bytecode": "1.9.0", + "@webassemblyjs/ieee754": "1.9.0", + "@webassemblyjs/leb128": "1.9.0", + "@webassemblyjs/utf8": "1.9.0" } }, "@webassemblyjs/wasm-opt": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.7.11.tgz", - "integrity": "sha512-XynkOwQyiRidh0GLua7SkeHvAPXQV/RxsUeERILmAInZegApOUAIJfRuPYe2F7RcjOC9tW3Cb9juPvAC/sCqvg==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.9.0.tgz", + "integrity": "sha512-Qkjgm6Anhm+OMbIL0iokO7meajkzQD71ioelnfPEj6r4eOFuqm4YC3VBPqXjFyyNwowzbMD+hizmprP/Fwkl2A==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/helper-buffer": "1.7.11", - "@webassemblyjs/wasm-gen": "1.7.11", - "@webassemblyjs/wasm-parser": "1.7.11" + "@webassemblyjs/ast": "1.9.0", + "@webassemblyjs/helper-buffer": "1.9.0", + "@webassemblyjs/wasm-gen": "1.9.0", + "@webassemblyjs/wasm-parser": "1.9.0" } }, "@webassemblyjs/wasm-parser": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.7.11.tgz", - "integrity": "sha512-6lmXRTrrZjYD8Ng8xRyvyXQJYUQKYSXhJqXOBLw24rdiXsHAOlvw5PhesjdcaMadU/pyPQOJ5dHreMjBxwnQKg==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.9.0.tgz", + "integrity": "sha512-9+wkMowR2AmdSWQzsPEjFU7njh8HTO5MqO8vjwEHuM+AMHioNqSBONRdr0NQQ3dVQrzp0s8lTcYqzUdb7YgELA==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/helper-api-error": "1.7.11", - "@webassemblyjs/helper-wasm-bytecode": "1.7.11", - "@webassemblyjs/ieee754": "1.7.11", - "@webassemblyjs/leb128": "1.7.11", - "@webassemblyjs/utf8": "1.7.11" + "@webassemblyjs/ast": "1.9.0", + "@webassemblyjs/helper-api-error": "1.9.0", + "@webassemblyjs/helper-wasm-bytecode": "1.9.0", + "@webassemblyjs/ieee754": "1.9.0", + "@webassemblyjs/leb128": "1.9.0", + "@webassemblyjs/utf8": "1.9.0" } }, "@webassemblyjs/wast-parser": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-parser/-/wast-parser-1.7.11.tgz", - "integrity": "sha512-lEyVCg2np15tS+dm7+JJTNhNWq9yTZvi3qEhAIIOaofcYlUp0UR5/tVqOwa/gXYr3gjwSZqw+/lS9dscyLelbQ==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-parser/-/wast-parser-1.9.0.tgz", + "integrity": "sha512-qsqSAP3QQ3LyZjNC/0jBJ/ToSxfYJ8kYyuiGvtn/8MK89VrNEfwj7BPQzJVHi0jGTRK2dGdJ5PRqhtjzoww+bw==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/floating-point-hex-parser": "1.7.11", - 
"@webassemblyjs/helper-api-error": "1.7.11", - "@webassemblyjs/helper-code-frame": "1.7.11", - "@webassemblyjs/helper-fsm": "1.7.11", - "@xtuc/long": "4.2.1" + "@webassemblyjs/ast": "1.9.0", + "@webassemblyjs/floating-point-hex-parser": "1.9.0", + "@webassemblyjs/helper-api-error": "1.9.0", + "@webassemblyjs/helper-code-frame": "1.9.0", + "@webassemblyjs/helper-fsm": "1.9.0", + "@xtuc/long": "4.2.2" } }, "@webassemblyjs/wast-printer": { - "version": "1.7.11", - "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.7.11.tgz", - "integrity": "sha512-m5vkAsuJ32QpkdkDOUPGSltrg8Cuk3KBx4YrmAGQwCZPRdUHXxG4phIOuuycLemHFr74sWL9Wthqss4fzdzSwg==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.9.0.tgz", + "integrity": "sha512-2J0nE95rHXHyQ24cWjMKJ1tqB/ds8z/cyeOZxJhcb+rW+SQASVjuznUSmdz5GpVJTzU8JkhYut0D3siFDD6wsA==", "dev": true, "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/wast-parser": "1.7.11", - "@xtuc/long": "4.2.1" + "@webassemblyjs/ast": "1.9.0", + "@webassemblyjs/wast-parser": "1.9.0", + "@xtuc/long": "4.2.2" } }, "@xtuc/ieee754": { @@ -1613,9 +2107,9 @@ "dev": true }, "@xtuc/long": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.1.tgz", - "integrity": "sha512-FZdkNBDqBRHKQ2MEbSC17xnPFOhZxeJ2YGSfr2BKf3sujG49Qe3bB+rGCwQfIaA7WHnGeGkSijX4FuBCdrzW/g==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", + "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", "dev": true }, "accepts": { @@ -1629,58 +2123,46 @@ } }, "acorn": { - "version": "5.7.3", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-5.7.3.tgz", - "integrity": "sha512-T/zvzYRfbVojPWahDsE5evJdHb3oJoQfFbsrKM7w5Zcs++Tr257tia3BmMP8XYVjp1S9RZXQMh7gao96BlqZOw==", + "version": "6.4.2", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.4.2.tgz", + "integrity": "sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ==", "dev": true }, - "acorn-dynamic-import": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/acorn-dynamic-import/-/acorn-dynamic-import-3.0.0.tgz", - "integrity": "sha512-zVWV8Z8lislJoOKKqdNMOB+s6+XV5WERty8MnKBeFgwA+19XJjJHs2RP5dzM57FftIs+jQnRToLiWazKr6sSWg==", - "dev": true, - "requires": { - "acorn": "^5.0.0" - } - }, "acorn-jsx": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-3.0.1.tgz", - "integrity": "sha1-r9+UiPsezvyDSPb7IvRk4ypYs2s=", - "dev": true, - "optional": true, - "requires": { - "acorn": "^3.0.4" - }, - "dependencies": { - "acorn": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-3.3.0.tgz", - "integrity": "sha1-ReN/s56No/JbruP/U2niu18iAXo=", - "dev": true, - "optional": true - } - } + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.1.tgz", + "integrity": "sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng==", + "dev": true }, "acorn-walk": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-6.2.0.tgz", - "integrity": "sha512-7evsyfH1cLOCdAzZAd43Cic04yKydNx0cF+7tiA19p1XnLLPU4dpCQOqpjqwokFe//vS0QqfqqjCS2JkiIs0cA==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-7.2.0.tgz", + "integrity": 
"sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==", "dev": true }, "address": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/address/-/address-1.1.0.tgz", - "integrity": "sha512-4diPfzWbLEIElVG4AnqP+00SULlPzNuyJFNnmMrLgyaxG6tZXJ1sn7mjBu4fHrJE+Yp/jgylOweJn2xsLMFggQ==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/address/-/address-1.1.2.tgz", + "integrity": "sha512-aT6camzM4xEA54YVJYSqxz1kv4IHnQZRtThJJHhUMRExaU5spC7jX5ugSwTaTgJliIgs4VhZOk7htClvQ/LmRA==", "dev": true }, + "aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, + "requires": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + } + }, "ajv": { - "version": "6.10.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.1.tgz", - "integrity": "sha512-w1YQaVGNC6t2UCPjEawK/vo/dG8OOrVtUmhBT1uJJYxbl5kU2Tj3v6LGqBcsysN1yhuCStJCCA3GqdvKY8sqXQ==", + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dev": true, "requires": { - "fast-deep-equal": "^2.0.1", + "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" @@ -1693,9 +2175,9 @@ "dev": true }, "ajv-keywords": { - "version": "3.4.1", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.4.1.tgz", - "integrity": "sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ==", + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", "dev": true }, "alphanum-sort": { @@ -1704,12 +2186,6 @@ "integrity": "sha1-l6ERlkmyEa0zaR2fn0hqjsn74KM=", "dev": true }, - "amdefine": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/amdefine/-/amdefine-1.0.1.tgz", - "integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=", - "dev": true - }, "ansi-colors": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.4.tgz", @@ -1717,10 +2193,21 @@ "dev": true }, "ansi-escapes": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz", - "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ==", - "dev": true + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz", + "integrity": "sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA==", + "dev": true, + "requires": { + "type-fest": "^0.11.0" + }, + "dependencies": { + "type-fest": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.11.0.tgz", + "integrity": "sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ==", + "dev": true + } + } }, "ansi-html": { "version": "0.0.7", @@ -1750,24 +2237,13 @@ "dev": true }, "anymatch": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", - "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", + 
"version": "3.1.1", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.1.tgz", + "integrity": "sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==", "dev": true, "requires": { - "micromatch": "^3.1.4", - "normalize-path": "^2.1.1" - }, - "dependencies": { - "normalize-path": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", - "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", - "dev": true, - "requires": { - "remove-trailing-separator": "^1.0.1" - } - } + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" } }, "aproba": { @@ -1777,9 +2253,9 @@ "dev": true }, "arch": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/arch/-/arch-2.1.1.tgz", - "integrity": "sha512-BLM56aPo9vLLFVa8+/+pJLnrZ7QGGTVHWsCwieAWT9o9K8UeGaQbzZbGoabWLOo2ksBCztoXdqBZBplqLDDCSg==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", + "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", "dev": true }, "argparse": { @@ -1809,30 +2285,12 @@ "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=", "dev": true }, - "array-filter": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/array-filter/-/array-filter-0.0.1.tgz", - "integrity": "sha1-fajPLiZijtcygDWB/SH2fKzS7uw=", - "dev": true - }, "array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=", "dev": true }, - "array-map": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/array-map/-/array-map-0.0.0.tgz", - "integrity": "sha1-iKK6tz0c97zVwbEYoAP2b2ZfpmI=", - "dev": true - }, - "array-reduce": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/array-reduce/-/array-reduce-0.0.0.tgz", - "integrity": "sha1-FziZ0//Rx9k4PkR5Ul2+J4yrXys=", - "dev": true - }, "array-union": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/array-union/-/array-union-1.0.2.tgz", @@ -1864,14 +2322,23 @@ } }, "asn1.js": { - "version": "4.10.1", - "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-4.10.1.tgz", - "integrity": "sha512-p32cOF5q0Zqs9uBiONKYLm6BClCoBCM5O9JfeUSlnQLBTxYdTK+pW+nXflm8UkKd2UYlEbYz5qEi0JuZR9ckSw==", + "version": "5.4.1", + "resolved": "https://registry.npmjs.org/asn1.js/-/asn1.js-5.4.1.tgz", + "integrity": "sha512-+I//4cYPccV8LdmBLiX8CYvf9Sp3vQsrqu2QNXRcrbiWvcx/UdlFiqUJJzxRQxgsZmvhXhn4cSKeSmoFjVdupA==", "dev": true, "requires": { "bn.js": "^4.0.0", "inherits": "^2.0.1", - "minimalistic-assert": "^1.0.0" + "minimalistic-assert": "^1.0.0", + "safer-buffer": "^2.1.0" + }, + "dependencies": { + "bn.js": { + "version": "4.11.9", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", + "integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==", + "dev": true + } } }, "assert": { @@ -1920,10 +2387,13 @@ "dev": true }, "async": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/async/-/async-1.5.2.tgz", - "integrity": "sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=", - "dev": true + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", + "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", + "dev": true, + "requires": { + "lodash": "^4.17.14" + } }, "async-each": { "version": "1.0.3", @@ -1932,9 +2402,9 @@ "dev": true }, 
"async-limiter": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.0.tgz", - "integrity": "sha512-jp/uFnooOiO+L211eZOoSyzpOITMXx1rBITauYykG3BRYPu8h0UcxsPNB04RR5vo4Tyz3+ay17tR6JVf9qzYWg==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/async-limiter/-/async-limiter-1.0.1.tgz", + "integrity": "sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==", "dev": true }, "asynckit": { @@ -1950,26 +2420,18 @@ "dev": true }, "autoprefixer": { - "version": "9.6.1", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-9.6.1.tgz", - "integrity": "sha512-aVo5WxR3VyvyJxcJC3h4FKfwCQvQWb1tSI5VHNibddCVWrcD1NvlxEweg3TSgiPztMnWfjpy2FURKA2kvDE+Tw==", + "version": "9.8.6", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-9.8.6.tgz", + "integrity": "sha512-XrvP4VVHdRBCdX1S3WXVD8+RyG9qeb1D5Sn1DeLiG2xfSpzellk5k54xbUERJ3M5DggQxes39UGOTP8CFrEGbg==", "dev": true, "requires": { - "browserslist": "^4.6.3", - "caniuse-lite": "^1.0.30000980", - "chalk": "^2.4.2", + "browserslist": "^4.12.0", + "caniuse-lite": "^1.0.30001109", + "colorette": "^1.2.1", "normalize-range": "^0.1.2", "num2fraction": "^1.2.2", - "postcss": "^7.0.17", - "postcss-value-parser": "^4.0.0" - }, - "dependencies": { - "postcss-value-parser": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.0.0.tgz", - "integrity": "sha512-ESPktioptiSUchCKgggAkzdmkgzKfmp0EU8jXH+5kbIUB+unr0Y4CY9SRMvibuvYUBjNh1ACLbxqYNpdTQOteQ==", - "dev": true - } + "postcss": "^7.0.32", + "postcss-value-parser": "^4.1.0" } }, "aws-sign2": { @@ -1979,9 +2441,9 @@ "dev": true }, "aws4": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", - "integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==", + "version": "1.11.0", + "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz", + "integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA==", "dev": true }, "axios": { @@ -2013,9 +2475,9 @@ } }, "is-buffer": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.3.tgz", - "integrity": "sha512-U15Q7MXTuZlrbymiz95PJpZxu8IlipAp4dtS3wOdgPXx3mqBnslrWU14kxfHB+Py/+2PVKSr37dMAgM2A4uArw==", + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz", + "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==", "dev": true }, "ms": { @@ -2026,125 +2488,41 @@ } } }, - "babel-code-frame": { - "version": "6.26.0", - "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz", - "integrity": "sha1-Y/1D99weO7fONZR9uP42mj9Yx0s=", - "dev": true, - "requires": { - "chalk": "^1.1.3", - "esutils": "^2.0.2", - "js-tokens": "^3.0.2" - }, - "dependencies": { - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true - }, - "ansi-styles": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-2.2.1.tgz", - "integrity": "sha1-tDLdM1i2NM914eRmQ2gkBTPB3b4=", - "dev": true - }, - "chalk": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-1.1.3.tgz", - "integrity": "sha1-qBFcVeSnAv5NFQq9OHKCKn4J/Jg=", - "dev": true, - 
"requires": { - "ansi-styles": "^2.2.1", - "escape-string-regexp": "^1.0.2", - "has-ansi": "^2.0.0", - "strip-ansi": "^3.0.0", - "supports-color": "^2.0.0" - } - }, - "js-tokens": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-3.0.2.tgz", - "integrity": "sha1-mGbfOVECEw449/mWvOtlRDIJwls=", - "dev": true - }, - "strip-ansi": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", - "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "supports-color": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-2.0.0.tgz", - "integrity": "sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=", - "dev": true - } - } - }, "babel-eslint": { - "version": "10.0.2", - "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.0.2.tgz", - "integrity": "sha512-UdsurWPtgiPgpJ06ryUnuaSXC2s0WoSZnQmEpbAH65XZSdwowgN5MvyP7e88nW07FYXv72erVtpBkxyDVKhH1Q==", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/babel-eslint/-/babel-eslint-10.1.0.tgz", + "integrity": "sha512-ifWaTHQ0ce+448CYop8AdrQiBsGrnC+bMgfyKFdi6EsPLTAWG+QfyDeM6OH+FmWnKvEq5NnBMLvlBUPKQZoDSg==", "dev": true, "requires": { "@babel/code-frame": "^7.0.0", - "@babel/parser": "^7.0.0", - "@babel/traverse": "^7.0.0", - "@babel/types": "^7.0.0", - "eslint-scope": "3.7.1", - "eslint-visitor-keys": "^1.0.0" - }, - "dependencies": { - "eslint-scope": { - "version": "3.7.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-3.7.1.tgz", - "integrity": "sha1-PWPD7f2gLgbgGkUq2IyqzHzctug=", - "dev": true, - "requires": { - "esrecurse": "^4.1.0", - "estraverse": "^4.1.1" - } - } + "@babel/parser": "^7.7.0", + "@babel/traverse": "^7.7.0", + "@babel/types": "^7.7.0", + "eslint-visitor-keys": "^1.0.0", + "resolve": "^1.12.0" } }, "babel-loader": { - "version": "8.0.6", - "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.0.6.tgz", - "integrity": "sha512-4BmWKtBOBm13uoUwd08UwjZlaw3O9GWf456R9j+5YykFZ6LUIjIKLc0zEZf+hauxPOJs96C8k6FvYD09vWzhYw==", + "version": "8.2.2", + "resolved": "https://registry.npmjs.org/babel-loader/-/babel-loader-8.2.2.tgz", + "integrity": "sha512-JvTd0/D889PQBtUXJ2PXaKU/pjZDMtHA9V2ecm+eNRmmBCMR09a+fmpGTNwnJtFmFl5Ei7Vy47LjBb+L0wQ99g==", "dev": true, "requires": { - "find-cache-dir": "^2.0.0", - "loader-utils": "^1.0.2", - "mkdirp": "^0.5.1", - "pify": "^4.0.1" + "find-cache-dir": "^3.3.1", + "loader-utils": "^1.4.0", + "make-dir": "^3.1.0", + "schema-utils": "^2.6.5" } }, "babel-plugin-dynamic-import-node": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz", - "integrity": "sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ==", + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz", + "integrity": "sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==", "dev": true, "requires": { "object.assign": "^4.1.0" } }, - "babel-plugin-module-resolver": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/babel-plugin-module-resolver/-/babel-plugin-module-resolver-3.2.0.tgz", - "integrity": "sha512-tjR0GvSndzPew/Iayf4uICWZqjBwnlMWjSx6brryfQ81F9rxBVqwDJtFCV8oOs0+vJeefK9TmdZtkIFdFe1UnA==", - "dev": true, - "requires": { - "find-babel-config": 
"^1.1.0", - "glob": "^7.1.2", - "pkg-up": "^2.0.0", - "reselect": "^3.0.1", - "resolve": "^1.4.0" - } - }, "balanced-match": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", @@ -2207,9 +2585,9 @@ } }, "base64-js": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz", - "integrity": "sha512-ccav/yGvoa80BQDljCxsmmQ3Xvx60/UpBIij5QN21W3wBi/hhIC9OoO+KLpu9IJTS9j4DRVJ3aDDF9cMSoa2lw==", + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", "dev": true }, "batch": { @@ -2246,21 +2624,21 @@ "dev": true }, "binary-extensions": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", - "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", "dev": true }, "bluebird": { - "version": "3.5.5", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.5.tgz", - "integrity": "sha512-5am6HnnfN+urzt4yfg7IgTbotDjIT/u8AJpEt0sIU9FtXfVeezXAPKswrG+xKUCOYAINpSdgZVDU6QFh+cuH3w==", + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", "dev": true }, "bn.js": { - "version": "4.11.8", - "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.8.tgz", - "integrity": "sha512-ItfYfPLkWHUjckQCk8xC+LwxgK8NYcXywGigJgSwOP8Y2iyWT4f2vsZnoOXTTbo+o5yXmIUJ4gn5538SO5S3gA==", + "version": "5.1.3", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-5.1.3.tgz", + "integrity": "sha512-GkTiFpjFtUzU9CbMeJ5iazkCzGL3jrhzerzZIuqLABjbwRaFt33I9tUdSNryIptM+RxDet6OKm2WnLXzW51KsQ==", "dev": true }, "body-parser": { @@ -2415,28 +2793,49 @@ } }, "browserify-rsa": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.0.1.tgz", - "integrity": "sha1-IeCr+vbyApzy+vsTNWenAdQTVSQ=", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/browserify-rsa/-/browserify-rsa-4.1.0.tgz", + "integrity": "sha512-AdEER0Hkspgno2aR97SAf6vi0y0k8NuOpGnVH3O99rcA5Q6sh8QxcngtHuJ6uXwnfAXNM4Gn1Gb7/MV1+Ymbog==", "dev": true, "requires": { - "bn.js": "^4.1.0", + "bn.js": "^5.0.0", "randombytes": "^2.0.1" } }, "browserify-sign": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.0.4.tgz", - "integrity": "sha1-qk62jl17ZYuqa/alfmMMvXqT0pg=", - "dev": true, - "requires": { - "bn.js": "^4.1.1", - "browserify-rsa": "^4.0.0", - "create-hash": "^1.1.0", - "create-hmac": "^1.1.2", - "elliptic": "^6.0.0", - "inherits": "^2.0.1", - "parse-asn1": "^5.0.0" + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/browserify-sign/-/browserify-sign-4.2.1.tgz", + "integrity": "sha512-/vrA5fguVAKKAVTNJjgSm1tRQDHUU6DbwO9IROu/0WAzC8PKhucDSh18J0RMvVeHAn5puMd+QHC2erPRNf8lmg==", + "dev": true, + "requires": { + "bn.js": "^5.1.1", + "browserify-rsa": "^4.0.1", + "create-hash": "^1.2.0", + "create-hmac": "^1.1.7", + "elliptic": "^6.5.3", + "inherits": "^2.0.4", + "parse-asn1": "^5.1.5", + "readable-stream": "^3.6.0", + 
"safe-buffer": "^5.2.0" + }, + "dependencies": { + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + } } }, "browserify-zlib": { @@ -2449,20 +2848,22 @@ } }, "browserslist": { - "version": "4.6.4", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.6.4.tgz", - "integrity": "sha512-ErJT8qGfRt/VWHSr1HeqZzz50DvxHtr1fVL1m5wf20aGrG8e1ce8fpZ2EjZEfs09DDZYSvtRaDlMpWslBf8Low==", + "version": "4.16.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.16.3.tgz", + "integrity": "sha512-vIyhWmIkULaq04Gt93txdh+j02yX/JzlyhLYbV3YQCn/zvES3JnY7TifHHvvr1w5hTDluNKMkV05cs4vy8Q7sw==", "dev": true, "requires": { - "caniuse-lite": "^1.0.30000981", - "electron-to-chromium": "^1.3.188", - "node-releases": "^1.1.25" + "caniuse-lite": "^1.0.30001181", + "colorette": "^1.2.1", + "electron-to-chromium": "^1.3.649", + "escalade": "^3.1.1", + "node-releases": "^1.1.70" } }, "buffer": { - "version": "4.9.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", - "integrity": "sha1-bRu2AbB6TvztlwlBMgkwJ8lbwpg=", + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", + "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", "dev": true, "requires": { "base64-js": "^1.0.2", @@ -2482,6 +2883,12 @@ "integrity": "sha512-4/rOEg86jivtPTeOUUT61jJO1Ya1TrR/OkqCSZDyq84WJh3LuuiphBYJN+fm5xufIk4XAFcEwte/8WzC8If/1g==", "dev": true }, + "buffer-json": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-json/-/buffer-json-2.0.0.tgz", + "integrity": "sha512-+jjPFVqyfF1esi9fvfUs3NqM0pH1ziZ36VP4hmA/y/Ssfo/5w5xHKfTw9BwQjoJ1w/oVtpLomqwUHKdefGyuHw==", + "dev": true + }, "buffer-xor": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", @@ -2501,9 +2908,9 @@ "dev": true }, "cacache": { - "version": "11.3.3", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-11.3.3.tgz", - "integrity": "sha512-p8WcneCytvzPxhDvYp31PD039vi77I12W+/KfR9S8AZbaiARFBCpsPJS+9uhWfeBfeAtW7o/4vt3MUqLkbY6nA==", + "version": "12.0.4", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-12.0.4.tgz", + "integrity": "sha512-a0tMB40oefvuInr4Cwb3GerbL9xTj1D5yg0T5xrjGCGyfvbxseIXX7BAO/u/hIXdafzOI5JC3wDwHyf24buOAQ==", "dev": true, "requires": { "bluebird": "^3.5.5", @@ -2511,6 +2918,7 @@ "figgy-pudding": "^3.5.1", "glob": "^7.1.4", "graceful-fs": "^4.1.15", + "infer-owner": "^1.0.3", "lru-cache": "^5.1.1", "mississippi": "^3.0.0", "mkdirp": "^0.5.1", @@ -2540,29 +2948,27 @@ } }, "cache-loader": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/cache-loader/-/cache-loader-2.0.1.tgz", - "integrity": "sha512-V99T3FOynmGx26Zom+JrVBytLBsmUCzVG2/4NnUKgvXN4bEV42R1ERl1IyiH/cvFIDA1Ytq2lPZ9tXDSahcQpQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cache-loader/-/cache-loader-4.1.0.tgz", + "integrity": 
"sha512-ftOayxve0PwKzBF/GLsZNC9fJBXl8lkZE3TOsjkboHfVHVkL39iUEs1FO07A33mizmci5Dudt38UZrrYXDtbhw==", "dev": true, "requires": { - "loader-utils": "^1.1.0", + "buffer-json": "^2.0.0", + "find-cache-dir": "^3.0.0", + "loader-utils": "^1.2.3", "mkdirp": "^0.5.1", - "neo-async": "^2.6.0", - "normalize-path": "^3.0.0", - "schema-utils": "^1.0.0" - }, - "dependencies": { - "schema-utils": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", - "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", - "dev": true, - "requires": { - "ajv": "^6.1.0", - "ajv-errors": "^1.0.0", - "ajv-keywords": "^3.1.0" - } - } + "neo-async": "^2.6.1", + "schema-utils": "^2.0.0" + } + }, + "call-bind": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.2.tgz", + "integrity": "sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2" } }, "call-me-maybe": { @@ -2578,32 +2984,28 @@ "dev": true, "requires": { "callsites": "^2.0.0" - }, - "dependencies": { - "callsites": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz", - "integrity": "sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA=", - "dev": true - } } }, "caller-path": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/caller-path/-/caller-path-0.1.0.tgz", - "integrity": "sha1-lAhe9jWB7NPaqSREqP6U6CV3dR8=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/caller-path/-/caller-path-2.0.0.tgz", + "integrity": "sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ=", "dev": true, - "optional": true, "requires": { - "callsites": "^0.2.0" + "caller-callsite": "^2.0.0" } }, + "callsite": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/callsite/-/callsite-1.0.0.tgz", + "integrity": "sha1-KAOY5dZkvXQDi28JBRU+borxvCA=", + "dev": true + }, "callsites": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/callsites/-/callsites-0.2.0.tgz", - "integrity": "sha1-r6uWJikQp/M8GaV3WCXGnzTjUMo=", - "dev": true, - "optional": true + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-2.0.0.tgz", + "integrity": "sha1-BuuE8A7qQT2oav/vrL/7Ngk7PFA=", + "dev": true }, "camel-case": { "version": "3.0.0", @@ -2616,9 +3018,9 @@ } }, "camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", + "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", "dev": true }, "caniuse-api": { @@ -2634,15 +3036,15 @@ } }, "caniuse-lite": { - "version": "1.0.30000983", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30000983.tgz", - "integrity": "sha512-/llD1bZ6qwNkt41AsvjsmwNOoA4ZB+8iqmf5LVyeSXuBODT/hAMFNVOh84NdUzoiYiSKqo5vQ3ZzeYHSi/olDQ==", + "version": "1.0.30001183", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001183.tgz", + "integrity": "sha512-7JkwTEE1hlRKETbCFd8HDZeLiQIUcl8rC6JgNjvHCNaxOeNmQ9V4LvQXRUsKIV2CC73qKxljwVhToaA3kLRqTw==", "dev": true }, "case-sensitive-paths-webpack-plugin": { - "version": "2.2.0", - "resolved": 
"https://registry.npmjs.org/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.2.0.tgz", - "integrity": "sha512-u5ElzokS8A1pm9vM3/iDgTcI3xqHxuCao94Oz8etI3cf0Tio0p8izkDYbTIn09uP3yUUr6+veaE6IkjnTYS46g==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/case-sensitive-paths-webpack-plugin/-/case-sensitive-paths-webpack-plugin-2.3.0.tgz", + "integrity": "sha512-/4YgnZS8y1UXXmC02xD5rRrBEu6T5ub+mQHLNRj0fzTRbgdBYhsNo2V5EqwgqrExjxsjtF/OpAKAMkKsxbD5XQ==", "dev": true }, "caseless": { @@ -2663,12 +3065,11 @@ } }, "chardet": { - "version": "0.4.2", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.4.2.tgz", - "integrity": "sha1-tUc7M9yXxCTl2Y3IfVXU2KKci/I=", - "dev": true, - "optional": true - }, + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": true + }, "check-types": { "version": "8.0.3", "resolved": "https://registry.npmjs.org/check-types/-/check-types-8.0.3.tgz", @@ -2676,29 +3077,60 @@ "dev": true }, "chokidar": { - "version": "2.1.6", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.6.tgz", - "integrity": "sha512-V2jUo67OKkc6ySiRpJrjlpJKl9kDuG+Xb8VgsGzb+aEouhgS1D0weyPU4lEzdAcsCAvrih2J2BqyXqHWvVLw5g==", - "dev": true, - "requires": { - "anymatch": "^2.0.0", - "async-each": "^1.0.1", - "braces": "^2.3.2", - "fsevents": "^1.2.7", - "glob-parent": "^3.1.0", - "inherits": "^2.0.3", - "is-binary-path": "^1.0.0", - "is-glob": "^4.0.0", - "normalize-path": "^3.0.0", - "path-is-absolute": "^1.0.0", - "readdirp": "^2.2.1", - "upath": "^1.1.1" + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz", + "integrity": "sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==", + "dev": true, + "requires": { + "anymatch": "~3.1.1", + "braces": "~3.0.2", + "fsevents": "~2.3.1", + "glob-parent": "~5.1.0", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.5.0" + }, + "dependencies": { + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + } } }, "chownr": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.2.tgz", - "integrity": "sha512-GkfeAQh+QNy3wquu9oIZr6SS5x7wGdSgNQvD10X3r+AZr1Oys22HW8kAmDMvNg2+Dm0TeGaEuO8gFwdBXxwO8A==", + "version": "1.1.4", + 
"resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", "dev": true }, "chrome-trace-event": { @@ -2726,13 +3158,6 @@ "safe-buffer": "^5.0.1" } }, - "circular-json": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/circular-json/-/circular-json-0.3.3.tgz", - "integrity": "sha512-UZK3NBx2Mca+b5LsG7bY183pHWt5Y1xts4P3Pz7ENTwGVnJOUWbRb3ocjvX7hx9tq/yTAdclXm9sZ38gNuem4A==", - "dev": true, - "optional": true - }, "class-utils": { "version": "0.3.6", "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", @@ -2757,9 +3182,9 @@ } }, "clean-css": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-4.2.1.tgz", - "integrity": "sha512-4ZxI6dy4lrY6FHzfiy1aEOXgu4LIsW2MhwG0VBKdcoGoH/XLFgaHSdLTGr4O8Be6A8r3MOphEiI8Gc1n0ecf3g==", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/clean-css/-/clean-css-4.2.3.tgz", + "integrity": "sha512-VcMWDN54ZN/DS+g58HYL5/n4Zrqe8vHJpGA8KdgUXFU4fuP/aHNw8eld9SyEIyabIMJX/0RaY/fplOo5hYLSFA==", "dev": true, "requires": { "source-map": "~0.6.0" @@ -2773,6 +3198,12 @@ } } }, + "clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true + }, "cli-cursor": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", @@ -2783,60 +3214,148 @@ } }, "cli-highlight": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/cli-highlight/-/cli-highlight-2.1.1.tgz", - "integrity": "sha512-0y0VlNmdD99GXZHYnvrQcmHxP8Bi6T00qucGgBgGv4kJ0RyDthNnnFPupHV7PYv/OXSVk+azFbOeaW6+vGmx9A==", + "version": "2.1.10", + "resolved": "https://registry.npmjs.org/cli-highlight/-/cli-highlight-2.1.10.tgz", + "integrity": "sha512-CcPFD3JwdQ2oSzy+AMG6j3LRTkNjM82kzcSKzoVw6cLanDCJNlsLjeqVTOTfOfucnWv5F0rmBemVf1m9JiIasw==", "dev": true, "requires": { - "chalk": "^2.3.0", - "highlight.js": "^9.6.0", + "chalk": "^4.0.0", + "highlight.js": "^10.0.0", "mz": "^2.4.0", - "parse5": "^4.0.0", - "yargs": "^13.0.0" + "parse5": "^5.1.1", + "parse5-htmlparser2-tree-adapter": "^6.0.0", + "yargs": "^16.0.0" + }, + "dependencies": { + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "chalk": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", + "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + 
"has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "cli-spinners": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.2.0.tgz", - "integrity": "sha512-tgU3fKwzYjiLEQgPMD9Jt+JjHVL9kW93FiIMX/l7rivvOD4/LL0Mf7gda3+4U2KJBloybwgj5KEoQgGRioMiKQ==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.5.0.tgz", + "integrity": "sha512-PC+AmIuK04E6aeSs/pUccSujsTzBhu4HzC2dL+CfJB/Jcc2qTRbEwZQDfIUpt2Xl8BodYBEq8w4fc0kU2I9DjQ==", "dev": true }, "cli-width": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", - "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz", + "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw==", "dev": true }, "clipboardy": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/clipboardy/-/clipboardy-2.1.0.tgz", - "integrity": "sha512-2pzOUxWcLlXWtn+Jd6js3o12TysNOOVes/aQfg+MT/35vrxWzedHlLwyoJpXjsFKWm95BTNEcMGD9+a7mKzZkQ==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/clipboardy/-/clipboardy-2.3.0.tgz", + "integrity": "sha512-mKhiIL2DrQIsuXMgBgnfEHOZOryC7kY7YO//TN6c63wlEm3NG5tz+YgY5rVi29KCmq/QQjKYvM7a19+MDOTHOQ==", "dev": true, "requires": { "arch": "^2.1.1", - "execa": "^1.0.0" + "execa": "^1.0.0", + "is-wsl": "^2.1.1" + }, + "dependencies": { + "is-wsl": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-wsl/-/is-wsl-2.2.0.tgz", + "integrity": "sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==", + "dev": true, + "requires": { + "is-docker": "^2.0.0" + } + } } }, "cliui": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", - "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", "dev": true, "requires": { - "string-width": "^3.1.0", - "strip-ansi": "^5.2.0", - "wrap-ansi": "^5.1.0" + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" }, "dependencies": { - "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" + "color-convert": "^2.0.1" + } + }, + "color-convert": { + 
"version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" } } } @@ -2847,13 +3366,6 @@ "integrity": "sha1-2jCcwmPfFZlMaIypAheco8fNfH4=", "dev": true }, - "co": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", - "integrity": "sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ=", - "dev": true, - "optional": true - }, "coa": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/coa/-/coa-2.0.2.tgz", @@ -2865,12 +3377,6 @@ "q": "^1.1.2" } }, - "code-point-at": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", - "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", - "dev": true - }, "collection-visit": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", @@ -2882,13 +3388,13 @@ } }, "color": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/color/-/color-3.1.2.tgz", - "integrity": "sha512-vXTJhHebByxZn3lDvDJYw4lR5+uB3vuoHsuYA5AKuxRVn5wzzIfQKGLBmgdVRHKTJYeK5rvJcHnrd0Li49CFpg==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/color/-/color-3.1.3.tgz", + "integrity": "sha512-xgXAcTHa2HeFCGLE9Xs/R82hujGtu9Jd9x4NW3T34+OMs7VoPsjwzRczKHvTAHeJwWFwX5j15+MgAppE8ztObQ==", "dev": true, "requires": { "color-convert": "^1.9.1", - "color-string": "^1.5.2" + "color-string": "^1.5.4" } }, "color-convert": { @@ -2907,15 +3413,21 @@ "dev": true }, "color-string": { - "version": "1.5.3", - "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.3.tgz", - "integrity": "sha512-dC2C5qeWoYkxki5UAXapdjqO672AM4vZuPGRQfO8b5HKuKGBbKWpITyDYN7TOFKvRW7kOgAn3746clDBMDJyQw==", + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.4.tgz", + "integrity": "sha512-57yF5yt8Xa3czSEW1jfQDE79Idk0+AkN/4KWad6tbdxUmAs3MvjxlWSWD4deYytcRfoZ9nhKyFl1kj5tBvidbw==", "dev": true, "requires": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, + "colorette": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.2.1.tgz", + "integrity": "sha512-puCDz0CzydiSYOrnXpz/PKd69zRrribezjtE9yd4zvytoRc8+RY/KJPvtPFKZS3E3wP6neGyMe0vOTlHO5L3Pw==", + "dev": true + }, "combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -2926,9 +3438,9 @@ } }, "commander": { - "version": "2.20.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.0.tgz", - "integrity": "sha512-7j2y+40w61zy6YC2iRNpUe/NwhNyoXrYpHMrSunaMG64nRnaf96zO/KMQR4OyN/UnE5KLyEBnKHd4aG3rskjpQ==", + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": 
"sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", "dev": true }, "commondir": { @@ -2944,12 +3456,12 @@ "dev": true }, "compressible": { - "version": "2.0.17", - "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.17.tgz", - "integrity": "sha512-BGHeLCK1GV7j1bSmQQAi26X+GgWcTjLr/0tzSvMCl3LH1w1IJ4PFSPoV5316b30cneTziC+B1a+3OjoSUcQYmw==", + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", + "integrity": "sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==", "dev": true, "requires": { - "mime-db": ">= 1.40.0 < 2" + "mime-db": ">= 1.43.0 < 2" } }, "compression": { @@ -3015,13 +3527,10 @@ "dev": true }, "console-browserify": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.1.0.tgz", - "integrity": "sha1-8CQcRXMKn8YyOyBtvzjtx0HQuxA=", - "dev": true, - "requires": { - "date-now": "^0.1.4" - } + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/console-browserify/-/console-browserify-1.2.0.tgz", + "integrity": "sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==", + "dev": true }, "consolidate": { "version": "0.15.1", @@ -3054,9 +3563,9 @@ "dev": true }, "convert-source-map": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.6.0.tgz", - "integrity": "sha512-eFu7XigvxdZ1ETfbgPBohgyQ/Z++C0eEhTor0qRwBw9unw+L0/6V8wkSuGgzdThkiS5lSpdptOQPD8Ak40a+7A==", + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", + "integrity": "sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==", "dev": true, "requires": { "safe-buffer": "~5.1.1" @@ -3095,51 +3604,64 @@ "dev": true }, "copy-webpack-plugin": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-4.6.0.tgz", - "integrity": "sha512-Y+SQCF+0NoWQryez2zXn5J5knmr9z/9qSQt7fbL78u83rxmigOy8X5+BFn8CFSuX+nKT8gpYwJX68ekqtQt6ZA==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-5.1.2.tgz", + "integrity": "sha512-Uh7crJAco3AjBvgAy9Z75CjK8IG+gxaErro71THQ+vv/bl4HaQcpkexAY8KVW/T6D2W2IRr+couF/knIRkZMIQ==", "dev": true, "requires": { - "cacache": "^10.0.4", - "find-cache-dir": "^1.0.0", + "cacache": "^12.0.3", + "find-cache-dir": "^2.1.0", + "glob-parent": "^3.1.0", "globby": "^7.1.1", - "is-glob": "^4.0.0", - "loader-utils": "^1.1.0", + "is-glob": "^4.0.1", + "loader-utils": "^1.2.3", "minimatch": "^3.0.4", - "p-limit": "^1.0.0", - "serialize-javascript": "^1.4.0" + "normalize-path": "^3.0.0", + "p-limit": "^2.2.1", + "schema-utils": "^1.0.0", + "serialize-javascript": "^4.0.0", + "webpack-log": "^2.0.0" }, "dependencies": { - "cacache": { - "version": "10.0.4", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-10.0.4.tgz", - "integrity": "sha512-Dph0MzuH+rTQzGPNT9fAnrPmMmjKfST6trxJeK7NQuHRaVw24VzPRWTmg9MpcwOVQZO0E1FBICUlFeNaKPIfHA==", + "find-cache-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", "dev": true, "requires": { - "bluebird": "^3.5.1", - "chownr": "^1.0.1", - "glob": "^7.1.2", - "graceful-fs": "^4.1.11", - "lru-cache": "^4.1.1", - 
"mississippi": "^2.0.0", - "mkdirp": "^0.5.1", - "move-concurrently": "^1.0.1", - "promise-inflight": "^1.0.1", - "rimraf": "^2.6.2", - "ssri": "^5.2.4", - "unique-filename": "^1.1.0", - "y18n": "^4.0.0" + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" } }, - "find-cache-dir": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-1.0.0.tgz", - "integrity": "sha1-kojj6ePMN0hxfTnq3hfPcfww7m8=", + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", "dev": true, "requires": { - "commondir": "^1.0.1", - "make-dir": "^1.0.0", - "pkg-dir": "^2.0.0" + "locate-path": "^3.0.0" + } + }, + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "dev": true, + "requires": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + }, + "dependencies": { + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "dev": true, + "requires": { + "is-extglob": "^2.1.0" + } + } } }, "globby": { @@ -3154,68 +3676,75 @@ "ignore": "^3.3.5", "pify": "^3.0.0", "slash": "^1.0.0" + }, + "dependencies": { + "pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "dev": true + } } }, - "lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", + "ignore": { + "version": "3.3.10", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", + "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", + "dev": true + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", "dev": true, "requires": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" } }, "make-dir": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", - "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", "dev": true, "requires": { - "pify": "^3.0.0" + "pify": "^4.0.1", + "semver": "^5.6.0" } }, - "mississippi": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mississippi/-/mississippi-2.0.0.tgz", - "integrity": "sha512-zHo8v+otD1J10j/tC+VNoGK9keCuByhKovAvdn74dmxJl9+mWHnx6EMsDN4lgRoMI/eYo2nchAxniIbUPb5onw==", + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", "dev": true, "requires": { - "concat-stream": "^1.5.0", - "duplexify": "^3.4.2", - "end-of-stream": "^1.1.0", - 
"flush-write-stream": "^1.0.0", - "from2": "^2.1.0", - "parallel-transform": "^1.1.0", - "pump": "^2.0.1", - "pumpify": "^1.3.3", - "stream-each": "^1.1.0", - "through2": "^2.0.0" + "p-limit": "^2.0.0" } }, - "pify": { + "path-exists": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", - "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", "dev": true }, "pkg-dir": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", - "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", "dev": true, "requires": { - "find-up": "^2.1.0" + "find-up": "^3.0.0" } }, - "pump": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/pump/-/pump-2.0.1.tgz", - "integrity": "sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==", + "schema-utils": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", + "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", "dev": true, "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" + "ajv": "^6.1.0", + "ajv-errors": "^1.0.0", + "ajv-keywords": "^3.1.0" } }, "slash": { @@ -3223,28 +3752,31 @@ "resolved": "https://registry.npmjs.org/slash/-/slash-1.0.0.tgz", "integrity": "sha1-xB8vbDn8FtHNF61LXYlhFK5HDVU=", "dev": true - }, - "ssri": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-5.3.0.tgz", - "integrity": "sha512-XRSIPqLij52MtgoQavH/x/dU1qVKtWUAAZeOHsR9c2Ddi4XerFy3mc1alf+dLJKl9EUIm/Ht+EowFkTUOA6GAQ==", - "dev": true, - "requires": { - "safe-buffer": "^5.1.1" - } - }, - "yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", - "dev": true } } }, "core-js": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-2.6.9.tgz", - "integrity": "sha512-HOpZf6eXmnl7la+cUdMnLvUxKNqLUzJvgIziQ0DiF3JwSImNphIqdGqzj6hIKyX04MmV0poclQ7+wjWvxQyR2A==" + "version": "3.8.3", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.8.3.tgz", + "integrity": "sha512-KPYXeVZYemC2TkNEkX/01I+7yd+nX3KddKwZ1Ww7SKWdI2wQprSgLmrTddT8nw92AjEklTsPBoSdQBhbI1bQ6Q==" + }, + "core-js-compat": { + "version": "3.8.3", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.8.3.tgz", + "integrity": "sha512-1sCb0wBXnBIL16pfFG1Gkvei6UzvKyTNYpiC41yrdjEv0UoJoq9E/abTMzyYJ6JpTkAj15dLjbqifIzEBDVvog==", + "dev": true, + "requires": { + "browserslist": "^4.16.1", + "semver": "7.0.0" + }, + "dependencies": { + "semver": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.0.0.tgz", + "integrity": "sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==", + "dev": true + } + } }, "core-util-is": { "version": "1.0.2", @@ -3262,16 +3794,36 @@ "is-directory": "^0.3.1", "js-yaml": "^3.13.1", "parse-json": "^4.0.0" + }, + "dependencies": { + "parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", 
+ "dev": true, + "requires": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + } + } } }, "create-ecdh": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.3.tgz", - "integrity": "sha512-GbEHQPMOswGpKXM9kCWVrremUcBmjteUaQ01T9rkKCPDXfUHX0IoP9LpHYo2NPFampa4e+/pFDc3jQdxrxQLaw==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/create-ecdh/-/create-ecdh-4.0.4.tgz", + "integrity": "sha512-mf+TCx8wWc9VpuxfP2ht0iSISLZnt0JgWlrOKZiNqyUZWnjIaCIVNQArMHnCZKfEYRg6IM7A+NeJoN8gf/Ws0A==", "dev": true, "requires": { "bn.js": "^4.1.0", - "elliptic": "^6.0.0" + "elliptic": "^6.5.3" + }, + "dependencies": { + "bn.js": { + "version": "4.11.9", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", + "integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==", + "dev": true + } } }, "create-hash": { @@ -3350,58 +3902,48 @@ } }, "css-loader": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-1.0.1.tgz", - "integrity": "sha512-+ZHAZm/yqvJ2kDtPne3uX0C+Vr3Zn5jFn2N4HywtS5ujwvsVkyg0VArEXpl3BgczDA8anieki1FIzhchX4yrDw==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/css-loader/-/css-loader-3.6.0.tgz", + "integrity": "sha512-M5lSukoWi1If8dhQAUCvj4H8vUt3vOnwbQBH9DdTm/s4Ym2B/3dPMtYZeJmq7Q3S3Pa+I94DcZ7pc9bP14cWIQ==", "dev": true, "requires": { - "babel-code-frame": "^6.26.0", - "css-selector-tokenizer": "^0.7.0", - "icss-utils": "^2.1.0", - "loader-utils": "^1.0.2", - "lodash": "^4.17.11", - "postcss": "^6.0.23", - "postcss-modules-extract-imports": "^1.2.0", - "postcss-modules-local-by-default": "^1.2.0", - "postcss-modules-scope": "^1.1.0", - "postcss-modules-values": "^1.3.0", - "postcss-value-parser": "^3.3.0", - "source-list-map": "^2.0.0" + "camelcase": "^5.3.1", + "cssesc": "^3.0.0", + "icss-utils": "^4.1.1", + "loader-utils": "^1.2.3", + "normalize-path": "^3.0.0", + "postcss": "^7.0.32", + "postcss-modules-extract-imports": "^2.0.0", + "postcss-modules-local-by-default": "^3.0.2", + "postcss-modules-scope": "^2.2.0", + "postcss-modules-values": "^3.0.0", + "postcss-value-parser": "^4.1.0", + "schema-utils": "^2.7.0", + "semver": "^6.3.0" }, "dependencies": { - "postcss": { - "version": "6.0.23", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", - "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", - "dev": true, - "requires": { - "chalk": "^2.4.1", - "source-map": "^0.6.1", - "supports-color": "^5.4.0" - } + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "dev": true } } }, - "css-parse": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/css-parse/-/css-parse-1.7.0.tgz", - "integrity": "sha1-Mh9s9zeCpv91ERE5D8BeLGV9jJs=", - "dev": true - }, "css-select": { - "version": "2.0.2", - 
"resolved": "https://registry.npmjs.org/css-select/-/css-select-2.0.2.tgz", - "integrity": "sha512-dSpYaDVoWaELjvZ3mS6IKZM/y2PMPa/XYoEfYNZePL4U/XgyxZNroHEHReDx/d+VgXh9VbCTtFqLkFbmeqeaRQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-2.1.0.tgz", + "integrity": "sha512-Dqk7LQKpwLoH3VovzZnkzegqNSuAziQyNZUcrdDM401iY+R5NkGBXGmtO05/yaXQziALuPogeG0b7UAgjnTJTQ==", "dev": true, "requires": { "boolbase": "^1.0.0", - "css-what": "^2.1.2", + "css-what": "^3.2.1", "domutils": "^1.7.0", "nth-check": "^1.0.2" } @@ -3412,89 +3954,34 @@ "integrity": "sha512-jQVeeRG70QI08vSTwf1jHxp74JoZsr2XSgETae8/xC8ovSnL2WF87GTLO86Sbwdt2lK4Umg4HnnwMO4YF3Ce7w==", "dev": true }, - "css-selector-tokenizer": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/css-selector-tokenizer/-/css-selector-tokenizer-0.7.1.tgz", - "integrity": "sha512-xYL0AMZJ4gFzJQsHUKa5jiWWi2vH77WVNg7JYRyewwj6oPh4yb/y6Y9ZCw9dsj/9UauMhtuxR+ogQd//EdEVNA==", + "css-tree": { + "version": "1.0.0-alpha.37", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.0.0-alpha.37.tgz", + "integrity": "sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg==", "dev": true, "requires": { - "cssesc": "^0.1.0", - "fastparse": "^1.1.1", - "regexpu-core": "^1.0.0" + "mdn-data": "2.0.4", + "source-map": "^0.6.1" }, "dependencies": { - "cssesc": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-0.1.0.tgz", - "integrity": "sha1-yBSQPkViM3GgR3tAEJqq++6t27Q=", - "dev": true - }, - "jsesc": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", - "integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=", - "dev": true - }, - "regexpu-core": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-1.0.0.tgz", - "integrity": "sha1-hqdj9Y7k18L2sQLkdkBQ3n7ZDGs=", - "dev": true, - "requires": { - "regenerate": "^1.2.1", - "regjsgen": "^0.2.0", - "regjsparser": "^0.1.4" - } - }, - "regjsgen": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.2.0.tgz", - "integrity": "sha1-bAFq3qxVT3WCP+N6wFuS1aTtsfc=", + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "dev": true - }, - "regjsparser": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.1.5.tgz", - "integrity": "sha1-fuj4Tcb6eS0/0K4ijSS9lJ6tIFw=", - "dev": true, - "requires": { - "jsesc": "~0.5.0" - } } } }, - "css-tree": { - "version": "1.0.0-alpha.28", - "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.0.0-alpha.28.tgz", - "integrity": "sha512-joNNW1gCp3qFFzj4St6zk+Wh/NBv0vM5YbEreZk0SD4S23S+1xBKb6cLDg2uj4P4k/GUMlIm6cKIDqIG+vdt0w==", - "dev": true, - "requires": { - "mdn-data": "~1.1.0", - "source-map": "^0.5.3" - } - }, - "css-unit-converter": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/css-unit-converter/-/css-unit-converter-1.1.1.tgz", - "integrity": "sha1-2bkoGtz9jO2TW9urqDeGiX9k6ZY=", - "dev": true - }, - "css-url-regex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/css-url-regex/-/css-url-regex-1.1.0.tgz", - "integrity": "sha1-g4NCMMyfdMRX3lnuvRVD/uuDt+w=", - "dev": true - }, "css-what": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/css-what/-/css-what-2.1.3.tgz", - "integrity": 
"sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg==", + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-3.4.2.tgz", + "integrity": "sha512-ACUm3L0/jiZTqfzRM3Hi9Q8eZqd6IK37mMWPLz9PJxkLWllYeRf+EHUSHYEtFop2Eqytaq1FizFVh7XfBnXCDQ==", "dev": true }, "cssesc": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-2.0.0.tgz", - "integrity": "sha512-MsCAG1z9lPdoO/IUMLSBWBSVxVtJ1395VGIQ+Fc2gNdkQ1hNDnQdw3YhA71WJCBW1vdwA0cAnk/DnW6bqoEUYg==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", "dev": true }, "cssnano": { @@ -3575,36 +4062,42 @@ "dev": true }, "csso": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/csso/-/csso-3.5.1.tgz", - "integrity": "sha512-vrqULLffYU1Q2tLdJvaCYbONStnfkfimRxXNaGjxMldI0C7JPBC4rB1RyjhfdZ4m1frm8pM9uRPKH3d2knZ8gg==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/csso/-/csso-4.2.0.tgz", + "integrity": "sha512-wvlcdIbf6pwKEk7vHj8/Bkc0B4ylXZruLvOgs9doS5eOsOpuodOV2zJChSpkp+pRpYQLQMeF04nr3Z68Sta9jA==", "dev": true, "requires": { - "css-tree": "1.0.0-alpha.29" + "css-tree": "^1.1.2" }, "dependencies": { "css-tree": { - "version": "1.0.0-alpha.29", - "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.0.0-alpha.29.tgz", - "integrity": "sha512-sRNb1XydwkW9IOci6iB2xmy8IGCj6r/fr+JWitvJ2JxQRPzN3T4AGGVWCMlVmVwM1gtgALJRmGIlWv5ppnGGkg==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-1.1.2.tgz", + "integrity": "sha512-wCoWush5Aeo48GLhfHPbmvZs59Z+M7k5+B1xDnXbdWNcEF423DoFdqSWE0PM5aNk5nI5cp1q7ms36zGApY/sKQ==", "dev": true, "requires": { - "mdn-data": "~1.1.0", - "source-map": "^0.5.3" + "mdn-data": "2.0.14", + "source-map": "^0.6.1" } + }, + "mdn-data": { + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.14.tgz", + "integrity": "sha512-dn6wd0uw5GsdswPFfsgMp5NSB0/aDe6fK94YJV/AJDYXL6HVLWBsxeq7js7Ad+mU2K9LAlwpk6kN2D5mwCPVow==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true } } }, - "current-script-polyfill": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/current-script-polyfill/-/current-script-polyfill-1.0.0.tgz", - "integrity": "sha1-8xz35PPiGLBybnOMqSoC00iO9hU=", - "dev": true - }, "cyclist": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/cyclist/-/cyclist-0.2.2.tgz", - "integrity": "sha1-GzN5LhHpFKL9bW7WRHRkRE5fpkA=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/cyclist/-/cyclist-1.0.1.tgz", + "integrity": "sha1-WW6WmP0MgOEgOMK4LW6xs1tiJNk=", "dev": true }, "dashdash": { @@ -3616,12 +4109,6 @@ "assert-plus": "^1.0.0" } }, - "date-now": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/date-now/-/date-now-0.1.4.tgz", - "integrity": "sha1-6vQ5/U1ISK105cx9vvIAZyueNFs=", - "dev": true - }, "de-indent": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/de-indent/-/de-indent-1.0.2.tgz", @@ -3629,12 +4116,21 @@ "dev": true }, "debug": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", - "integrity": 
"sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "dev": true, + "requires": { + "ms": "2.1.2" + } + }, + "decache": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/decache/-/decache-4.6.0.tgz", + "integrity": "sha512-PppOuLiz+DFeaUvFXEYZjLxAkKiMYH/do/b/MxpDe/8AgKBi5GhZxridoVIbBq72GDbL36e4p0Ce2jTGUwwU+w==", "dev": true, "requires": { - "ms": "^2.1.1" + "callsite": "^1.0.0" } }, "decamelize": { @@ -3650,10 +4146,18 @@ "dev": true }, "deep-equal": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.0.1.tgz", - "integrity": "sha1-9dJgKStmDghO/0zbyfCK0yR0SLU=", - "dev": true + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-1.1.1.tgz", + "integrity": "sha512-yd9c5AdiqVcR+JjcwUQb9DkhJc8ngNr0MahEBGvDiJw8puWab2yZlh+nkasOnZP+EGTAP6rRp2JzJhJZzvNF8g==", + "dev": true, + "requires": { + "is-arguments": "^1.0.4", + "is-date-object": "^1.0.1", + "is-regex": "^1.0.4", + "object-is": "^1.0.1", + "object-keys": "^1.1.1", + "regexp.prototype.flags": "^1.2.0" + } }, "deep-is": { "version": "0.1.3", @@ -3668,13 +4172,118 @@ "dev": true }, "default-gateway": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-5.0.2.tgz", - "integrity": "sha512-wXuT0q8T5vxQNecrTgz/KbU2lPUMRc98I9Y5dnH3yhFB3BGYqtADK4lhivLlG0OfjhmfKx1PGILG2jR4zjI+WA==", + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-5.0.5.tgz", + "integrity": "sha512-z2RnruVmj8hVMmAnEJMTIJNijhKCDiGjbLP+BHJFOT7ld3Bo5qcIBpVYDniqhbMIIf+jZDlkP2MkPXiQy/DBLA==", "dev": true, "requires": { - "execa": "^1.0.0", - "ip-regex": "^2.1.0" + "execa": "^3.3.0" + }, + "dependencies": { + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "execa": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/execa/-/execa-3.4.0.tgz", + "integrity": "sha512-r9vdGQk4bmCuK1yKQu1KTwcT2zwfWdbdaXfCtAh+5nU/4fSX+JAb7vZGvI5naJrQlvONrEB20jeruESI69530g==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.0", + "get-stream": "^5.0.0", + "human-signals": "^1.1.1", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.0", + "onetime": "^5.1.0", + "p-finally": "^2.0.0", + "signal-exit": "^3.0.2", + "strip-final-newline": "^2.0.0" + } + }, + "get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "requires": { + "pump": "^3.0.0" + } + }, + "is-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", + "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==", + "dev": true + }, + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": 
"sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true + }, + "npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "requires": { + "path-key": "^3.0.0" + } + }, + "onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } + }, + "p-finally": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-2.0.1.tgz", + "integrity": "sha512-vpm09aKwq6H9phqRQzecoDpD8TmVyGw70qmWlyq5onxY7tqyTTFVvxMykxQSQKILBSFlbXpypIw2T1Ml7+DDtw==", + "dev": true + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + } } }, "defaults": { @@ -3771,6 +4380,12 @@ "dev": true } } + }, + "p-map": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", + "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==", + "dev": true } } }, @@ -3787,9 +4402,9 @@ "dev": true }, "des.js": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.0.0.tgz", - "integrity": "sha1-wHTS4qpqipoH29YfmhXCzYPsjsw=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/des.js/-/des.js-1.0.1.tgz", + "integrity": "sha512-Q0I4pfFrv2VPd34/vfLrFOoRmlYj3OV50i7fskps1jZWK1kApMWWT9G6RRUeYedLcBDIhnSDaUvJMb3AhUlaEA==", "dev": true, "requires": { "inherits": "^2.0.1", @@ -3817,6 +4432,14 @@ "bn.js": "^4.1.0", "miller-rabin": "^4.0.0", "randombytes": "^2.0.0" + }, + "dependencies": { + "bn.js": { + "version": "4.11.9", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", + "integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==", + "dev": true + } } }, "dir-glob": { @@ -3854,11 +4477,10 @@ } }, "doctrine": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz", - "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": 
"sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", "dev": true, - "optional": true, "requires": { "esutils": "^2.0.2" } @@ -3873,13 +4495,21 @@ } }, "dom-serializer": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.1.1.tgz", - "integrity": "sha512-l0IU0pPzLWSHBcieZbpOKgkIn3ts3vAh7ZuFyXNwJxJXk/c4Gwj9xaTJwIDVQCXawWD0qb3IzMGH5rglQaO0XA==", + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.2.2.tgz", + "integrity": "sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g==", "dev": true, "requires": { - "domelementtype": "^1.3.0", - "entities": "^1.1.1" + "domelementtype": "^2.0.1", + "entities": "^2.0.0" + }, + "dependencies": { + "domelementtype": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.1.0.tgz", + "integrity": "sha512-LsTgx/L5VpD+Q8lmsXSHW2WpA+eBlZ9HPf3erD1IoPF00/3JKHZ3BknUVA2QGDNu69ZNmyFmCWBSO45XjYKC5w==", + "dev": true + } } }, "domain-browser": { @@ -3914,18 +4544,18 @@ } }, "dot-prop": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-4.2.0.tgz", - "integrity": "sha512-tUMXrxlExSW6U2EXiiKGSBVdYgtV8qlHL+C10TsW4PURY/ic+eaysnSkwB4kA/mBlCyy/IKDJ+Lc3wbWeaXtuQ==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", "dev": true, "requires": { - "is-obj": "^1.0.0" + "is-obj": "^2.0.0" } }, "dotenv": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-7.0.0.tgz", - "integrity": "sha512-M3NhsLbV1i6HuGzBUH8vXrtxOk+tWmzWKDMbAVSUp3Zsjm7ywFeuwrUXhmhQyRK1q5B5GGy7hcXPbj3bnfZg2g==", + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz", + "integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw==", "dev": true }, "dotenv-expand": { @@ -3935,9 +4565,9 @@ "dev": true }, "duplexer": { - "version": "0.1.1", - "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.1.tgz", - "integrity": "sha1-rOb/gIwc5mtX0ev5eXessCM0z8E=", + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", + "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==", "dev": true }, "duplexify": { @@ -3953,9 +4583,9 @@ } }, "easy-stack": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/easy-stack/-/easy-stack-1.0.0.tgz", - "integrity": "sha1-EskbMIWjfwuqM26UhurEv5Tj54g=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/easy-stack/-/easy-stack-1.0.1.tgz", + "integrity": "sha512-wK2sCs4feiiJeFXn3zvY0p41mdU5VUgbgs1rNsc/y5ngFUijdWd+iIN8eoyuZHKB8xN6BL4PdWmzqFmxNg6V2w==", "dev": true }, "ecc-jsbn": { @@ -3975,42 +4605,50 @@ "dev": true }, "ejs": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/ejs/-/ejs-2.6.2.tgz", - "integrity": "sha512-PcW2a0tyTuPHz3tWyYqtK6r1fZ3gp+3Sop8Ph+ZYN81Ob5rwmbHEzaqs10N3BEsaGTkh/ooniXK+WwszGlc2+Q==", + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-2.7.4.tgz", + "integrity": "sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA==", "dev": true }, "electron-to-chromium": { - "version": "1.3.190", - "resolved": 
"https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.190.tgz", - "integrity": "sha512-cs9WnTnGBGnYYVFMCtLmr9jXNTOkdp95RLz5VhwzDn7dErg1Lnt9o4d01gEH69XlmRKWUr91Yu1hA+Hi8qW0PA==", + "version": "1.3.653", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.653.tgz", + "integrity": "sha512-LehOhcl74u9fkV9Un6WahJ+Xh+0FZLCCDnKYis1Olx1DX2ugRww5PJicE65OG8yznMj8EOQZRcz6FSV1xKxqsA==", "dev": true }, "elliptic": { - "version": "6.5.3", - "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.3.tgz", - "integrity": "sha512-IMqzv5wNQf+E6aHeIqATs0tOLeOTwj1QKbRcS3jBbYkl5oLAserA8yJTT7/VyHUYG91PRmPyeQDObKLPpeS4dw==", + "version": "6.5.4", + "resolved": "https://registry.npmjs.org/elliptic/-/elliptic-6.5.4.tgz", + "integrity": "sha512-iLhC6ULemrljPZb+QutR5TQGB+pdW6KGD5RSegS+8sorOZT+rdQFbsQFJgvN3eRqNALqJer4oQ16YvJHlU8hzQ==", "dev": true, "requires": { - "bn.js": "^4.4.0", - "brorand": "^1.0.1", + "bn.js": "^4.11.9", + "brorand": "^1.1.0", "hash.js": "^1.0.0", - "hmac-drbg": "^1.0.0", - "inherits": "^2.0.1", - "minimalistic-assert": "^1.0.0", - "minimalistic-crypto-utils": "^1.0.0" + "hmac-drbg": "^1.0.1", + "inherits": "^2.0.4", + "minimalistic-assert": "^1.0.1", + "minimalistic-crypto-utils": "^1.0.1" + }, + "dependencies": { + "bn.js": { + "version": "4.11.9", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", + "integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==", + "dev": true + } } }, "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", "dev": true }, "emojis-list": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-2.1.0.tgz", - "integrity": "sha1-TapNnbAPmBmIDHn6RXrlsJof04k=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", + "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", "dev": true }, "encodeurl": { @@ -4020,35 +4658,47 @@ "dev": true }, "end-of-stream": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", - "integrity": "sha512-1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q==", + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "dev": true, "requires": { "once": "^1.4.0" } }, "enhanced-resolve": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz", - "integrity": "sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng==", + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-4.5.0.tgz", + "integrity": "sha512-Nv9m36S/vxpsI+Hc4/ZGRs0n9mXqSWGGq49zxb/cJfPAQMbUtttJAlNPS4AQzaBdw/pKskw5bMbekT/Y7W/Wlg==", "dev": true, "requires": { "graceful-fs": "^4.1.2", - "memory-fs": "^0.4.0", + "memory-fs": "^0.5.0", "tapable": 
"^1.0.0" + }, + "dependencies": { + "memory-fs": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.5.0.tgz", + "integrity": "sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA==", + "dev": true, + "requires": { + "errno": "^0.1.3", + "readable-stream": "^2.0.1" + } + } } }, "entities": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/entities/-/entities-1.1.2.tgz", - "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.2.0.tgz", + "integrity": "sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==", "dev": true }, "errno": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.7.tgz", - "integrity": "sha512-MfrRBDWzIWifgq6tJj60gkAwtLNb6sQPlcFrSOflcP1aFmmruKQ2wRnze/8V6kgyz7H3FF8Npzv78mZ7XLLflg==", + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz", + "integrity": "sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==", "dev": true, "requires": { "prr": "~1.0.1" @@ -4064,32 +4714,40 @@ } }, "error-stack-parser": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/error-stack-parser/-/error-stack-parser-2.0.2.tgz", - "integrity": "sha512-E1fPutRDdIj/hohG0UpT5mayXNCxXP9d+snxFsPU9X0XgccOumKraa3juDMwTUyi7+Bu5+mCGagjg4IYeNbOdw==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/error-stack-parser/-/error-stack-parser-2.0.6.tgz", + "integrity": "sha512-d51brTeqC+BHlwF0BhPtcYgF5nlzf9ZZ0ZIUQNZpc9ZB9qw5IJ2diTrBY9jlCJkTLITYPjmiX6OWCwH+fuyNgQ==", "dev": true, "requires": { - "stackframe": "^1.0.4" + "stackframe": "^1.1.1" } }, "es-abstract": { - "version": "1.13.0", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.13.0.tgz", - "integrity": "sha512-vDZfg/ykNxQVwup/8E1BZhVzFfBxs9NqMzGcvIJrqg5k2/5Za2bWo40dK2J1pgLngZ7c+Shh8lwYtLGyrwPutg==", + "version": "1.18.0-next.2", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.2.tgz", + "integrity": "sha512-Ih4ZMFHEtZupnUh6497zEL4y2+w8+1ljnCyaTa+adcoafI1GOvMwFlDjBLfWR7y9VLfrjRJe9ocuHY1PSR9jjw==", "dev": true, "requires": { - "es-to-primitive": "^1.2.0", + "call-bind": "^1.0.2", + "es-to-primitive": "^1.2.1", "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.2", "has": "^1.0.3", - "is-callable": "^1.1.4", - "is-regex": "^1.0.4", - "object-keys": "^1.0.12" + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-negative-zero": "^2.0.1", + "is-regex": "^1.1.1", + "object-inspect": "^1.9.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.2", + "string.prototype.trimend": "^1.0.3", + "string.prototype.trimstart": "^1.0.3" } }, "es-to-primitive": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.0.tgz", - "integrity": "sha512-qZryBOJjV//LaxLTV6UC//WewneB3LcXOL9NP++ozKVXsIIIpm/2c13UDiD9Jp2eThsecw9m3jPqDwTyobcdbg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", "dev": true, "requires": { "is-callable": "^1.1.4", @@ -4097,6 +4755,12 @@ "is-symbol": "^1.0.2" } }, + "escalade": { + "version": "3.1.1", + "resolved": 
"https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true + }, "escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", @@ -4110,248 +4774,105 @@ "dev": true }, "eslint": { - "version": "5.16.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-5.16.0.tgz", - "integrity": "sha512-S3Rz11i7c8AA5JPv7xAH+dOyq/Cu/VXHiHXBPOU1k/JAM5dXqQPt3qcrhpHSorXmrpu2g0gkIBVXAqCpzfoZIg==", + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.8.0.tgz", + "integrity": "sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig==", "dev": true, "requires": { "@babel/code-frame": "^7.0.0", - "ajv": "^6.9.1", + "ajv": "^6.10.0", "chalk": "^2.1.0", "cross-spawn": "^6.0.5", "debug": "^4.0.1", "doctrine": "^3.0.0", - "eslint-scope": "^4.0.3", - "eslint-utils": "^1.3.1", - "eslint-visitor-keys": "^1.0.0", - "espree": "^5.0.1", + "eslint-scope": "^5.0.0", + "eslint-utils": "^1.4.3", + "eslint-visitor-keys": "^1.1.0", + "espree": "^6.1.2", "esquery": "^1.0.1", "esutils": "^2.0.2", "file-entry-cache": "^5.0.1", "functional-red-black-tree": "^1.0.1", - "glob": "^7.1.2", - "globals": "^11.7.0", + "glob-parent": "^5.0.0", + "globals": "^12.1.0", "ignore": "^4.0.6", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", - "inquirer": "^6.2.2", - "js-yaml": "^3.13.0", + "inquirer": "^7.0.0", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.3.0", - "lodash": "^4.17.11", + "lodash": "^4.17.14", "minimatch": "^3.0.4", "mkdirp": "^0.5.1", "natural-compare": "^1.4.0", - "optionator": "^0.8.2", - "path-is-inside": "^1.0.2", + "optionator": "^0.8.3", "progress": "^2.0.0", "regexpp": "^2.0.1", - "semver": "^5.5.1", - "strip-ansi": "^4.0.0", - "strip-json-comments": "^2.0.1", + "semver": "^6.1.2", + "strip-ansi": "^5.2.0", + "strip-json-comments": "^3.0.1", "table": "^5.2.3", - "text-table": "^0.2.0" + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" }, "dependencies": { - "acorn": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.2.0.tgz", - "integrity": "sha512-8oe72N3WPMjA+2zVG71Ia0nXZ8DpQH+QyyHO+p06jT8eg8FGG3FbcUIi8KziHlAfheJQZeoqbvq1mQSQHXKYLw==", - "dev": true - }, - "acorn-jsx": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.0.1.tgz", - "integrity": "sha512-HJ7CfNHrfJLlNTzIEUTj43LNWGkqpRLxm3YjAlcD0ACydk9XynzYsCBHxut+iqt+1aBXkx9UP/w/ZqMr13XIzg==", - "dev": true - }, - "chardet": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", - "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", - "dev": true - }, - "doctrine": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", - "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", - "dev": true, - "requires": { - "esutils": "^2.0.2" - } - }, - "espree": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-5.0.1.tgz", - "integrity": "sha512-qWAZcWh4XE/RwzLJejfcofscgMc9CamR6Tn1+XRXNzrvUSSbiAjGOI/fggztjIi7y9VLPqnICMIPiGyr8JaZ0A==", - "dev": true, - "requires": { - "acorn": "^6.0.7", - "acorn-jsx": "^5.0.0", - "eslint-visitor-keys": "^1.0.0" - } - }, - 
"external-editor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", - "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", - "dev": true, - "requires": { - "chardet": "^0.7.0", - "iconv-lite": "^0.4.24", - "tmp": "^0.0.33" - } - }, - "file-entry-cache": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", - "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", "dev": true, "requires": { - "flat-cache": "^2.0.1" + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" } }, - "flat-cache": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", - "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "globals": { + "version": "12.4.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-12.4.0.tgz", + "integrity": "sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg==", "dev": true, "requires": { - "flatted": "^2.0.0", - "rimraf": "2.6.3", - "write": "1.0.3" + "type-fest": "^0.8.1" } }, - "ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", - "dev": true - }, "import-fresh": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.1.0.tgz", - "integrity": "sha512-PpuksHKGt8rXfWEr9m9EHIpgyyaltBy8+eF6GJM0QCAxMgxCfucMF3mjecK2QsJr0amJW7gTqh5/wht0z2UhEQ==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", "dev": true, "requires": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, - "inquirer": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-6.5.0.tgz", - "integrity": "sha512-scfHejeG/lVZSpvCXpsB4j/wQNPM5JC8kiElOI0OUTwmc1RTpXr4H32/HOlQHcZiYl2z2VElwuCVDRG8vFmbnA==", - "dev": true, - "requires": { - "ansi-escapes": "^3.2.0", - "chalk": "^2.4.2", - "cli-cursor": "^2.1.0", - "cli-width": "^2.0.0", - "external-editor": "^3.0.3", - "figures": "^2.0.0", - "lodash": "^4.17.12", - "mute-stream": "0.0.7", - "run-async": "^2.2.0", - "rxjs": "^6.4.0", - "string-width": "^2.1.0", - "strip-ansi": "^5.1.0", - "through": "^2.3.6" - }, - "dependencies": { - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "requires": { - "ansi-regex": "^4.1.0" - } - } - } - }, - "regexpp": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", - "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", - "dev": true - }, "resolve-from": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", "dev": true }, - "slice-ansi": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", - "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", - "dev": true, - "requires": { - "ansi-styles": "^3.2.0", - "astral-regex": "^1.0.0", - "is-fullwidth-code-point": "^2.0.0" - } + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true }, "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", "dev": true, "requires": { - "ansi-regex": "^3.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true - } - } - }, - "table": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/table/-/table-5.4.1.tgz", - "integrity": "sha512-E6CK1/pZe2N75rGZQotFOdmzWQ1AILtgYbMAbAjvms0S1l5IDB47zG3nCnFGB/w+7nB3vKofbLXCH7HPBo864w==", - "dev": true, - "requires": { - "ajv": "^6.9.1", - "lodash": "^4.17.11", - "slice-ansi": "^2.1.0", - "string-width": "^3.0.0" - }, - "dependencies": { - "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - } - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "requires": { - "ansi-regex": "^4.1.0" - } - } + "ansi-regex": "^4.1.0" } }, - "write": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz", - "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", - "dev": true, - "requires": { - "mkdirp": "^0.5.1" - } + "type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true } } }, @@ -4369,51 +4890,14 @@ } }, "eslint-plugin-vue": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/eslint-plugin-vue/-/eslint-plugin-vue-5.2.3.tgz", - "integrity": "sha512-mGwMqbbJf0+VvpGR5Lllq0PMxvTdrZ/ZPjmhkacrCHbubJeJOt+T6E3HUzAifa2Mxi7RSdJfC9HFpOeSYVMMIw==", + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-vue/-/eslint-plugin-vue-6.2.2.tgz", + "integrity": "sha512-Nhc+oVAHm0uz/PkJAWscwIT4ijTrK5fqNqz9QB1D35SbbuMG1uB6Yr5AJpvPSWg+WOw7nYNswerYh0kOk64gqQ==", "dev": 
true, "requires": { - "vue-eslint-parser": "^5.0.0" - }, - "dependencies": { - "acorn": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.2.0.tgz", - "integrity": "sha512-8oe72N3WPMjA+2zVG71Ia0nXZ8DpQH+QyyHO+p06jT8eg8FGG3FbcUIi8KziHlAfheJQZeoqbvq1mQSQHXKYLw==", - "dev": true - }, - "acorn-jsx": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.0.1.tgz", - "integrity": "sha512-HJ7CfNHrfJLlNTzIEUTj43LNWGkqpRLxm3YjAlcD0ACydk9XynzYsCBHxut+iqt+1aBXkx9UP/w/ZqMr13XIzg==", - "dev": true - }, - "espree": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/espree/-/espree-4.1.0.tgz", - "integrity": "sha512-I5BycZW6FCVIub93TeVY1s7vjhP9CY6cXCznIRfiig7nRviKZYdRnj/sHEWC6A7WE9RDWOFq9+7OsWSYz8qv2w==", - "dev": true, - "requires": { - "acorn": "^6.0.2", - "acorn-jsx": "^5.0.0", - "eslint-visitor-keys": "^1.0.0" - } - }, - "vue-eslint-parser": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-5.0.0.tgz", - "integrity": "sha512-JlHVZwBBTNVvzmifwjpZYn0oPWH2SgWv5dojlZBsrhablDu95VFD+hriB1rQGwbD+bms6g+rAFhQHk6+NyiS6g==", - "dev": true, - "requires": { - "debug": "^4.1.0", - "eslint-scope": "^4.0.0", - "eslint-visitor-keys": "^1.0.0", - "espree": "^4.1.0", - "esquery": "^1.0.1", - "lodash": "^4.17.11" - } - } + "natural-compare": "^1.4.0", + "semver": "^5.6.0", + "vue-eslint-parser": "^7.0.0" } }, "eslint-scope": { @@ -4433,31 +4917,31 @@ "dev": true, "requires": { "eslint-visitor-keys": "^1.1.0" - }, - "dependencies": { - "eslint-visitor-keys": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz", - "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==", - "dev": true - } } }, "eslint-visitor-keys": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz", - "integrity": "sha512-qzm/XxIbxm/FHyH341ZrbnMUpe+5Bocte9xkmFMzPMjRaZMcXww+MpBptFvtU+79L362nqiLhekCxCxDPaUMBQ==", + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", "dev": true }, "espree": { - "version": "3.5.4", - "resolved": "https://registry.npmjs.org/espree/-/espree-3.5.4.tgz", - "integrity": "sha512-yAcIQxtmMiB/jL32dzEp2enBeidsB7xWPLNiw3IIkpVds1P+h7qF9YwJq1yUNzp2OKXgAprs4F61ih66UsoD1A==", + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-6.2.1.tgz", + "integrity": "sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw==", "dev": true, - "optional": true, "requires": { - "acorn": "^5.5.0", - "acorn-jsx": "^3.0.0" + "acorn": "^7.1.1", + "acorn-jsx": "^5.2.0", + "eslint-visitor-keys": "^1.1.0" + }, + "dependencies": { + "acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true + } } }, "esprima": { @@ -4467,33 +4951,49 @@ "dev": true }, "esquery": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.0.1.tgz", - "integrity": "sha512-SmiyZ5zIWH9VM+SRUReLS5Q8a7GxtRdxEBVZpm98rJM7Sb+A9DVCndXfkeFUd3byderg+EbDkfnevfCwynWaNA==", + "version": "1.3.1", + "resolved": 
"https://registry.npmjs.org/esquery/-/esquery-1.3.1.tgz", + "integrity": "sha512-olpvt9QG0vniUBZspVRN6lwB7hOZoTRtT+jzR+tS4ffYx2mzbw+z0XCOk44aaLYKApNX5nMm+E+P6o25ip/DHQ==", "dev": true, "requires": { - "estraverse": "^4.0.0" + "estraverse": "^5.1.0" + }, + "dependencies": { + "estraverse": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", + "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", + "dev": true + } } }, "esrecurse": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz", - "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dev": true, "requires": { - "estraverse": "^4.1.0" + "estraverse": "^5.2.0" + }, + "dependencies": { + "estraverse": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", + "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", + "dev": true + } } }, "estraverse": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.2.0.tgz", - "integrity": "sha1-De4/7TH81GlhjOc0IJn8GvoL2xM=", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", "dev": true }, "esutils": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.2.tgz", - "integrity": "sha1-Cr9PHKpbyx96nYrMbepPqqBLrJs=", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "dev": true }, "etag": { @@ -4508,10 +5008,16 @@ "integrity": "sha512-z7IyloorXvKbFx9Bpie2+vMJKKx1fH1EN5yiTfp8CiLOTptSYy1g8H4yDpGlEdshL1PBiFtBHepF2cNsqeEeFQ==", "dev": true }, + "eventemitter3": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", + "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", + "dev": true + }, "events": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/events/-/events-3.0.0.tgz", - "integrity": "sha512-Dc381HFWJzEOhQ+d8pkNon++bk9h6cdAoAj4iE6Q4y6xgTzySWXlKn05/TVNpjnfRqi/X0EpJEJohPjNI3zpVA==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.2.0.tgz", + "integrity": "sha512-/46HWwbfCX2xTawVfkKLGxMifJYQBWMwY1mjywRtb4c9x8l5NP3KoJtnIOiL1hfdRkIuYhETxQlo62IF8tcnlg==", "dev": true }, "eventsource": { @@ -4687,14 +5193,13 @@ } }, "external-editor": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-2.2.0.tgz", - "integrity": "sha512-bSn6gvGxKt+b7+6TKEv1ZycHleA7aHhRHyAqJyp5pbUFuYYNIzpZnQDk7AsYckyWdEnTeAnay0aCy2aV6iTk9A==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", "dev": true, - "optional": true, "requires": { - "chardet": "^0.4.0", - 
"iconv-lite": "^0.4.17", + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", "tmp": "^0.0.33" } }, @@ -4770,9 +5275,9 @@ "dev": true }, "fast-deep-equal": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", - "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", "dev": true }, "fast-glob": { @@ -4787,12 +5292,35 @@ "is-glob": "^4.0.0", "merge2": "^1.2.3", "micromatch": "^3.1.10" + }, + "dependencies": { + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "dev": true, + "requires": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + }, + "dependencies": { + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "dev": true, + "requires": { + "is-extglob": "^2.1.0" + } + } + } + } } }, "fast-json-stable-stringify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", - "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", "dev": true }, "fast-levenshtein": { @@ -4801,68 +5329,47 @@ "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", "dev": true }, - "fastparse": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/fastparse/-/fastparse-1.1.2.tgz", - "integrity": "sha512-483XLLxTVIwWK3QTrMGRqUfUpoOs/0hbQrl2oz4J0pAcm3A3bu84wxTFqGqkJzewCLdME38xJLJAxBABfQT8sQ==", - "dev": true - }, "faye-websocket": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.10.0.tgz", - "integrity": "sha1-TkkvjQTftviQA1B/btvy1QHnxvQ=", + "version": "0.11.3", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.3.tgz", + "integrity": "sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA==", "dev": true, "requires": { "websocket-driver": ">=0.5.1" } }, "figgy-pudding": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/figgy-pudding/-/figgy-pudding-3.5.1.tgz", - "integrity": "sha512-vNKxJHTEKNThjfrdJwHc7brvM6eVevuO5nTj6ez8ZQ1qbXTvGthucRF7S4vf2cr71QVnT70V34v0S1DyQsti0w==", + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/figgy-pudding/-/figgy-pudding-3.5.2.tgz", + "integrity": "sha512-0btnI/H8f2pavGMN8w40mlSKOfTK2SVJmBfBeVIj3kNw0swwgzyRq0d5TJVOwodFmtvpPeWPN/MCcfuWF0Ezbw==", "dev": true }, "figures": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz", - "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz", + "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==", "dev": true, "requires": { "escape-string-regexp": "^1.0.5" } }, "file-entry-cache": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-2.0.0.tgz", - 
"integrity": "sha1-w5KZDD5oR4PYOLjISkXYoEhFg2E=", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", + "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", "dev": true, - "optional": true, "requires": { - "flat-cache": "^1.2.1", - "object-assign": "^4.0.1" + "flat-cache": "^2.0.1" } }, "file-loader": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-3.0.1.tgz", - "integrity": "sha512-4sNIOXgtH/9WZq4NvlfU3Opn5ynUsqBwSLyM+I7UOwdGigTBYfVVQEwe/msZNX/j4pCJTIM14Fsw66Svo1oVrw==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-4.3.0.tgz", + "integrity": "sha512-aKrYPYjF1yG3oX0kWRrqrSMfgftm7oJW5M+m4owoldH5C51C0RkIwB++JbRvEW3IU6/ZG5n8UvEcdgwOt2UOWA==", "dev": true, "requires": { - "loader-utils": "^1.0.2", - "schema-utils": "^1.0.0" - }, - "dependencies": { - "schema-utils": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", - "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", - "dev": true, - "requires": { - "ajv": "^6.1.0", - "ajv-errors": "^1.0.0", - "ajv-keywords": "^3.1.0" - } - } + "loader-utils": "^1.2.3", + "schema-utils": "^2.5.0" } }, "filesize": { @@ -4926,61 +5433,53 @@ } } }, - "find-babel-config": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/find-babel-config/-/find-babel-config-1.2.0.tgz", - "integrity": "sha512-jB2CHJeqy6a820ssiqwrKMeyC6nNdmrcgkKWJWmpoxpE8RKciYJXCcXRq1h2AzCo5I5BJeN2tkGEO3hLTuePRA==", - "dev": true, - "requires": { - "json5": "^0.5.1", - "path-exists": "^3.0.0" - }, - "dependencies": { - "json5": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/json5/-/json5-0.5.1.tgz", - "integrity": "sha1-Hq3nrMASA0rYTiOWdn6tn6VJWCE=", - "dev": true - } - } - }, "find-cache-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", - "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz", + "integrity": "sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==", "dev": true, "requires": { "commondir": "^1.0.1", - "make-dir": "^2.0.0", - "pkg-dir": "^3.0.0" + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" } }, "find-up": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", - "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", "dev": true, "requires": { - "locate-path": "^2.0.0" + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" } }, "flat-cache": { - "version": "1.3.4", - "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-1.3.4.tgz", - "integrity": "sha512-VwyB3Lkgacfik2vhqR4uv2rvebqmDvFu4jlN/C1RzWoJEo8I7z4Q404oiqYCkq41mni8EzQnm95emU9seckwtg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", + "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", "dev": true, - "optional": true, "requires": { - "circular-json": 
"^0.3.1", - "graceful-fs": "^4.1.2", - "rimraf": "~2.6.2", - "write": "^0.2.1" + "flatted": "^2.0.0", + "rimraf": "2.6.3", + "write": "1.0.3" + }, + "dependencies": { + "rimraf": { + "version": "2.6.3", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", + "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "dev": true, + "requires": { + "glob": "^7.1.3" + } + } } }, "flatted": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.1.tgz", - "integrity": "sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.2.tgz", + "integrity": "sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA==", "dev": true }, "flush-write-stream": { @@ -4994,24 +5493,10 @@ } }, "follow-redirects": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.7.0.tgz", - "integrity": "sha512-m/pZQy4Gj287eNy94nivy5wchN3Kp+Q5WgUPNy5lJSZ3sgkVKSYV/ZChMAQVIgx1SqfZ2zBZtPA2YlXIWxxJOQ==", - "dev": true, - "requires": { - "debug": "^3.2.6" - }, - "dependencies": { - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", - "dev": true, - "requires": { - "ms": "^2.1.1" - } - } - } + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.13.2.tgz", + "integrity": "sha512-6mPTgLxYm3r6Bkkg0vNM0HTjfGrOEtsfbhagQvbxDEsEkpNhw582upBaoRZylzen6krEmxXJgt9Ju6HiI4O7BA==", + "dev": true }, "for-in": { "version": "1.0.2", @@ -5024,626 +5509,94 @@ "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=", "dev": true - }, - "form-data": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", - "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", - "dev": true, - "requires": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.6", - "mime-types": "^2.1.12" - } - }, - "forwarded": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", - "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=", - "dev": true - }, - "fragment-cache": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", - "integrity": "sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=", - "dev": true, - "requires": { - "map-cache": "^0.2.2" - } - }, - "fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=", - "dev": true - }, - "from2": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", - "integrity": "sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8=", - "dev": true, - "requires": { - "inherits": "^2.0.1", - "readable-stream": "^2.0.0" - } - }, - "fs-extra": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz", - "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "jsonfile": "^4.0.0", - "universalify": "^0.1.0" - } - }, - 
"fs-write-stream-atomic": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz", - "integrity": "sha1-tH31NJPvkR33VzHnCp3tAYnbQMk=", - "dev": true, - "requires": { - "graceful-fs": "^4.1.2", - "iferr": "^0.1.5", - "imurmurhash": "^0.1.4", - "readable-stream": "1 || 2" - } - }, - "fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", - "dev": true - }, - "fsevents": { - "version": "1.2.9", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.9.tgz", - "integrity": "sha512-oeyj2H3EjjonWcFjD5NvZNE9Rqe4UW+nQBU2HNeKw0koVLEFIhtyETyAakeAM3de7Z/SW5kcA+fZUait9EApnw==", - "dev": true, - "optional": true, - "requires": { - "nan": "^2.12.1", - "node-pre-gyp": "^0.12.0" - }, - "dependencies": { - "abbrev": { - "version": "1.1.1", - "bundled": true, - "dev": true, - "optional": true - }, - "ansi-regex": { - "version": "2.1.1", - "bundled": true, - "dev": true, - "optional": true - }, - "aproba": { - "version": "1.2.0", - "bundled": true, - "dev": true, - "optional": true - }, - "are-we-there-yet": { - "version": "1.1.5", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "delegates": "^1.0.0", - "readable-stream": "^2.0.6" - } - }, - "balanced-match": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "optional": true - }, - "brace-expansion": { - "version": "1.1.11", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "chownr": { - "version": "1.1.1", - "bundled": true, - "dev": true, - "optional": true - }, - "code-point-at": { - "version": "1.1.0", - "bundled": true, - "dev": true, - "optional": true - }, - "concat-map": { - "version": "0.0.1", - "bundled": true, - "dev": true, - "optional": true - }, - "console-control-strings": { - "version": "1.1.0", - "bundled": true, - "dev": true, - "optional": true - }, - "core-util-is": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "optional": true - }, - "debug": { - "version": "4.1.1", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "ms": "^2.1.1" - } - }, - "deep-extend": { - "version": "0.6.0", - "bundled": true, - "dev": true, - "optional": true - }, - "delegates": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "optional": true - }, - "detect-libc": { - "version": "1.0.3", - "bundled": true, - "dev": true, - "optional": true - }, - "fs-minipass": { - "version": "1.2.5", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "minipass": "^2.2.1" - } - }, - "fs.realpath": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "optional": true - }, - "gauge": { - "version": "2.7.4", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "aproba": "^1.0.3", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.0", - "object-assign": "^4.1.0", - "signal-exit": "^3.0.0", - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1", - "wide-align": "^1.1.0" - } - }, - "glob": { - "version": "7.1.3", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.4", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "has-unicode": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "optional": true - }, - "iconv-lite": { - "version": "0.4.24", - 
"bundled": true, - "dev": true, - "optional": true, - "requires": { - "safer-buffer": ">= 2.1.2 < 3" - } - }, - "ignore-walk": { - "version": "3.0.1", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "minimatch": "^3.0.4" - } - }, - "inflight": { - "version": "1.0.6", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "inherits": { - "version": "2.0.3", - "bundled": true, - "dev": true, - "optional": true - }, - "ini": { - "version": "1.3.5", - "bundled": true, - "dev": true, - "optional": true - }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "isarray": { - "version": "1.0.0", - "bundled": true, - "dev": true, - "optional": true - }, - "minimatch": { - "version": "3.0.4", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "brace-expansion": "^1.1.7" - } - }, - "minimist": { - "version": "0.0.8", - "bundled": true, - "dev": true, - "optional": true - }, - "minipass": { - "version": "2.3.5", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "safe-buffer": "^5.1.2", - "yallist": "^3.0.0" - } - }, - "minizlib": { - "version": "1.2.1", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "minipass": "^2.2.1" - } - }, - "mkdirp": { - "version": "0.5.1", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "minimist": "0.0.8" - } - }, - "ms": { - "version": "2.1.1", - "bundled": true, - "dev": true, - "optional": true - }, - "needle": { - "version": "2.3.0", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "debug": "^4.1.0", - "iconv-lite": "^0.4.4", - "sax": "^1.2.4" - } - }, - "node-pre-gyp": { - "version": "0.12.0", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "detect-libc": "^1.0.2", - "mkdirp": "^0.5.1", - "needle": "^2.2.1", - "nopt": "^4.0.1", - "npm-packlist": "^1.1.6", - "npmlog": "^4.0.2", - "rc": "^1.2.7", - "rimraf": "^2.6.1", - "semver": "^5.3.0", - "tar": "^4" - } - }, - "nopt": { - "version": "4.0.1", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "abbrev": "1", - "osenv": "^0.1.4" - } - }, - "npm-bundled": { - "version": "1.0.6", - "bundled": true, - "dev": true, - "optional": true - }, - "npm-packlist": { - "version": "1.4.1", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "ignore-walk": "^3.0.1", - "npm-bundled": "^1.0.1" - } - }, - "npmlog": { - "version": "4.1.2", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "are-we-there-yet": "~1.1.2", - "console-control-strings": "~1.1.0", - "gauge": "~2.7.3", - "set-blocking": "~2.0.0" - } - }, - "number-is-nan": { - "version": "1.0.1", - "bundled": true, - "dev": true, - "optional": true - }, - "object-assign": { - "version": "4.1.1", - "bundled": true, - "dev": true, - "optional": true - }, - "once": { - "version": "1.4.0", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "wrappy": "1" - } - }, - "os-homedir": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "optional": true - }, - "os-tmpdir": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "optional": true - }, - "osenv": { - "version": "0.1.5", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "os-homedir": "^1.0.0", - "os-tmpdir": "^1.0.0" - } - }, - "path-is-absolute": { - "version": "1.0.1", - "bundled": 
true, - "dev": true, - "optional": true - }, - "process-nextick-args": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "optional": true - }, - "rc": { - "version": "1.2.8", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "deep-extend": "^0.6.0", - "ini": "~1.3.0", - "minimist": "^1.2.0", - "strip-json-comments": "~2.0.1" - }, - "dependencies": { - "minimist": { - "version": "1.2.0", - "bundled": true, - "dev": true, - "optional": true - } - } - }, - "readable-stream": { - "version": "2.3.6", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "rimraf": { - "version": "2.6.3", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "glob": "^7.1.3" - } - }, - "safe-buffer": { - "version": "5.1.2", - "bundled": true, - "dev": true, - "optional": true - }, - "safer-buffer": { - "version": "2.1.2", - "bundled": true, - "dev": true, - "optional": true - }, - "sax": { - "version": "1.2.4", - "bundled": true, - "dev": true, - "optional": true - }, - "semver": { - "version": "5.7.0", - "bundled": true, - "dev": true, - "optional": true - }, - "set-blocking": { - "version": "2.0.0", - "bundled": true, - "dev": true, - "optional": true - }, - "signal-exit": { - "version": "3.0.2", - "bundled": true, - "dev": true, - "optional": true - }, - "string-width": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - }, - "string_decoder": { - "version": "1.1.1", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "safe-buffer": "~5.1.0" - } - }, - "strip-ansi": { - "version": "3.0.1", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "ansi-regex": "^2.0.0" - } - }, - "strip-json-comments": { - "version": "2.0.1", - "bundled": true, - "dev": true, - "optional": true - }, - "tar": { - "version": "4.4.8", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "chownr": "^1.1.1", - "fs-minipass": "^1.2.5", - "minipass": "^2.3.4", - "minizlib": "^1.1.1", - "mkdirp": "^0.5.0", - "safe-buffer": "^5.1.2", - "yallist": "^3.0.2" - } - }, - "util-deprecate": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "optional": true - }, - "wide-align": { - "version": "1.1.3", - "bundled": true, - "dev": true, - "optional": true, - "requires": { - "string-width": "^1.0.2 || 2" - } - }, - "wrappy": { - "version": "1.0.2", - "bundled": true, - "dev": true, - "optional": true - }, - "yallist": { - "version": "3.0.3", - "bundled": true, - "dev": true, - "optional": true - } + }, + "form-data": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", + "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", + "dev": true, + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "forwarded": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", + "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=", + "dev": true + }, + "fragment-cache": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", + "integrity": 
"sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=", + "dev": true, + "requires": { + "map-cache": "^0.2.2" + } + }, + "fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=", + "dev": true + }, + "from2": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", + "integrity": "sha1-i/tVAr3kpNNs/e6gB/zKIdfjgq8=", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "readable-stream": "^2.0.0" + } + }, + "fs-extra": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz", + "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + } + }, + "fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } + }, + "fs-write-stream-atomic": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/fs-write-stream-atomic/-/fs-write-stream-atomic-1.0.10.tgz", + "integrity": "sha1-tH31NJPvkR33VzHnCp3tAYnbQMk=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "iferr": "^0.1.5", + "imurmurhash": "^0.1.4", + "readable-stream": "1 || 2" } }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "fsevents": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.1.tgz", + "integrity": "sha512-YR47Eg4hChJGAB1O3yEAOkGO+rlzutoICGqGo9EZ4lKWokzZRSyIW1QmTzqjtw8MJdj9srP869CuWw/hyzSiBw==", + "dev": true, + "optional": true + }, "function-bind": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", @@ -5656,12 +5609,29 @@ "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", "dev": true }, + "gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true + }, "get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", "dev": true }, + "get-intrinsic": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", + "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "dev": true, + "requires": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + } + }, "get-stream": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", @@ -5687,9 +5657,9 @@ } }, "glob": { - "version": "7.1.4", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.4.tgz", - "integrity": "sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A==", + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": 
"sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", "dev": true, "requires": { "fs.realpath": "^1.0.0", @@ -5701,24 +5671,12 @@ } }, "glob-parent": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", - "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz", + "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==", "dev": true, "requires": { - "is-glob": "^3.1.0", - "path-dirname": "^1.0.0" - }, - "dependencies": { - "is-glob": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", - "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", - "dev": true, - "requires": { - "is-extglob": "^2.1.0" - } - } + "is-glob": "^4.0.1" } }, "glob-to-regexp": { @@ -5747,20 +5705,12 @@ "ignore": "^4.0.3", "pify": "^4.0.1", "slash": "^2.0.0" - }, - "dependencies": { - "ignore": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", - "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", - "dev": true - } } }, "graceful-fs": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.0.tgz", - "integrity": "sha512-jpSvDPV4Cq/bgtpndIWbI5hmYxhQGHPC4d4cqBPb4DLniCfhJokdXhwhaDuLBGLQdvvRum/UiX6ECVIPvDXqdg==", + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.4.tgz", + "integrity": "sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw==", "dev": true }, "gzip-size": { @@ -5774,9 +5724,9 @@ } }, "handle-thing": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.0.tgz", - "integrity": "sha512-d4sze1JNC454Wdo2fkuyzCr6aHcbL6PGGuFAz0Li/NcOm1tCHGnWDRmJP85dh9IhQErTc2svWFEX5xHIOo//kQ==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz", + "integrity": "sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==", "dev": true }, "har-schema": { @@ -5786,12 +5736,12 @@ "dev": true }, "har-validator": { - "version": "5.1.3", - "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", - "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", + "version": "5.1.5", + "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz", + "integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==", "dev": true, "requires": { - "ajv": "^6.5.5", + "ajv": "^6.12.3", "har-schema": "^2.0.0" } }, @@ -5804,23 +5754,6 @@ "function-bind": "^1.1.1" } }, - "has-ansi": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/has-ansi/-/has-ansi-2.0.0.tgz", - "integrity": "sha1-NPUEnOHs3ysGSa8+8k5F7TVBbZE=", - "dev": true, - "requires": { - "ansi-regex": "^2.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", - "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", - "dev": true - } - } - }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -5828,9 +5761,9 @@ "dev": true }, "has-symbols": { - "version": "1.0.0", - 
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.0.tgz", - "integrity": "sha1-uhqPGvKg/DllD1yFA2dwQSIGO0Q=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", "dev": true }, "has-value": { @@ -5866,19 +5799,39 @@ } }, "hash-base": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", - "integrity": "sha1-X8hoaEfs1zSZQDMZprCj8/auSRg=", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.1.0.tgz", + "integrity": "sha512-1nmYp/rhMDiE7AYkDw+lLwlAzz0AntGIe51F3RfFfEqyQ3feY2eI/NcwC6umIQVOASPMsWJLJScWKSSvzL9IVA==", "dev": true, "requires": { - "inherits": "^2.0.1", - "safe-buffer": "^5.0.1" + "inherits": "^2.0.4", + "readable-stream": "^3.6.0", + "safe-buffer": "^5.2.0" + }, + "dependencies": { + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + } } }, "hash-sum": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/hash-sum/-/hash-sum-1.0.2.tgz", - "integrity": "sha1-M7QHd3VMZDJXPBIMw4CLvRDUfwQ=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/hash-sum/-/hash-sum-2.0.0.tgz", + "integrity": "sha512-WdZTbAByD+pHfl/g9QSsBIIwy8IT+EsPiKDs0KNX+zSHhdDLFKdZu0BQHljvO+0QI/BasbMSUa8wYNCZTvhslg==", "dev": true }, "hash.js": { @@ -5904,9 +5857,9 @@ "dev": true }, "highlight.js": { - "version": "9.15.8", - "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-9.15.8.tgz", - "integrity": "sha512-RrapkKQWwE+wKdF73VsOa2RQdIoO3mxwJ4P8mhbI6KYJUraUHRKM5w5zQQKXNk0xNL4UVRdulV9SBJcmzJNzVA==", + "version": "10.5.0", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-10.5.0.tgz", + "integrity": "sha512-xTmvd9HiIHR6L53TMC7TKolEj65zG1XU+Onr8oi86mYa+nLcIbxTTWkpW7CsEwv/vK7u1zb8alZIMLDqqN6KTw==", "dev": true }, "hmac-drbg": { @@ -5927,9 +5880,9 @@ "dev": true }, "hosted-git-info": { - "version": "2.7.1", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.7.1.tgz", - "integrity": "sha512-7T/BxH19zbcCTa8XkMlbK5lTo1WtgkFi3GvdWEyNuc4Vex7/9Dqbnpsf4JMydcfj9HCg4zUWFTL3Za6lapg5/w==", + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.8.tgz", + "integrity": "sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg==", "dev": true }, "hpack.js": { @@ -5963,9 +5916,9 @@ "dev": true }, "html-entities": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-1.2.1.tgz", - "integrity": "sha1-DfKTUfByEWNRXfueVUPl9u7VFi8=", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/html-entities/-/html-entities-1.4.0.tgz", + "integrity": "sha512-8nxjcBcd8wovbeKx7h3wTji4e6+rhaVuPNpMqwWgnHh+N9ToqsCs6XztWRBPQ+UtzsoMAdKZtUENoVzU/EMtZA==", "dev": true }, "html-minifier": { @@ -5992,9 +5945,9 @@ } }, 
"html-tags": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-2.0.0.tgz", - "integrity": "sha1-ELMKOGCF9Dzt41PMj6fLDe7qZos=", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/html-tags/-/html-tags-3.1.0.tgz", + "integrity": "sha512-1qYz89hW3lFDEazhjW0yVAV87lw8lVkrJocr72XmBkMKsoSVJCQx3W8BXsC7hO2qAt8BoVjYjtAcZ9perqGnNg==", "dev": true }, "html-webpack-plugin": { @@ -6018,6 +5971,12 @@ "integrity": "sha512-+hN/Zh2D08Mx65pZ/4g5bsmNiZUuChDiQfTUQ7qJr4/kuopCr88xZsAXv6mBoZEsUI4OuGHlX59qE94K2mMW8Q==", "dev": true }, + "emojis-list": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-2.1.0.tgz", + "integrity": "sha1-TapNnbAPmBmIDHn6RXrlsJof04k=", + "dev": true + }, "json5": { "version": "0.5.1", "resolved": "https://registry.npmjs.org/json5/-/json5-0.5.1.tgz", @@ -6035,6 +5994,16 @@ "json5": "^0.5.0", "object-assign": "^4.0.1" } + }, + "util.promisify": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/util.promisify/-/util.promisify-1.0.0.tgz", + "integrity": "sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA==", + "dev": true, + "requires": { + "define-properties": "^1.1.2", + "object.getownpropertydescriptors": "^2.0.3" + } } } }, @@ -6052,10 +6021,16 @@ "readable-stream": "^3.1.1" }, "dependencies": { + "entities": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/entities/-/entities-1.1.2.tgz", + "integrity": "sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==", + "dev": true + }, "readable-stream": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", - "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", "dev": true, "requires": { "inherits": "^2.0.3", @@ -6093,9 +6068,9 @@ } }, "http-parser-js": { - "version": "0.4.10", - "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.4.10.tgz", - "integrity": "sha1-ksnBN0w1CF912zWexWzCV8u5P6Q=", + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.3.tgz", + "integrity": "sha512-t7hjvef/5HEK7RWTdUzVUhl8zkEu+LlaE0IYzdMuvbSDipxBRpOn4Uhw8ZyECEa808iVT8XCjzo6xmYt4CiLZg==", "dev": true }, "http-proxy": { @@ -6107,14 +6082,6 @@ "eventemitter3": "^4.0.0", "follow-redirects": "^1.0.0", "requires-port": "^1.0.0" - }, - "dependencies": { - "eventemitter3": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", - "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", - "dev": true - } } }, "http-proxy-middleware": { @@ -6146,6 +6113,12 @@ "integrity": "sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=", "dev": true }, + "human-signals": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz", + "integrity": "sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw==", + "dev": true + }, "iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", @@ -6155,44 +6128,19 @@ "safer-buffer": ">= 2.1.2 < 3" } }, - 
"icss-replace-symbols": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/icss-replace-symbols/-/icss-replace-symbols-1.1.0.tgz", - "integrity": "sha1-Bupvg2ead0njhs/h/oEq5dsiPe0=", - "dev": true - }, "icss-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-2.1.0.tgz", - "integrity": "sha1-g/Cg7DeL8yRheLbCrZE28TWxyWI=", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/icss-utils/-/icss-utils-4.1.1.tgz", + "integrity": "sha512-4aFq7wvWyMHKgxsH8QQtGpvbASCf+eM3wPRLI6R+MgAnTCZ6STYsRvttLvRWK0Nfif5piF394St3HeJDaljGPA==", "dev": true, "requires": { - "postcss": "^6.0.1" - }, - "dependencies": { - "postcss": { - "version": "6.0.23", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", - "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", - "dev": true, - "requires": { - "chalk": "^2.4.1", - "source-map": "^0.6.1", - "supports-color": "^5.4.0" - } - }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true - } + "postcss": "^7.0.14" } }, "ieee754": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", - "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", "dev": true }, "iferr": { @@ -6202,9 +6150,9 @@ "dev": true }, "ignore": { - "version": "3.3.10", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-3.3.10.tgz", - "integrity": "sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==", + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", "dev": true }, "import-cwd": { @@ -6224,23 +6172,6 @@ "requires": { "caller-path": "^2.0.0", "resolve-from": "^3.0.0" - }, - "dependencies": { - "caller-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/caller-path/-/caller-path-2.0.0.tgz", - "integrity": "sha1-Ro+DBE42mrIBD6xfBs7uFbsssfQ=", - "dev": true, - "requires": { - "caller-callsite": "^2.0.0" - } - }, - "resolve-from": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", - "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=", - "dev": true - } } }, "import-from": { @@ -6250,14 +6181,6 @@ "dev": true, "requires": { "resolve-from": "^3.0.0" - }, - "dependencies": { - "resolve-from": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", - "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=", - "dev": true - } } }, "import-local": { @@ -6268,6 +6191,51 @@ "requires": { "pkg-dir": "^3.0.0", "resolve-cwd": "^2.0.0" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + 
"version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "dev": true, + "requires": { + "find-up": "^3.0.0" + } + } } }, "imurmurhash": { @@ -6276,12 +6244,24 @@ "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", "dev": true }, + "indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true + }, "indexes-of": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/indexes-of/-/indexes-of-1.0.1.tgz", "integrity": "sha1-8w9xbI4r00bHtn0985FVZqfAVgc=", "dev": true }, + "infer-owner": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", + "integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==", + "dev": true + }, "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -6296,46 +6276,110 @@ "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, - "inquirer": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-3.3.0.tgz", - "integrity": "sha512-h+xtnyk4EwKvFWHrUYsWErEVR+igKtLdchu+o0Z1RL7VU/jVMFbYir2bp6bAj8efFNxWqHX0dIss6fJQ+/+qeQ==", - "dev": true, - "optional": true, - "requires": { - "ansi-escapes": "^3.0.0", - "chalk": "^2.0.0", - "cli-cursor": "^2.1.0", - "cli-width": "^2.0.0", - "external-editor": "^2.0.4", - "figures": "^2.0.0", - "lodash": "^4.3.0", - "mute-stream": "0.0.7", - "run-async": "^2.2.0", - "rx-lite": "^4.0.8", - "rx-lite-aggregates": "^4.0.8", - "string-width": "^2.1.0", - "strip-ansi": "^4.0.0", + "dev": true + }, + "inquirer": { + "version": "7.3.3", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.3.3.tgz", + "integrity": "sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==", + "dev": true, + "requires": { + "ansi-escapes": "^4.2.1", + "chalk": "^4.1.0", + "cli-cursor": "^3.1.0", + "cli-width": "^3.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.19", + "mute-stream": "0.0.8", + "run-async": "^2.4.0", + "rxjs": "^6.6.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0", "through": "^2.3.6" }, "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, - "optional": true + "requires": { + "color-convert": "^2.0.1" + } }, - "strip-ansi": { + "chalk": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz", + "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + } + }, + "cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "requires": { + "restore-cursor": "^3.1.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true + }, + "onetime": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } + }, + "restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "requires": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + } + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, - "optional": true, "requires": { - "ansi-regex": "^3.0.0" + "has-flag": "^4.0.0" } } } @@ -6362,19 +6406,10 @@ } } }, - "invariant": { - "version": "2.2.4", - "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", - "integrity": "sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==", - "dev": true, - "requires": { - "loose-envify": "^1.0.0" - } - }, - "invert-kv": { - 
"version": "2.0.0", - "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-2.0.0.tgz", - "integrity": "sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA==", + "interpret": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", + "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", "dev": true }, "ip": { @@ -6390,9 +6425,9 @@ "dev": true }, "ipaddr.js": { - "version": "1.9.0", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.0.tgz", - "integrity": "sha512-M4Sjn6N/+O6/IXSJseKqHoFc+5FdGJ22sXqnjTpdZweHK64MzEPAyQZyEU3R/KRv2GLoa7nNtg/C2Ev6m7z+eA==", + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", "dev": true }, "is-absolute-url": { @@ -6421,6 +6456,15 @@ } } }, + "is-arguments": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.1.0.tgz", + "integrity": "sha512-1Ij4lOMPl/xB5kBDn7I+b2ttPMKa8szhEIrXDuXQD/oe3HJLTLhqhgGspwgyGd6MOywBUqVvYicF72lkgDnIHg==", + "dev": true, + "requires": { + "call-bind": "^1.0.0" + } + }, "is-arrayish": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", @@ -6428,12 +6472,12 @@ "dev": true }, "is-binary-path": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", - "integrity": "sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", "dev": true, "requires": { - "binary-extensions": "^1.0.0" + "binary-extensions": "^2.0.0" } }, "is-buffer": { @@ -6443,9 +6487,9 @@ "dev": true }, "is-callable": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.1.4.tgz", - "integrity": "sha512-r5p9sxJjYnArLjObpjA4xu5EKI3CuKHkJXMhT7kwbpUyIFD1n5PMAsoPvWnvtZiNz7LjkYDRZhd7FlI0eMijEA==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.3.tgz", + "integrity": "sha512-J1DcMe8UYTBSrKezuIUTUwjXsho29693unXM2YhJUTR2txK/eG47bvNa/wipPFmZFgr/N6f1GA66dv0mEyTIyQ==", "dev": true }, "is-ci": { @@ -6471,6 +6515,15 @@ "rgba-regex": "^1.0.0" } }, + "is-core-module": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz", + "integrity": "sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, "is-data-descriptor": { "version": "0.1.4", "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", @@ -6492,9 +6545,9 @@ } }, "is-date-object": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.1.tgz", - "integrity": "sha1-mqIOtq7rv/d/vTPnTKAbM1gdOhY=", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", "dev": true }, "is-descriptor": { @@ -6522,6 +6575,12 @@ "integrity": "sha1-YTObbyR1/Hcv2cnYP1yFddwVSuE=", "dev": true }, + "is-docker": { + "version": 
"2.1.1", + "resolved": "https://registry.npmjs.org/is-docker/-/is-docker-2.1.1.tgz", + "integrity": "sha512-ZOoqiXfEwtGknTiuDEy8pN2CfE3TxMHprvNer1mXiqwkOT77Rw3YVrUQ52EqAOU3QAWDQ+bQdx7HJzrv7LS2Hw==", + "dev": true + }, "is-extendable": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", @@ -6535,9 +6594,9 @@ "dev": true }, "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true }, "is-glob": { @@ -6549,6 +6608,12 @@ "is-extglob": "^2.1.1" } }, + "is-negative-zero": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz", + "integrity": "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==", + "dev": true + }, "is-number": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", @@ -6570,9 +6635,9 @@ } }, "is-obj": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-1.0.1.tgz", - "integrity": "sha1-PkcprB9f3gJc19g6iW2rn09n2w8=", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", "dev": true }, "is-path-cwd": { @@ -6614,19 +6679,14 @@ "isobject": "^3.0.1" } }, - "is-promise": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", - "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=", - "dev": true - }, "is-regex": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz", - "integrity": "sha1-VRdIm1RwkbCTDglWVM7SXul+lJE=", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.2.tgz", + "integrity": "sha512-axvdhb5pdhEVThqJzYXwMlVuZwC+FF2DpcOhTS+y/8jVq4trxyPgfcwIxIKiyeuLlSQYKkmUaPQJ8ZE4yNKXDg==", "dev": true, "requires": { - "has": "^1.0.1" + "call-bind": "^1.0.2", + "has-symbols": "^1.0.1" } }, "is-resolvable": { @@ -6651,12 +6711,12 @@ } }, "is-symbol": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.2.tgz", - "integrity": "sha512-HS8bZ9ox60yCJLH9snBpIwv9pYUAkcuLhSA1oero1UB5y9aiQpRA8y2ex945AOtCZL1lJDeIk3G5LthswI46Lw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", + "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", "dev": true, "requires": { - "has-symbols": "^1.0.0" + "has-symbols": "^1.0.1" } }, "is-typedarray": { @@ -6702,30 +6762,51 @@ "dev": true }, "javascript-stringify": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-1.6.0.tgz", - "integrity": "sha1-FC0RHzpuPa6PSpr9d9RYVbWpzOM=", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/javascript-stringify/-/javascript-stringify-2.0.1.tgz", + "integrity": "sha512-yV+gqbd5vaOYjqlbk16EG89xB5udgjqQF3C5FAORDg4f/IS1Yc5ERCv5e/57yBcfJYw05V5JyIXabhwb75Xxow==", "dev": true }, - "js-levenshtein": { - "version": "1.1.6", - "resolved": 
"https://registry.npmjs.org/js-levenshtein/-/js-levenshtein-1.1.6.tgz", - "integrity": "sha512-X2BB11YZtrRqY4EnQcLX5Rh373zbK4alC1FW7D7MBhL2gtcC17cTnr6DmfHZeS0s2rTHjUTMMHfG7gO8SSdw+g==", - "dev": true + "jest-worker": { + "version": "25.5.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-25.5.0.tgz", + "integrity": "sha512-/dsSmUkIy5EBGfv/IjjqmFxrNAUpBERfGs1oHROyD7yxjG/w+t0GOJDX8O1k32ySmd7+a5IhnJU2qQFcJ4n1vw==", + "dev": true, + "requires": { + "merge-stream": "^2.0.0", + "supports-color": "^7.0.0" + }, + "dependencies": { + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } }, "js-message": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/js-message/-/js-message-1.0.5.tgz", - "integrity": "sha1-IwDSSxrwjondCVvBpMnJz8uJLRU=", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/js-message/-/js-message-1.0.7.tgz", + "integrity": "sha512-efJLHhLjIyKRewNS9EGZ4UpI8NguuL6fKkhRxVuMmrGV2xN/0APGdQYwLFky5w9naebSZ0OwAGp0G6/2Cg90rA==", "dev": true }, "js-queue": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/js-queue/-/js-queue-2.0.0.tgz", - "integrity": "sha1-NiITz4YPRo8BJfxslqvBdCUx+Ug=", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/js-queue/-/js-queue-2.0.2.tgz", + "integrity": "sha512-pbKLsbCfi7kriM3s1J4DDCo7jQkI58zPLHi0heXPzPlj0hjUsm+FesPUbE0DSbIVIK503A36aUBoCN7eMFedkA==", "dev": true, "requires": { - "easy-stack": "^1.0.0" + "easy-stack": "^1.0.1" } }, "js-tokens": { @@ -6735,9 +6816,9 @@ "dev": true }, "js-yaml": { - "version": "3.13.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", - "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", "dev": true, "requires": { "argparse": "^1.0.7", @@ -6762,6 +6843,12 @@ "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", "dev": true }, + "json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", @@ -6793,12 +6880,12 @@ "dev": true }, "json5": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.1.0.tgz", - "integrity": "sha512-8Mh9h6xViijj36g7Dxi+Y4S6hNGV96vcJZr/SrlHh1LR/pEn/8j/+qIBbs44YKl69Lrfctp4QD+AdWLTMqEZAQ==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", + "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", "dev": true, "requires": { - 
"minimist": "^1.2.0" + "minimist": "^1.2.5" } }, "jsonfile": { @@ -6810,12 +6897,6 @@ "graceful-fs": "^4.1.6" } }, - "jsonify": { - "version": "0.0.0", - "resolved": "https://registry.npmjs.org/jsonify/-/jsonify-0.0.0.tgz", - "integrity": "sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=", - "dev": true - }, "jsprim": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", @@ -6835,9 +6916,15 @@ "dev": true }, "kind-of": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.2.tgz", - "integrity": "sha512-s5kLOcnH0XqDO+FvuaLX8DDjZ18CGFk7VygH40QoKPUQhW4e2rvM0rwUq0t8IQDOwYSeLK01U90OjzBTme2QqA==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true + }, + "klona": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/klona/-/klona-2.0.4.tgz", + "integrity": "sha512-ZRbnvdg/NxqzC7L9Uyqzf4psi1OM4Cuc+sJAkQPjO6XkQIJTNbfK2Rsmbw8fx1p2mkZdp2FZYo2+LwXYY/uwIA==", "dev": true }, "launch-editor": { @@ -6859,15 +6946,6 @@ "launch-editor": "^2.2.1" } }, - "lcid": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/lcid/-/lcid-2.0.0.tgz", - "integrity": "sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA==", - "dev": true, - "requires": { - "invert-kv": "^2.0.0" - } - }, "levn": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", @@ -6885,13 +6963,13 @@ "dev": true }, "loader-fs-cache": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/loader-fs-cache/-/loader-fs-cache-1.0.2.tgz", - "integrity": "sha512-70IzT/0/L+M20jUlEqZhZyArTU6VKLRTYRDAYN26g4jfzpJqjipLL3/hgYpySqI9PwsVRHHFja0LfEmsx9X2Cw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/loader-fs-cache/-/loader-fs-cache-1.0.3.tgz", + "integrity": "sha512-ldcgZpjNJj71n+2Mf6yetz+c9bM4xpKtNds4LbqXzU/PTdeAX0g3ytnU1AJMEcTk2Lex4Smpe3Q/eCTsvUBxbA==", "dev": true, "requires": { "find-cache-dir": "^0.1.1", - "mkdirp": "0.5.1" + "mkdirp": "^0.5.1" }, "dependencies": { "find-cache-dir": { @@ -6942,13 +7020,13 @@ "dev": true }, "loader-utils": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.2.3.tgz", - "integrity": "sha512-fkpz8ejdnEMG3s37wGL07iSBDg99O9D5yflE9RGNH3hRdx9SOwYfnGYdZOUIZitN8E+E2vkq3MUMYMvPYl5ZZA==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-1.4.0.tgz", + "integrity": "sha512-qH0WSMBtn/oHuwjy/NucEgbx5dbxxnxup9s4PVXJUDHZBQY+s0NWA9rJf53RBnQZxfch7euUui7hpoAPvALZdA==", "dev": true, "requires": { "big.js": "^5.2.2", - "emojis-list": "^2.0.0", + "emojis-list": "^3.0.0", "json5": "^1.0.1" }, "dependencies": { @@ -6964,25 +7042,18 @@ } }, "locate-path": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", - "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", "dev": true, "requires": { - "p-locate": "^2.0.0", - "path-exists": "^3.0.0" + "p-locate": "^4.1.0" } }, "lodash": { - "version": "4.17.19", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.19.tgz", - "integrity": 
"sha512-JNvd8XER9GQX0v2qJgsaN/mzFCNA5BRe/j8JN9d+tWyGLSodKQHKFicdwNYzWwI3wjRnaKPsGj1XkBjx/F96DQ==", - "dev": true - }, - "lodash.clonedeep": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", - "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=", + "version": "4.17.20", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz", + "integrity": "sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==", "dev": true }, "lodash.defaultsdeep": { @@ -7031,20 +7102,11 @@ } }, "loglevel": { - "version": "1.6.3", - "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.6.3.tgz", - "integrity": "sha512-LoEDv5pgpvWgPF4kNYuIp0qqSJVWak/dML0RY74xlzMZiT9w77teNAwKYKWBTYjlokMirg+o3jBwp+vlLrcfAA==", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/loglevel/-/loglevel-1.7.1.tgz", + "integrity": "sha512-Hesni4s5UkWkwCGJMQGAh71PaLUmKFM60dHvq0zi/vDhhrzuk+4GgNbTXJ12YYQJn6ZKBDNIjYcuQGKudvqrIw==", "dev": true }, - "loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", - "dev": true, - "requires": { - "js-tokens": "^3.0.0 || ^4.0.0" - } - }, "lower-case": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-1.1.4.tgz", @@ -7061,22 +7123,20 @@ } }, "make-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", - "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", - "dev": true, - "requires": { - "pify": "^4.0.1", - "semver": "^5.6.0" - } - }, - "map-age-cleaner": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz", - "integrity": "sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "dev": true, "requires": { - "p-defer": "^1.0.0" + "semver": "^6.0.0" + }, + "dependencies": { + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true + } } }, "map-cache": { @@ -7106,9 +7166,9 @@ } }, "mdn-data": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-1.1.4.tgz", - "integrity": "sha512-FSYbp3lyKjyj3E7fMl6rYvUdX0FBXaluGqlFoYESWQlyUTq8R+wp0rkFxoYFqZlHCvsUXGjyJmLQSnXToYhOSA==", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.4.tgz", + "integrity": "sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA==", "dev": true }, "media-typer": { @@ -7117,25 +7177,6 @@ "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=", "dev": true }, - "mem": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/mem/-/mem-4.3.0.tgz", - "integrity": "sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w==", - "dev": true, - "requires": { - "map-age-cleaner": "^0.1.1", - "mimic-fn": "^2.0.0", - "p-is-promise": "^2.0.0" - }, - "dependencies": { - "mimic-fn": { - 
"version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true - } - } - }, "memory-fs": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/memory-fs/-/memory-fs-0.4.1.tgz", @@ -7169,10 +7210,16 @@ } } }, + "merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, "merge2": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.2.3.tgz", - "integrity": "sha512-gdUU1Fwj5ep4kplwcmftruWofEFt6lfpkkr3h860CXbAB9c3hGb55EOL2ali0Td5oebvW0E1+3Sr+Ur7XfKpRA==", + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", "dev": true }, "methods": { @@ -7210,27 +7257,35 @@ "requires": { "bn.js": "^4.0.0", "brorand": "^1.0.1" + }, + "dependencies": { + "bn.js": { + "version": "4.11.9", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", + "integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==", + "dev": true + } } }, "mime": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/mime/-/mime-2.4.4.tgz", - "integrity": "sha512-LRxmNwziLPT828z+4YkNzloCFC2YM4wrB99k+AV5ZbEyfGNWfG8SO1FUXLmLDBSo89NrJZ4DIWeLjy1CHGhMGA==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.5.0.tgz", + "integrity": "sha512-ft3WayFSFUVBuJj7BMLKAQcSlItKtfjsKDDsii3rqFDAZ7t11zRe8ASw/GlmivGwVUYtwkQrxiGGpL6gFvB0ag==", "dev": true }, "mime-db": { - "version": "1.40.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.40.0.tgz", - "integrity": "sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA==", + "version": "1.45.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.45.0.tgz", + "integrity": "sha512-CkqLUxUk15hofLoLyljJSrukZi8mAtgd+yE5uO4tqRZsdsAJKv0O+rFMhVDRJgozy+yG6md5KwuXhD4ocIoP+w==", "dev": true }, "mime-types": { - "version": "2.1.24", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.24.tgz", - "integrity": "sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ==", + "version": "2.1.28", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.28.tgz", + "integrity": "sha512-0TO2yJ5YHYr7M2zzT7gDU1tbwHxEUWBCLt0lscSNpcdAfFyJOVEpRYNS7EXVcTLNj/25QO8gulHC5JtTzSE2UQ==", "dev": true, "requires": { - "mime-db": "1.40.0" + "mime-db": "1.45.0" } }, "mimic-fn": { @@ -7240,26 +7295,27 @@ "dev": true }, "mini-css-extract-plugin": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-0.6.0.tgz", - "integrity": "sha512-79q5P7YGI6rdnVyIAV4NXpBQJFWdkzJxCim3Kog4078fM0piAaFlwocqbejdWtLW1cEzCexPrh6EdyFsPgVdAw==", + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/mini-css-extract-plugin/-/mini-css-extract-plugin-0.9.0.tgz", + "integrity": "sha512-lp3GeY7ygcgAmVIcRPBVhIkf8Us7FZjA+ILpal44qLdSu11wmjKQ3d9k15lfD7pO4esu9eUIAW7qiYIBppv40A==", "dev": true, "requires": { "loader-utils": "^1.1.0", - "normalize-url": "^2.0.1", + "normalize-url": "1.9.1", "schema-utils": "^1.0.0", "webpack-sources": "^1.1.0" 
}, "dependencies": { "normalize-url": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-2.0.1.tgz", - "integrity": "sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw==", + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-1.9.1.tgz", + "integrity": "sha1-LMDWazHqIwNkWENuNiDYWVTGbDw=", "dev": true, "requires": { - "prepend-http": "^2.0.0", - "query-string": "^5.0.1", - "sort-keys": "^2.0.0" + "object-assign": "^4.0.1", + "prepend-http": "^1.0.0", + "query-string": "^4.1.0", + "sort-keys": "^1.0.0" } }, "schema-utils": { @@ -7297,11 +7353,55 @@ } }, "minimist": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz", - "integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz", + "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", "dev": true }, + "minipass": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", + "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + }, + "dependencies": { + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + } + } + }, + "minipass-collect": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-1.0.2.tgz", + "integrity": "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } + }, + "minipass-flush": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", + "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } + }, + "minipass-pipeline": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", + "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } + }, "mississippi": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/mississippi/-/mississippi-3.0.0.tgz", @@ -7342,20 +7442,12 @@ } }, "mkdirp": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", - "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", "dev": true, "requires": { - "minimist": "0.0.8" - }, - "dependencies": { - "minimist": { - "version": "0.0.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", - "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=", - "dev": true - } + "minimist": "^1.2.5" } }, "move-concurrently": { @@ -7395,9 +7487,9 @@ "dev": true }, "mute-stream": { - "version": "0.0.7", - "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz", - "integrity": 
"sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s=", + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", "dev": true }, "mz": { @@ -7411,13 +7503,6 @@ "thenify-all": "^1.0.0" } }, - "nan": { - "version": "2.14.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", - "integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==", - "dev": true, - "optional": true - }, "nanomatch": { "version": "1.2.13", "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", @@ -7450,9 +7535,9 @@ "dev": true }, "neo-async": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.1.tgz", - "integrity": "sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw==", + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", "dev": true }, "nice-try": { @@ -7471,20 +7556,20 @@ } }, "node-forge": { - "version": "0.7.5", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.7.5.tgz", - "integrity": "sha512-MmbQJ2MTESTjt3Gi/3yG1wGpIMhUfcIypUCGtTizFR9IiccFwxSpfp0vtIZlkFclEqERemxfnSdZEMR9VqqEFQ==", + "version": "0.10.0", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.10.0.tgz", + "integrity": "sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==", "dev": true }, "node-ipc": { - "version": "9.1.1", - "resolved": "https://registry.npmjs.org/node-ipc/-/node-ipc-9.1.1.tgz", - "integrity": "sha512-FAyICv0sIRJxVp3GW5fzgaf9jwwRQxAKDJlmNFUL5hOy+W4X/I5AypyHoq0DXXbo9o/gt79gj++4cMr4jVWE/w==", + "version": "9.1.3", + "resolved": "https://registry.npmjs.org/node-ipc/-/node-ipc-9.1.3.tgz", + "integrity": "sha512-8RS4RZyS/KMKKYG8mrje+cLxwATe9dBCuOiqKFSWND4oOuKytfuKCiR9yinvhoXF/nGdX/WnbywaUee+9U87zA==", "dev": true, "requires": { "event-pubsub": "4.3.0", - "js-message": "1.0.5", - "js-queue": "2.0.0" + "js-message": "1.0.7", + "js-queue": "2.0.2" } }, "node-libs-browser": { @@ -7527,13 +7612,10 @@ } }, "node-releases": { - "version": "1.1.25", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.25.tgz", - "integrity": "sha512-fI5BXuk83lKEoZDdH3gRhtsNgh05/wZacuXkgbiYkceE7+QIMXOg98n9ZV7mz27B+kFHnqHcUpscZZlGRSmTpQ==", - "dev": true, - "requires": { - "semver": "^5.3.0" - } + "version": "1.1.70", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.70.tgz", + "integrity": "sha512-Slf2s69+2/uAD79pVVQo8uSiC34+g8GWY8UH2Qtqv34ZfhYrxpYpfzs9Js9d6O0mbDmALuxaTlplnBTnSELcrw==", + "dev": true }, "normalize-package-data": { "version": "2.5.0", @@ -7583,18 +7665,35 @@ "boolbase": "~1.0.0" } }, + "null-loader": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/null-loader/-/null-loader-3.0.0.tgz", + "integrity": "sha512-hf5sNLl8xdRho4UPBOOeoIwT3WhjYcMUQm0zj44EhD6UscMAz72o2udpoDFBgykucdEDGIcd6SXbc/G6zssbzw==", + "dev": true, + "requires": { + "loader-utils": "^1.2.3", + "schema-utils": "^1.0.0" + }, + "dependencies": { + "schema-utils": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", + "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", + "dev": 
true, + "requires": { + "ajv": "^6.1.0", + "ajv-errors": "^1.0.0", + "ajv-keywords": "^3.1.0" + } + } + } + }, "num2fraction": { "version": "1.2.2", "resolved": "https://registry.npmjs.org/num2fraction/-/num2fraction-1.2.2.tgz", "integrity": "sha1-b2gragJ6Tp3fpFZM0lidHU5mnt4=", "dev": true }, - "number-is-nan": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", - "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", - "dev": true - }, "oauth-sign": { "version": "0.9.0", "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", @@ -7644,6 +7743,22 @@ "integrity": "sha512-OSuu/pU4ENM9kmREg0BdNrUDIl1heYa4mBZacJc+vVWz4GtAwu7jO8s4AIt2aGRUTqxykpWzI3Oqnsm13tTMDA==", "dev": true }, + "object-inspect": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.9.0.tgz", + "integrity": "sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw==", + "dev": true + }, + "object-is": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.4.tgz", + "integrity": "sha512-1ZvAZ4wlF7IyPVOcE1Omikt7UpaFlOQq0HlSti+ZvDH3UiD2brwGMwDbyV43jao2bKJ+4+WdPJHSd7kgzKYVqg==", + "dev": true, + "requires": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" + } + }, "object-keys": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", @@ -7660,25 +7775,26 @@ } }, "object.assign": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", - "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz", + "integrity": "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==", "dev": true, "requires": { - "define-properties": "^1.1.2", - "function-bind": "^1.1.1", - "has-symbols": "^1.0.0", - "object-keys": "^1.0.11" + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "has-symbols": "^1.0.1", + "object-keys": "^1.1.1" } }, "object.getownpropertydescriptors": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.0.3.tgz", - "integrity": "sha1-h1jIRvW0B62rDyNuCYbxSwUcqhY=", + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.1.tgz", + "integrity": "sha512-6DtXgZ/lIZ9hqx4GtZETobXLR/ZLaa0aqV0kzbn80Rf8Z2e/XFnhA0I7p07N2wH8bBBltr2xQPi6sbKWAY2Eng==", "dev": true, "requires": { - "define-properties": "^1.1.2", - "es-abstract": "^1.5.1" + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "es-abstract": "^1.18.0-next.1" } }, "object.pick": { @@ -7691,14 +7807,14 @@ } }, "object.values": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.0.tgz", - "integrity": "sha512-8mf0nKLAoFX6VlNVdhGj31SVYpaNFtUnuoOXWyFEstsWRgU837AK+JYM0iAxwkSzGRbwn8cbFmgbyxj1j4VbXg==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.2.tgz", + "integrity": "sha512-MYC0jvJopr8EK6dPBiO8Nb9mvjdypOachO5REGk6MXzujbBrAisKo3HmdEI6kZDL6fC31Mwee/5YbtMebixeag==", "dev": true, "requires": { + "call-bind": "^1.0.0", "define-properties": "^1.1.3", - "es-abstract": "^1.12.0", - "function-bind": "^1.1.1", + "es-abstract": 
"^1.18.0-next.1", "has": "^1.0.3" } }, @@ -7751,9 +7867,9 @@ } }, "opener": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.1.tgz", - "integrity": "sha512-goYSy5c2UXE4Ra1xixabeVh1guIX/ZV/YokJksb6q2lubWu6UbvPQ20p542/sFIll1nl8JnCyK9oBaOcCWXwvA==", + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz", + "integrity": "sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==", "dev": true }, "opn": { @@ -7766,17 +7882,17 @@ } }, "optionator": { - "version": "0.8.2", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.2.tgz", - "integrity": "sha1-NkxeQJ0/TWMB1sC0wFu6UBgK62Q=", + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", "dev": true, "requires": { "deep-is": "~0.1.3", - "fast-levenshtein": "~2.0.4", + "fast-levenshtein": "~2.0.6", "levn": "~0.3.0", "prelude-ls": "~1.1.2", "type-check": "~0.3.2", - "wordwrap": "~1.0.0" + "word-wrap": "~1.2.3" } }, "ora": { @@ -7791,6 +7907,17 @@ "log-symbols": "^2.2.0", "strip-ansi": "^5.2.0", "wcwidth": "^1.0.1" + }, + "dependencies": { + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } + } } }, "original": { @@ -7808,64 +7935,44 @@ "integrity": "sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=", "dev": true }, - "os-locale": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-3.1.0.tgz", - "integrity": "sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q==", - "dev": true, - "requires": { - "execa": "^1.0.0", - "lcid": "^2.0.0", - "mem": "^4.0.0" - } - }, "os-tmpdir": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", "dev": true }, - "p-defer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-1.0.0.tgz", - "integrity": "sha1-n26xgvbJqozXQwBKfU+WsZaw+ww=", - "dev": true - }, "p-finally": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", "integrity": "sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=", "dev": true }, - "p-is-promise": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-2.1.0.tgz", - "integrity": "sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg==", - "dev": true - }, "p-limit": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", - "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, "requires": { - "p-try": "^1.0.0" + "p-try": "^2.0.0" } }, "p-locate": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", - "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "version": "4.1.0", + "resolved": 
"https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", "dev": true, "requires": { - "p-limit": "^1.1.0" + "p-limit": "^2.2.0" } }, "p-map": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-2.1.0.tgz", - "integrity": "sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==", - "dev": true + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "dev": true, + "requires": { + "aggregate-error": "^3.0.0" + } }, "p-retry": { "version": "3.0.1", @@ -7877,24 +7984,24 @@ } }, "p-try": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", - "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true }, "pako": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.10.tgz", - "integrity": "sha512-0DTvPVU3ed8+HNXOu5Bs+o//Mbdj9VNQMUOe9oKCwh8l0GNwpTDMKCWbRjgtD291AWnkAgkqA/LOnQS8AmS1tw==", + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", + "integrity": "sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw==", "dev": true }, "parallel-transform": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/parallel-transform/-/parallel-transform-1.1.0.tgz", - "integrity": "sha1-1BDwZbBdojCB/NEPKIVMKb2jOwY=", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/parallel-transform/-/parallel-transform-1.2.0.tgz", + "integrity": "sha512-P2vSmIu38uIlvdcU7fDkyrxj33gTUy/ABO5ZUbGowxNCopBq/OoD42bP4UmMrJoPyk4Uqf0mu3mtWBhHCZD8yg==", "dev": true, "requires": { - "cyclist": "~0.2.2", + "cyclist": "^1.0.1", "inherits": "^2.0.3", "readable-stream": "^2.1.5" } @@ -7926,35 +8033,53 @@ } }, "parse-asn1": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.4.tgz", - "integrity": "sha512-Qs5duJcuvNExRfFZ99HDD3z4mAi3r9Wl/FOjEOijlxwCZs7E7mW2vjTpgQ4J8LpTF8x5v+1Vn5UQFejmWT11aw==", + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/parse-asn1/-/parse-asn1-5.1.6.tgz", + "integrity": "sha512-RnZRo1EPU6JBnra2vGHj0yhp6ebyjBZpmUCLHWiFhxlzvBCCpAuZ7elsBp1PVAbQN0/04VD/19rfzlBSwLstMw==", "dev": true, "requires": { - "asn1.js": "^4.0.0", + "asn1.js": "^5.2.0", "browserify-aes": "^1.0.0", - "create-hash": "^1.1.0", "evp_bytestokey": "^1.0.0", "pbkdf2": "^3.0.3", "safe-buffer": "^5.1.1" } }, "parse-json": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", - "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", "dev": true, "requires": { + "@babel/code-frame": "^7.0.0", "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1" + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" } }, "parse5": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-4.0.0.tgz", - "integrity": 
"sha512-VrZ7eOd3T1Fk4XWNXMgiGBK/z0MG48BWG2uQNU4I72fkQuKUTZpl+u9k+CxEG0twMVzSmXEEz12z5Fnw1jIQFA==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-5.1.1.tgz", + "integrity": "sha512-ugq4DFI0Ptb+WWjAdOK16+u/nHfiIrcE+sh8kZMaM0WllQKLI9rOUq6c2b7cwPkXdzfQESqvoqK6ug7U/Yyzug==", "dev": true }, + "parse5-htmlparser2-tree-adapter": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-6.0.1.tgz", + "integrity": "sha512-qPuWvbLgvDGilKc5BoicRovlT4MtYT6JfJyBOMDsKoiT+GiuP5qyrPCnR9HcPECIJJmZh5jRndyNThnhhb/vlA==", + "dev": true, + "requires": { + "parse5": "^6.0.1" + }, + "dependencies": { + "parse5": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz", + "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==", + "dev": true + } + } + }, "parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", @@ -7980,9 +8105,9 @@ "dev": true }, "path-exists": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", - "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", "dev": true }, "path-is-absolute": { @@ -8033,9 +8158,9 @@ } }, "pbkdf2": { - "version": "3.0.17", - "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.0.17.tgz", - "integrity": "sha512-U/il5MsrZp7mGg3mSQfn742na2T+1/vHDCG5/iTI3X9MKUuYUZVLQhyRsg06mCgDBTd57TxzgZt7P+fYfjRLtA==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/pbkdf2/-/pbkdf2-3.1.1.tgz", + "integrity": "sha512-4Ejy1OPxi9f2tt1rRV7Go7zmfDQ+ZectEQz3VGUQhgq62HtIRPDyG/JtnwIxs6x3uNMwo2V7q1fMvKjb+Tnpqg==", "dev": true, "requires": { "create-hash": "^1.1.2", @@ -8051,6 +8176,12 @@ "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=", "dev": true }, + "picomatch": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", + "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "dev": true + }, "pify": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", @@ -8073,100 +8204,42 @@ } }, "pkg-dir": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", - "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", "dev": true, "requires": { - "find-up": "^3.0.0" - }, - "dependencies": { - "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "dev": true, - "requires": { - "locate-path": "^3.0.0" - } - }, - "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "dev": true, - "requires": { - "p-locate": "^3.0.0", - 
"path-exists": "^3.0.0" - } - }, - "p-limit": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", - "integrity": "sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", - "dev": true, - "requires": { - "p-try": "^2.0.0" - } - }, - "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", - "dev": true, - "requires": { - "p-limit": "^2.0.0" - } - }, - "p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", - "dev": true - } + "find-up": "^4.0.0" } }, - "pkg-up": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/pkg-up/-/pkg-up-2.0.0.tgz", - "integrity": "sha1-yBmscoBZpGHKscOImivjxJoATX8=", + "pnp-webpack-plugin": { + "version": "1.6.4", + "resolved": "https://registry.npmjs.org/pnp-webpack-plugin/-/pnp-webpack-plugin-1.6.4.tgz", + "integrity": "sha512-7Wjy+9E3WwLOEL30D+m8TSTF7qJJUJLONBnwQp0518siuMxUQUbgZwssaFX+QKlZkjHZcw/IpZCt/H0srrntSg==", "dev": true, "requires": { - "find-up": "^2.1.0" + "ts-pnp": "^1.1.6" } }, - "pluralize": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/pluralize/-/pluralize-7.0.0.tgz", - "integrity": "sha512-ARhBOdzS3e41FbkW/XWrTEtukqqLoK5+Z/4UeDaLuSW+39JPeFgs4gCGqsrJHVZX0fUrx//4OF0K1CUGwlIFow==", - "dev": true, - "optional": true - }, "portfinder": { - "version": "1.0.21", - "resolved": "https://registry.npmjs.org/portfinder/-/portfinder-1.0.21.tgz", - "integrity": "sha512-ESabpDCzmBS3ekHbmpAIiESq3udRsCBGiBZLsC+HgBKv2ezb0R4oG+7RnYEVZ/ZCfhel5Tx3UzdNWA0Lox2QCA==", + "version": "1.0.28", + "resolved": "https://registry.npmjs.org/portfinder/-/portfinder-1.0.28.tgz", + "integrity": "sha512-Se+2isanIcEqf2XMHjyUKskczxbPH7dQnlMjXX6+dybayyHvAf/TCgyMRlzf/B6QDhAEFOGes0pzRo3by4AbMA==", "dev": true, "requires": { - "async": "^1.5.2", - "debug": "^2.2.0", - "mkdirp": "0.5.x" + "async": "^2.6.2", + "debug": "^3.1.1", + "mkdirp": "^0.5.5" }, "dependencies": { "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, "requires": { - "ms": "2.0.0" + "ms": "^2.1.1" } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", - "dev": true } } }, @@ -8177,9 +8250,9 @@ "dev": true }, "postcss": { - "version": "7.0.17", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.17.tgz", - "integrity": "sha512-546ZowA+KZ3OasvQZHsbuEpysvwTZNGJv9EfyCQdsIDltPSWHAeTQ5fQy/Npi2ZDtLI3zs7Ps/p6wThErhm9fQ==", + "version": "7.0.35", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-7.0.35.tgz", + "integrity": "sha512-3QT8bBJeX/S5zKTTjTCIjRF3If4avAT6kqxcASlTWEtAFCb9NH0OUxNDfgZSWdP5fJnBYCMEWkIFfWeugjzYMg==", "dev": true, "requires": { "chalk": "^2.4.2", @@ -8205,15 +8278,14 @@ } }, "postcss-calc": { - "version": "7.0.1", - "resolved": 
"https://registry.npmjs.org/postcss-calc/-/postcss-calc-7.0.1.tgz", - "integrity": "sha512-oXqx0m6tb4N3JGdmeMSc/i91KppbYsFZKdH0xMOqK8V1rJlzrKlTdokz8ozUXLVejydRN6u2IddxpcijRj2FqQ==", + "version": "7.0.5", + "resolved": "https://registry.npmjs.org/postcss-calc/-/postcss-calc-7.0.5.tgz", + "integrity": "sha512-1tKHutbGtLtEZF6PT4JSihCHfIVldU72mZ8SdZHIYriIZ9fh9k9aWSppaT8rHsyI3dX+KSR+W+Ix9BMY3AODrg==", "dev": true, "requires": { - "css-unit-converter": "^1.1.1", - "postcss": "^7.0.5", - "postcss-selector-parser": "^5.0.0-rc.4", - "postcss-value-parser": "^3.3.1" + "postcss": "^7.0.27", + "postcss-selector-parser": "^6.0.2", + "postcss-value-parser": "^4.0.2" } }, "postcss-colormin": { @@ -8227,6 +8299,14 @@ "has": "^1.0.0", "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-convert-values": { @@ -8237,6 +8317,14 @@ "requires": { "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-discard-comments": { @@ -8276,9 +8364,9 @@ } }, "postcss-load-config": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-2.1.0.tgz", - "integrity": "sha512-4pV3JJVPLd5+RueiVVB+gFOAa7GWc25XQcMp86Zexzke69mKf6Nx9LRcQywdz7yZI9n1udOxmLuAwTBypypF8Q==", + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-2.1.2.tgz", + "integrity": "sha512-/rDeGV6vMUo3mwJZmeHfEDvwnTKKqQ0S7OHUi/kJvvtx3aWtyWG2/0ZWnzCt2keEclwN6Tf0DST2v9kITdOKYw==", "dev": true, "requires": { "cosmiconfig": "^5.0.0", @@ -8320,6 +8408,14 @@ "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0", "stylehacks": "^4.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-merge-rules": { @@ -8337,12 +8433,12 @@ }, "dependencies": { "postcss-selector-parser": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.1.tgz", - "integrity": "sha1-T4dfSvsMllc9XPTXQBGu4lCn6GU=", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.2.tgz", + "integrity": "sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA==", "dev": true, "requires": { - "dot-prop": "^4.1.1", + "dot-prop": "^5.2.0", "indexes-of": "^1.0.1", "uniq": "^1.0.1" } @@ -8357,6 +8453,14 @@ "requires": { "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": 
true + } } }, "postcss-minify-gradients": { @@ -8369,6 +8473,14 @@ "is-color-stop": "^1.0.0", "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-minify-params": { @@ -8383,6 +8495,14 @@ "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0", "uniqs": "^2.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-minify-selectors": { @@ -8398,12 +8518,12 @@ }, "dependencies": { "postcss-selector-parser": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.1.tgz", - "integrity": "sha1-T4dfSvsMllc9XPTXQBGu4lCn6GU=", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.2.tgz", + "integrity": "sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA==", "dev": true, "requires": { - "dot-prop": "^4.1.1", + "dot-prop": "^5.2.0", "indexes-of": "^1.0.1", "uniq": "^1.0.1" } @@ -8411,118 +8531,44 @@ } }, "postcss-modules-extract-imports": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-1.2.1.tgz", - "integrity": "sha512-6jt9XZwUhwmRUhb/CkyJY020PYaPJsCyt3UjbaWo6XEbH/94Hmv6MP7fG2C5NDU/BcHzyGYxNtHvM+LTf9HrYw==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-extract-imports/-/postcss-modules-extract-imports-2.0.0.tgz", + "integrity": "sha512-LaYLDNS4SG8Q5WAWqIJgdHPJrDDr/Lv775rMBFUbgjTz6j34lUznACHcdRWroPvXANP2Vj7yNK57vp9eFqzLWQ==", "dev": true, "requires": { - "postcss": "^6.0.1" - }, - "dependencies": { - "postcss": { - "version": "6.0.23", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", - "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", - "dev": true, - "requires": { - "chalk": "^2.4.1", - "source-map": "^0.6.1", - "supports-color": "^5.4.0" - } - }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true - } + "postcss": "^7.0.5" } }, "postcss-modules-local-by-default": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-1.2.0.tgz", - "integrity": "sha1-99gMOYxaOT+nlkRmvRlQCn1hwGk=", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/postcss-modules-local-by-default/-/postcss-modules-local-by-default-3.0.3.tgz", + "integrity": "sha512-e3xDq+LotiGesympRlKNgaJ0PCzoUIdpH0dj47iWAui/kyTgh3CiAr1qP54uodmJhl6p9rN6BoNcdEDVJx9RDw==", "dev": true, "requires": { - "css-selector-tokenizer": "^0.7.0", - "postcss": "^6.0.1" - }, - "dependencies": { - "postcss": { - "version": "6.0.23", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", - "integrity": 
"sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", - "dev": true, - "requires": { - "chalk": "^2.4.1", - "source-map": "^0.6.1", - "supports-color": "^5.4.0" - } - }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true - } + "icss-utils": "^4.1.1", + "postcss": "^7.0.32", + "postcss-selector-parser": "^6.0.2", + "postcss-value-parser": "^4.1.0" } }, "postcss-modules-scope": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-1.1.0.tgz", - "integrity": "sha1-1upkmUx5+XtipytCb75gVqGUu5A=", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/postcss-modules-scope/-/postcss-modules-scope-2.2.0.tgz", + "integrity": "sha512-YyEgsTMRpNd+HmyC7H/mh3y+MeFWevy7V1evVhJWewmMbjDHIbZbOXICC2y+m1xI1UVfIT1HMW/O04Hxyu9oXQ==", "dev": true, "requires": { - "css-selector-tokenizer": "^0.7.0", - "postcss": "^6.0.1" - }, - "dependencies": { - "postcss": { - "version": "6.0.23", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", - "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", - "dev": true, - "requires": { - "chalk": "^2.4.1", - "source-map": "^0.6.1", - "supports-color": "^5.4.0" - } - }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true - } + "postcss": "^7.0.6", + "postcss-selector-parser": "^6.0.0" } }, "postcss-modules-values": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-1.3.0.tgz", - "integrity": "sha1-7P+p1+GSUYOJ9CrQ6D9yrsRW6iA=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/postcss-modules-values/-/postcss-modules-values-3.0.0.tgz", + "integrity": "sha512-1//E5jCBrZ9DmRX+zCtmQtRSV6PV42Ix7Bzj9GbwJceduuf7IqP8MgeTXuRDHOWj2m0VzZD5+roFWDuU8RQjcg==", "dev": true, "requires": { - "icss-replace-symbols": "^1.1.0", - "postcss": "^6.0.1" - }, - "dependencies": { - "postcss": { - "version": "6.0.23", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-6.0.23.tgz", - "integrity": "sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==", - "dev": true, - "requires": { - "chalk": "^2.4.1", - "source-map": "^0.6.1", - "supports-color": "^5.4.0" - } - }, - "source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true - } + "icss-utils": "^4.0.0", + "postcss": "^7.0.6" } }, "postcss-normalize-charset": { @@ -8543,6 +8589,14 @@ "cssnano-util-get-match": "^4.0.0", "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-normalize-positions": { @@ -8555,6 +8609,14 @@ "has": "^1.0.0", "postcss": "^7.0.0", 
"postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-normalize-repeat-style": { @@ -8567,6 +8629,14 @@ "cssnano-util-get-match": "^4.0.0", "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-normalize-string": { @@ -8578,6 +8648,14 @@ "has": "^1.0.0", "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-normalize-timing-functions": { @@ -8589,6 +8667,14 @@ "cssnano-util-get-match": "^4.0.0", "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-normalize-unicode": { @@ -8600,6 +8686,14 @@ "browserslist": "^4.0.0", "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-normalize-url": { @@ -8612,6 +8706,14 @@ "normalize-url": "^3.0.0", "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-normalize-whitespace": { @@ -8622,6 +8724,14 @@ "requires": { "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-ordered-values": { @@ -8633,6 +8743,14 @@ "cssnano-util-get-arguments": "^4.0.0", "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-reduce-initial": { @@ -8657,17 +8775,26 @@ "has": "^1.0.0", "postcss": 
"^7.0.0", "postcss-value-parser": "^3.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-selector-parser": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-5.0.0.tgz", - "integrity": "sha512-w+zLE5Jhg6Liz8+rQOWEAwtwkyqpfnmsinXjXg6cY7YIONZZtgvE0v2O0uhQBs0peNomOJwWRKt6JBfTdTd3OQ==", + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.4.tgz", + "integrity": "sha512-gjMeXBempyInaBqpp8gODmwZ52WaYsVOsfr4L4lDQ7n3ncD6mEyySiDtgzCT+NYC0mmeOLvtsF8iaEf0YT6dBw==", "dev": true, "requires": { - "cssesc": "^2.0.0", + "cssesc": "^3.0.0", "indexes-of": "^1.0.1", - "uniq": "^1.0.1" + "uniq": "^1.0.1", + "util-deprecate": "^1.0.2" } }, "postcss-svgo": { @@ -8680,6 +8807,14 @@ "postcss": "^7.0.0", "postcss-value-parser": "^3.0.0", "svgo": "^1.0.0" + }, + "dependencies": { + "postcss-value-parser": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", + "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "dev": true + } } }, "postcss-unique-selectors": { @@ -8694,9 +8829,9 @@ } }, "postcss-value-parser": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz", - "integrity": "sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.1.0.tgz", + "integrity": "sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ==", "dev": true }, "prelude-ls": { @@ -8706,33 +8841,28 @@ "dev": true }, "prepend-http": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", - "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-1.0.4.tgz", + "integrity": "sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=", "dev": true }, "prettier": { - "version": "1.16.3", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.16.3.tgz", - "integrity": "sha512-kn/GU6SMRYPxUakNXhpP0EedT/KmaPzr0H5lIsDogrykbaxOpOfAFfk5XA7DZrJyMAv1wlMV3CPcZruGXVVUZw==", - "dev": true + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", + "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", + "dev": true, + "optional": true }, "pretty-error": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/pretty-error/-/pretty-error-2.1.1.tgz", - "integrity": "sha1-X0+HyPkeWuPzuoerTPXgOxoX8aM=", + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/pretty-error/-/pretty-error-2.1.2.tgz", + "integrity": "sha512-EY5oDzmsX5wvuynAByrmY0P0hcp+QpnAKbJng2A2MPjVKXCxrDSUkzghVJ4ZGPIv+JC4gX8fPUWscC0RtjsWGw==", "dev": true, "requires": { - "renderkid": "^2.0.1", - "utila": "~0.4" + "lodash": "^4.17.20", + "renderkid": "^2.0.4" } }, - "private": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/private/-/private-0.1.8.tgz", - "integrity": 
"sha512-VvivMrbvd2nKkiG38qjULzlc+4Vx4wm/whI9pQD35YrARNnhxeiRktSOhSukRLFNlzg6Br/cJPet5J/u19r/mg==", - "dev": true - }, "process": { "version": "0.11.10", "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", @@ -8758,13 +8888,13 @@ "dev": true }, "proxy-addr": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.5.tgz", - "integrity": "sha512-t/7RxHXPH6cJtP0pRG6smSr9QJidhB+3kXu0KgXnbGYMgzEnUxRQ4/LDdfOwZEMyIh3/xHb8PX3t+lfL9z+YVQ==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz", + "integrity": "sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw==", "dev": true, "requires": { "forwarded": "~0.1.2", - "ipaddr.js": "1.9.0" + "ipaddr.js": "1.9.1" } }, "prr": { @@ -8780,9 +8910,9 @@ "dev": true }, "psl": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/psl/-/psl-1.2.0.tgz", - "integrity": "sha512-GEn74ZffufCmkDDLNcl3uuyF/aSD6exEyh1v/ZSdAomB82t6G9hzJVRx0jBmLDW+VfZqks3aScmMw9DszwUalA==", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", + "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==", "dev": true }, "public-encrypt": { @@ -8797,6 +8927,14 @@ "parse-asn1": "^5.0.0", "randombytes": "^2.0.1", "safe-buffer": "^5.1.2" + }, + "dependencies": { + "bn.js": { + "version": "4.11.9", + "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.11.9.tgz", + "integrity": "sha512-E6QoYqCKZfgatHTdHzs1RRKP7ip4vvm+EyRUeE2RF0NblwVvb0p6jSVeNTOFxPn26QXN2o6SMfNxKp6kU8zQaw==", + "dev": true + } } }, "pump": { @@ -8851,12 +8989,11 @@ "dev": true }, "query-string": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/query-string/-/query-string-5.1.1.tgz", - "integrity": "sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw==", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/query-string/-/query-string-4.3.4.tgz", + "integrity": "sha1-u7aTucqRXCMlFbIosaArYJBD2+s=", "dev": true, "requires": { - "decode-uri-component": "^0.2.0", "object-assign": "^4.1.0", "strict-uri-encode": "^1.0.0" } @@ -8874,9 +9011,9 @@ "dev": true }, "querystringify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.1.1.tgz", - "integrity": "sha512-w7fLxIRCRT7U8Qu53jQnJyPkYZIaR4n5151KMfcJlO/A9397Wxb1amJvROTK6TOnp7PfoAmg/qXiNHI+08jRfA==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==", "dev": true }, "randombytes": { @@ -8926,26 +9063,12 @@ "normalize-package-data": "^2.5.0", "parse-json": "^5.0.0", "type-fest": "^0.6.0" - }, - "dependencies": { - "parse-json": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.0.0.tgz", - "integrity": "sha512-OOY5b7PAEFV0E2Fir1KOkxchnZNCdowAJgQ5NuxjpBKTRP3pQhwkrkxqQjeoKJ+fO7bCpmIZaogI4eZGDMEGOw==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-better-errors": "^1.0.1", - "lines-and-columns": "^1.1.6" - } - } } }, "readable-stream": { - "version": "2.3.6", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz", - "integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==", + "version": 
"2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", "dev": true, "requires": { "core-util-is": "~1.0.0", @@ -8958,44 +9081,51 @@ } }, "readdirp": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", - "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz", + "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==", "dev": true, "requires": { - "graceful-fs": "^4.1.11", - "micromatch": "^3.1.10", - "readable-stream": "^2.0.2" + "picomatch": "^2.2.1" + } + }, + "rechoir": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", + "integrity": "sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=", + "dev": true, + "requires": { + "resolve": "^1.1.6" } }, "regenerate": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.0.tgz", - "integrity": "sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg==", + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", + "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==", "dev": true }, "regenerate-unicode-properties": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-8.1.0.tgz", - "integrity": "sha512-LGZzkgtLY79GeXLm8Dp0BVLdQlWICzBnJz/ipWUgo59qBaZ+BHtq51P2q1uVZlppMuUAT37SDk39qUbjTWB7bA==", + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-8.2.0.tgz", + "integrity": "sha512-F9DjY1vKLo/tPePDycuH3dn9H1OTPIkVD9Kz4LODu+F2C75mgjAJ7x/gwy6ZcSNRAAkhNlJSOHRe8k3p+K9WhA==", "dev": true, "requires": { "regenerate": "^1.4.0" } }, "regenerator-runtime": { - "version": "0.13.2", - "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.2.tgz", - "integrity": "sha512-S/TQAZJO+D3m9xeN1WTI8dLKBBiRgXBlTJvbWjCThHWZj9EvHK70Ff50/tYj2J/fvBY6JtFVwRuazHN2E7M9BA==", + "version": "0.13.7", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz", + "integrity": "sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew==", "dev": true }, "regenerator-transform": { - "version": "0.14.0", - "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.14.0.tgz", - "integrity": "sha512-rtOelq4Cawlbmq9xuMR5gdFmv7ku/sFoB7sRiywx7aq53bc52b4j6zvH7Te1Vt/X2YveDKnCGUbioieU7FEL3w==", + "version": "0.14.5", + "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.14.5.tgz", + "integrity": "sha512-eOf6vka5IO151Jfsw2NO9WpGX58W6wWmefK3I1zEGr0lOD0u8rwPaNqQL1aRxUaxLeKO3ArNh3VYg1KbaD+FFw==", "dev": true, "requires": { - "private": "^0.1.6" + "@babel/runtime": "^7.8.4" } }, "regex-not": { @@ -9008,43 +9138,46 @@ "safe-regex": "^1.1.0" } }, - "regexp-tree": { - "version": "0.1.11", - "resolved": "https://registry.npmjs.org/regexp-tree/-/regexp-tree-0.1.11.tgz", - "integrity": "sha512-7/l/DgapVVDzZobwMCCgMlqiqyLFJ0cduo/j+3BcDJIB+yJdsYCfKuI3l/04NV+H/rfNRdPIDbXNZHM9XvQatg==", - "dev": 
true + "regexp.prototype.flags": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.3.1.tgz", + "integrity": "sha512-JiBdRBq91WlY7uRJ0ds7R+dU02i6LKi8r3BuQhNXn+kmeLN+EfHhfjqMRis1zJxnlu88hq/4dx0P2OP3APRTOA==", + "dev": true, + "requires": { + "call-bind": "^1.0.2", + "define-properties": "^1.1.3" + } }, "regexpp": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-1.1.0.tgz", - "integrity": "sha512-LOPw8FpgdQF9etWMaAfG/WRthIdXJGYp4mJ2Jgn/2lpkbod9jPn0t9UqN7AxBOKNfzRbYyVfgc7Vk4t/MpnXgw==", - "dev": true, - "optional": true + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", + "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", + "dev": true }, "regexpu-core": { - "version": "4.5.4", - "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-4.5.4.tgz", - "integrity": "sha512-BtizvGtFQKGPUcTy56o3nk1bGRp4SZOTYrDtGNlqCQufptV5IkkLN6Emw+yunAJjzf+C9FQFtvq7IoA3+oMYHQ==", + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-4.7.1.tgz", + "integrity": "sha512-ywH2VUraA44DZQuRKzARmw6S66mr48pQVva4LBeRhcOltJ6hExvWly5ZjFLYo67xbIxb6W1q4bAGtgfEl20zfQ==", "dev": true, "requires": { "regenerate": "^1.4.0", - "regenerate-unicode-properties": "^8.0.2", - "regjsgen": "^0.5.0", - "regjsparser": "^0.6.0", + "regenerate-unicode-properties": "^8.2.0", + "regjsgen": "^0.5.1", + "regjsparser": "^0.6.4", "unicode-match-property-ecmascript": "^1.0.4", - "unicode-match-property-value-ecmascript": "^1.1.0" + "unicode-match-property-value-ecmascript": "^1.2.0" } }, "regjsgen": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.5.0.tgz", - "integrity": "sha512-RnIrLhrXCX5ow/E5/Mh2O4e/oa1/jW0eaBKTSy3LaCj+M3Bqvm97GWDp2yUtzIs4LEn65zR2yiYGFqb2ApnzDA==", + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/regjsgen/-/regjsgen-0.5.2.tgz", + "integrity": "sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A==", "dev": true }, "regjsparser": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.6.0.tgz", - "integrity": "sha512-RQ7YyokLiQBomUJuUG8iGVvkgOLxwyZM8k6d3q5SAXpg4r5TZJZigKFvC6PpD+qQ98bCDC5YelPeA3EucDoNeQ==", + "version": "0.6.7", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.6.7.tgz", + "integrity": "sha512-ib77G0uxsA2ovgiYbCVGx4Pv3PSttAx2vIwidqQzbL2U5S4Q+j00HdSAneSBuyVcMvEnTXMjiGgB+DlXozVhpQ==", "dev": true, "requires": { "jsesc": "~0.5.0" @@ -9071,16 +9204,16 @@ "dev": true }, "renderkid": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-2.0.3.tgz", - "integrity": "sha512-z8CLQp7EZBPCwCnncgf9C4XAi3WR0dv+uWu/PjIyhhAb5d6IJ/QZqlHFprHeKT+59//V6BNUsLbvN8+2LarxGA==", + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/renderkid/-/renderkid-2.0.5.tgz", + "integrity": "sha512-ccqoLg+HLOHq1vdfYNm4TBeaCDIi1FLt3wGojTDSvdewUv65oTmI3cnT2E4hRjl1gzKZIPK+KZrXzlUYKnR+vQ==", "dev": true, "requires": { - "css-select": "^1.1.0", + "css-select": "^2.0.2", "dom-converter": "^0.2", - "htmlparser2": "^3.3.0", - "strip-ansi": "^3.0.0", - "utila": "^0.4.0" + "htmlparser2": "^3.10.1", + "lodash": "^4.17.20", + "strip-ansi": "^3.0.0" }, "dependencies": { "ansi-regex": { @@ -9089,28 +9222,6 @@ "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", "dev": true }, - "css-select": { - "version": "1.2.0", - 
"resolved": "https://registry.npmjs.org/css-select/-/css-select-1.2.0.tgz", - "integrity": "sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg=", - "dev": true, - "requires": { - "boolbase": "~1.0.0", - "css-what": "2.1", - "domutils": "1.5.1", - "nth-check": "~1.0.1" - } - }, - "domutils": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-1.5.1.tgz", - "integrity": "sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8=", - "dev": true, - "requires": { - "dom-serializer": "0", - "domelementtype": "1" - } - }, "strip-ansi": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", @@ -9135,9 +9246,9 @@ "dev": true }, "request": { - "version": "2.88.0", - "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", - "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", + "version": "2.88.2", + "resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz", + "integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==", "dev": true, "requires": { "aws-sign2": "~0.7.0", @@ -9147,7 +9258,7 @@ "extend": "~3.0.2", "forever-agent": "~0.6.1", "form-data": "~2.3.2", - "har-validator": "~5.1.0", + "har-validator": "~5.1.3", "http-signature": "~1.2.0", "is-typedarray": "~1.0.0", "isstream": "~0.1.2", @@ -9157,31 +9268,11 @@ "performance-now": "^2.1.0", "qs": "~6.5.2", "safe-buffer": "^5.1.2", - "tough-cookie": "~2.4.3", + "tough-cookie": "~2.5.0", "tunnel-agent": "^0.6.0", "uuid": "^3.3.2" } }, - "request-promise-core": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.2.tgz", - "integrity": "sha512-UHYyq1MO8GsefGEt7EprS8UrXsm1TxEvFUX1IMTuSLU2Rh7fTIdFtl8xD7JiEYiWU2dl+NYAjCTksTehQUxPag==", - "dev": true, - "requires": { - "lodash": "^4.17.11" - } - }, - "request-promise-native": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/request-promise-native/-/request-promise-native-1.0.7.tgz", - "integrity": "sha512-rIMnbBdgNViL37nZ1b3L/VfPOpSi0TqVDQPAvO6U14lMzOLrt5nilxCQqtDKhZeDiW0/hkCXGoQjhgJd/tCh6w==", - "dev": true, - "requires": { - "request-promise-core": "1.1.2", - "stealthy-require": "^1.1.1", - "tough-cookie": "^2.3.3" - } - }, "require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -9194,35 +9285,19 @@ "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", "dev": true }, - "require-uncached": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/require-uncached/-/require-uncached-1.0.3.tgz", - "integrity": "sha1-Tg1W1slmL9MeQwEcS5WqSZVUIdM=", - "dev": true, - "optional": true, - "requires": { - "caller-path": "^0.1.0", - "resolve-from": "^1.0.0" - } - }, "requires-port": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", "integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=", "dev": true }, - "reselect": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/reselect/-/reselect-3.0.1.tgz", - "integrity": "sha1-79qpjqdFEyTQkrKyFjpqHXqaIUc=", - "dev": true - }, "resolve": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.11.1.tgz", - "integrity": "sha512-vIpgF6wfuJOZI7KKKSP+HmiKggadPQAdsp5HiC1mvqnfp0gF1vdwgBWZIdrVft9pgqoMFQN+R7BSWZiBxx+BBw==", + "version": "1.19.0", + "resolved": 
"https://registry.npmjs.org/resolve/-/resolve-1.19.0.tgz", + "integrity": "sha512-rArEXAgsBG4UgRGcynxWIWKFvh/XZCcS8UJdHhwy91zwAvCZIbcs+vAbflgBnNjYMs/i/i+/Ux6IZhML1yPvxg==", "dev": true, "requires": { + "is-core-module": "^2.1.0", "path-parse": "^1.0.6" } }, @@ -9233,22 +9308,13 @@ "dev": true, "requires": { "resolve-from": "^3.0.0" - }, - "dependencies": { - "resolve-from": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", - "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=", - "dev": true - } } }, "resolve-from": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-1.0.1.tgz", - "integrity": "sha1-Jsv+k10a7uq7Kbw/5a6wHpPUQiY=", - "dev": true, - "optional": true + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-3.0.0.tgz", + "integrity": "sha1-six699nWiBvItuZTM17rywoYh0g=", + "dev": true }, "resolve-url": { "version": "0.2.1", @@ -9291,9 +9357,9 @@ "dev": true }, "rimraf": { - "version": "2.6.3", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz", - "integrity": "sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA==", + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", "dev": true, "requires": { "glob": "^7.1.3" @@ -9310,13 +9376,10 @@ } }, "run-async": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz", - "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=", - "dev": true, - "requires": { - "is-promise": "^2.1.0" - } + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz", + "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ==", + "dev": true }, "run-queue": { "version": "1.0.3", @@ -9327,27 +9390,10 @@ "aproba": "^1.1.1" } }, - "rx-lite": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/rx-lite/-/rx-lite-4.0.8.tgz", - "integrity": "sha1-Cx4Rr4vESDbwSmQH6S2kJGe3lEQ=", - "dev": true, - "optional": true - }, - "rx-lite-aggregates": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/rx-lite-aggregates/-/rx-lite-aggregates-4.0.8.tgz", - "integrity": "sha1-dTuHqJoRyVRnxKwWJsTvxOBcZ74=", - "dev": true, - "optional": true, - "requires": { - "rx-lite": "*" - } - }, "rxjs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.2.tgz", - "integrity": "sha512-HUb7j3kvb7p7eCUHE3FqjoDsC1xfZQ4AHFWfTKSpZ+sAhhz5X1WX0ZuUqWbzB2QhSLp3DoLUG+hMdEDKqWo2Zg==", + "version": "6.6.3", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.3.tgz", + "integrity": "sha512-trsQc+xYYXZ3urjOiJOuCOa5N3jAZ3eiSpQB5hIT8zGlL2QfnHLJ2r7GMkBGuIausdJN1OneaI6gQlsqNHHmZQ==", "dev": true, "requires": { "tslib": "^1.9.0" @@ -9374,6 +9420,76 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "dev": true }, + "sass": { + "version": "1.32.6", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.32.6.tgz", + "integrity": "sha512-1bcDHDcSqeFtMr0JXI3xc/CXX6c4p0wHHivJdru8W7waM7a1WjKMm4m/Z5sY7CbVw4Whi2Chpcw6DFfSWwGLzQ==", + "dev": true, + "requires": { + "chokidar": ">=2.0.0 <4.0.0" + } + }, + "sass-loader": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/sass-loader/-/sass-loader-10.1.1.tgz", + "integrity": 
"sha512-W6gVDXAd5hR/WHsPicvZdjAWHBcEJ44UahgxcIE196fW2ong0ZHMPO1kZuI5q0VlvMQZh32gpv69PLWQm70qrw==", + "dev": true, + "requires": { + "klona": "^2.0.4", + "loader-utils": "^2.0.0", + "neo-async": "^2.6.2", + "schema-utils": "^3.0.0", + "semver": "^7.3.2" + }, + "dependencies": { + "loader-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.0.tgz", + "integrity": "sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ==", + "dev": true, + "requires": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^2.1.2" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "schema-utils": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.0.0.tgz", + "integrity": "sha512-6D82/xSzO094ajanoOSbe4YvXWMfn2A//8Y1+MUqFAJul5Bs+yn36xbK9OtNDcRVSBJ9jjeoXftM6CfztsjOAA==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.6", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + } + }, + "semver": { + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz", + "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + } + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + } + } + }, "sax": { "version": "1.2.4", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", @@ -9381,13 +9497,14 @@ "dev": true }, "schema-utils": { - "version": "0.4.7", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-0.4.7.tgz", - "integrity": "sha512-v/iwU6wvwGK8HbU9yi3/nhGzP0yGSuhQMzL6ySiec1FSrZZDkhm4noOSWzrNFo/jEc+SJY6jRTwuwbSXJPDUnQ==", + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.1.tgz", + "integrity": "sha512-SHiNtMOUGWBQJwzISiVYKu82GiV4QYGePp3odlY1tuKO7gPtphAT5R/py0fA6xtbgLL/RvtJZnU9b8s0F1q0Xg==", "dev": true, "requires": { - "ajv": "^6.1.0", - "ajv-keywords": "^3.1.0" + "@types/json-schema": "^7.0.5", + "ajv": "^6.12.4", + "ajv-keywords": "^3.5.2" } }, "select-hose": { @@ -9397,18 +9514,18 @@ "dev": true }, "selfsigned": { - "version": "1.10.4", - "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-1.10.4.tgz", - "integrity": "sha512-9AukTiDmHXGXWtWjembZ5NDmVvP2695EtpgbCsxCa68w3c88B+alqbmZ4O3hZ4VWGXeGWzEVdvqgAJD8DQPCDw==", + "version": "1.10.8", + "resolved": "https://registry.npmjs.org/selfsigned/-/selfsigned-1.10.8.tgz", + "integrity": "sha512-2P4PtieJeEwVgTU9QEcwIRDQ/mXJLX8/+I3ur+Pg16nS8oNbrGxEso9NyYWy8NAmXiNl4dlAp5MwoNeCWzON4w==", "dev": true, "requires": { - "node-forge": "0.7.5" + "node-forge": "^0.10.0" } }, "semver": { - "version": "5.7.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.0.tgz", - "integrity": "sha512-Ya52jSX2u7QKghxeoFGpLwCtGlt7j0oY9DYb5apt9nPlJ42ID+ulTXESnt/qAQcoSERyZ5sl3LDIOw0nAn/5DA==", + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": 
"sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", "dev": true }, "send": { @@ -9464,10 +9581,13 @@ } }, "serialize-javascript": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-1.7.0.tgz", - "integrity": "sha512-ke8UG8ulpFOxO8f8gRYabHQe/ZntKlcig2Mp+8+URDP1D8vJZ0KUt7LYo07q25Z/+JVSgpr/cui9PIp5H6/+nA==", - "dev": true + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-4.0.0.tgz", + "integrity": "sha512-GaNA54380uFefWghODBWEGisLZFj00nS5ACs6yHa9nLqlLpVLO8ChDGeKRjZnV4Nh4n0Qi7nhYZD/9fCPzEqkw==", + "dev": true, + "requires": { + "randombytes": "^2.1.0" + } }, "serve-index": { "version": "1.9.1", @@ -9604,21 +9724,26 @@ "dev": true }, "shell-quote": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.6.1.tgz", - "integrity": "sha1-9HgZSczkAmlxJ0MOo7PFR29IF2c=", + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.2.tgz", + "integrity": "sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg==", + "dev": true + }, + "shelljs": { + "version": "0.8.4", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.4.tgz", + "integrity": "sha512-7gk3UZ9kOfPLIAbslLzyWeGiEqx9e3rxwZM0KE6EL8GlGwjym9Mrlx5/p33bWTu9YG6vcS4MBxYZDHYr5lr8BQ==", "dev": true, "requires": { - "array-filter": "~0.0.0", - "array-map": "~0.0.0", - "array-reduce": "~0.0.0", - "jsonify": "~0.0.0" + "glob": "^7.0.0", + "interpret": "^1.0.0", + "rechoir": "^0.6.2" } }, "signal-exit": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz", - "integrity": "sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", + "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", "dev": true }, "simple-swizzle": { @@ -9645,13 +9770,22 @@ "dev": true }, "slice-ansi": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-1.0.0.tgz", - "integrity": "sha512-POqxBK6Lb3q6s047D/XsDVNPnF9Dl8JSaqe9h9lURl0OdNqy/ujDrOiIHtsqXMGbWWTIomRzAMaTyawAU//Reg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", + "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", "dev": true, - "optional": true, "requires": { + "ansi-styles": "^3.2.0", + "astral-regex": "^1.0.0", "is-fullwidth-code-point": "^2.0.0" + }, + "dependencies": { + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + } } }, "snapdragon": { @@ -9777,53 +9911,45 @@ } }, "sockjs": { - "version": "0.3.19", - "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.19.tgz", - "integrity": "sha512-V48klKZl8T6MzatbLlzzRNhMepEys9Y4oGFpypBFFn1gLI/QQ9HtLLyWJNbPlwGLelOVOEijUbTTJeLLI59jLw==", + "version": "0.3.21", + "resolved": "https://registry.npmjs.org/sockjs/-/sockjs-0.3.21.tgz", + "integrity": "sha512-DhbPFGpxjc6Z3I+uX07Id5ZO2XwYsWOrYjaSeieES78cq+JaJvVe5q/m1uvjIQhXinhIeCFRH6JgXe+mvVMyXw==", "dev": true, "requires": { - "faye-websocket": "^0.10.0", - "uuid": "^3.0.1" + "faye-websocket": "^0.11.3", + "uuid": "^3.4.0", + "websocket-driver": 
"^0.7.4" } }, "sockjs-client": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.3.0.tgz", - "integrity": "sha512-R9jxEzhnnrdxLCNln0xg5uGHqMnkhPSTzUZH2eXcR03S/On9Yvoq2wyUZILRUhZCNVu2PmwWVoyuiPz8th8zbg==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/sockjs-client/-/sockjs-client-1.5.0.tgz", + "integrity": "sha512-8Dt3BDi4FYNrCFGTL/HtwVzkARrENdwOUf1ZoW/9p3M8lZdFT35jVdrHza+qgxuG9H3/shR4cuX/X9umUrjP8Q==", "dev": true, "requires": { - "debug": "^3.2.5", + "debug": "^3.2.6", "eventsource": "^1.0.7", - "faye-websocket": "~0.11.1", - "inherits": "^2.0.3", - "json3": "^3.3.2", - "url-parse": "^1.4.3" + "faye-websocket": "^0.11.3", + "inherits": "^2.0.4", + "json3": "^3.3.3", + "url-parse": "^1.4.7" }, "dependencies": { "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", "dev": true, "requires": { "ms": "^2.1.1" } - }, - "faye-websocket": { - "version": "0.11.3", - "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.3.tgz", - "integrity": "sha512-D2y4bovYpzziGgbHYtGCMjlJM36vAl/y+xUyn1C+FVx8szd1E+86KwVw6XvYSzOP8iMpm1X0I4xJD+QtUb36OA==", - "dev": true, - "requires": { - "websocket-driver": ">=0.5.1" - } } } }, "sort-keys": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-2.0.0.tgz", - "integrity": "sha1-ZYU1WEhh7JfXMNbPQYIuH1ZoQSg=", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", + "integrity": "sha1-RBttTTRnmPG05J6JIK37oOVD+a0=", "dev": true, "requires": { "is-plain-obj": "^1.0.0" @@ -9842,12 +9968,12 @@ "dev": true }, "source-map-resolve": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.2.tgz", - "integrity": "sha512-MjqsvNwyz1s0k81Goz/9vRBe9SZdB09Bdw+/zYyO+3CuPk6fouTaxscHkgtE8jKvf01kVfl8riHzERQ/kefaSA==", + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz", + "integrity": "sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==", "dev": true, "requires": { - "atob": "^2.1.1", + "atob": "^2.1.2", "decode-uri-component": "^0.2.0", "resolve-url": "^0.2.1", "source-map-url": "^0.4.0", @@ -9855,9 +9981,9 @@ } }, "source-map-support": { - "version": "0.5.12", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.12.tgz", - "integrity": "sha512-4h2Pbvyy15EE02G+JOZpUCmqWJuqrs+sEkzewTm++BPi7Hvn/HwcqLAcNxYAyI0x13CpPPn+kMjl+hplXMHITQ==", + "version": "0.5.19", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz", + "integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==", "dev": true, "requires": { "buffer-from": "^1.0.0", @@ -9873,15 +9999,15 @@ } }, "source-map-url": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.0.tgz", - "integrity": "sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM=", + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.1.tgz", + "integrity": 
"sha512-cPiFOTLUKvJFIg4SKVScy4ilPPW6rFgMgfuZJPNoDuMs3nC1HbMUycBoJw77xFIp6z1UJQJOfx6C9GMH80DiTw==", "dev": true }, "spdx-correct": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.0.tgz", - "integrity": "sha512-lr2EZCctC2BNR7j7WzJ2FpDznxky1sjfxvvYEyzxNyb6lZXHODmEoJeFu4JupYlkfha1KZpJyoqiJ7pgA1qq8Q==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", "dev": true, "requires": { "spdx-expression-parse": "^3.0.0", @@ -9889,15 +10015,15 @@ } }, "spdx-exceptions": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz", - "integrity": "sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", "dev": true }, "spdx-expression-parse": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.0.tgz", - "integrity": "sha512-Yg6D3XpRD4kkOmTpdgbUiEJFKghJH03fiC1OPll5h/0sO6neh2jqRDVHOQ4o/LMea0tgCkbMgea5ip/e+MkWyg==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", "dev": true, "requires": { "spdx-exceptions": "^2.1.0", @@ -9905,15 +10031,15 @@ } }, "spdx-license-ids": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz", - "integrity": "sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q==", + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.7.tgz", + "integrity": "sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ==", "dev": true }, "spdy": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/spdy/-/spdy-4.0.0.tgz", - "integrity": "sha512-ot0oEGT/PGUpzf/6uk4AWLqkq+irlqHXkrdbk51oWONh3bxQmBuljxPNl66zlRRcIJStWq0QkLUCPOPjgjvU0Q==", + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/spdy/-/spdy-4.0.2.tgz", + "integrity": "sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==", "dev": true, "requires": { "debug": "^4.1.0", @@ -9938,9 +10064,9 @@ }, "dependencies": { "readable-stream": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", - "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", "dev": true, "requires": { "inherits": "^2.0.3", @@ -9998,9 +10124,9 @@ "dev": true }, "stackframe": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/stackframe/-/stackframe-1.0.4.tgz", - "integrity": "sha512-to7oADIniaYwS3MhtCa/sQhrxidCCQiF/qp4/m5iN3ipf0Y7Xlri0f6eG29r08aL7JYl8n32AF3Q5GYBZ7K8vw==", + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/stackframe/-/stackframe-1.2.0.tgz", + "integrity": "sha512-GrdeshiRmS1YLMYgzF16olf2jJ/IzxXY9lhKOskuVziubpTYcYqyOwYeJKzQkwy7uN0fYSsbsC4RQaXf9LCrYA==", "dev": true }, "static-extend": { @@ -10030,12 +10156,6 @@ "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", "dev": true }, - "stealthy-require": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", - "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=", - "dev": true - }, "stream-browserify": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/stream-browserify/-/stream-browserify-2.0.2.tgz", @@ -10070,9 +10190,9 @@ } }, "stream-shift": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.0.tgz", - "integrity": "sha1-1cdSgl5TZ+eG944Y5EXqIjoVWVI=", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", + "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==", "dev": true }, "strict-uri-encode": { @@ -10082,52 +10202,34 @@ "dev": true }, "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", "dev": true, "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - }, - "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" - } - } + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" } }, - "string.prototype.padend": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/string.prototype.padend/-/string.prototype.padend-3.0.0.tgz", - "integrity": "sha1-86rvfBcZ8XDF6rHDK/eA2W4h8vA=", + "string.prototype.trimend": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz", + "integrity": "sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw==", "dev": true, "requires": { - "define-properties": "^1.1.2", - "es-abstract": "^1.4.3", - "function-bind": "^1.0.2" + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" } }, - "string.prototype.padstart": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/string.prototype.padstart/-/string.prototype.padstart-3.0.0.tgz", - "integrity": "sha1-W8+tOfRkm7LQMSkuGbzwtRDUskI=", + "string.prototype.trimstart": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz", + "integrity": "sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg==", "dev": true, "requires": { - "define-properties": "^1.1.2", - "es-abstract": "^1.4.3", - "function-bind": "^1.0.2" + "call-bind": "^1.0.0", + "define-properties": 
"^1.1.3" } }, "string_decoder": { @@ -10140,12 +10242,20 @@ } }, "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", "dev": true, "requires": { - "ansi-regex": "^4.1.0" + "ansi-regex": "^5.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + } } }, "strip-eof": { @@ -10154,6 +10264,12 @@ "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=", "dev": true }, + "strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true + }, "strip-indent": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-2.0.0.tgz", @@ -10161,9 +10277,9 @@ "dev": true }, "strip-json-comments": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", - "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "dev": true }, "stylehacks": { @@ -10178,74 +10294,18 @@ }, "dependencies": { "postcss-selector-parser": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.1.tgz", - "integrity": "sha1-T4dfSvsMllc9XPTXQBGu4lCn6GU=", + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-3.1.2.tgz", + "integrity": "sha512-h7fJ/5uWuRVyOtkO45pnt1Ih40CEleeyCHzipqAZO2e5H20g25Y48uYnFUiShvY4rZWNJ/Bib/KVPmanaCtOhA==", "dev": true, "requires": { - "dot-prop": "^4.1.1", + "dot-prop": "^5.2.0", "indexes-of": "^1.0.1", "uniq": "^1.0.1" } } } }, - "stylus": { - "version": "0.54.5", - "resolved": "https://registry.npmjs.org/stylus/-/stylus-0.54.5.tgz", - "integrity": "sha1-QrlWCTHKcJDOhRWnmLqeaqPW3Hk=", - "dev": true, - "requires": { - "css-parse": "1.7.x", - "debug": "*", - "glob": "7.0.x", - "mkdirp": "0.5.x", - "sax": "0.5.x", - "source-map": "0.1.x" - }, - "dependencies": { - "glob": { - "version": "7.0.6", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.0.6.tgz", - "integrity": "sha1-IRuvr0nlJbjNkyYNFKsTYVKz9Xo=", - "dev": true, - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.0.2", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - } - }, - "sax": { - "version": "0.5.8", - "resolved": "https://registry.npmjs.org/sax/-/sax-0.5.8.tgz", - "integrity": "sha1-1HLbIo6zMcJQaw6MFVJK25OdEsE=", - "dev": true - }, - "source-map": { - "version": "0.1.43", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.1.43.tgz", - "integrity": "sha1-wkvBRspRfBRx9drL4lcbK3+eM0Y=", - "dev": true, - "requires": { - "amdefine": ">=0.0.4" 
- } - } - } - }, - "stylus-loader": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/stylus-loader/-/stylus-loader-3.0.2.tgz", - "integrity": "sha512-+VomPdZ6a0razP+zinir61yZgpw2NfljeSsdUF5kJuEzlo3khXhY19Fn6l8QQz1GRJGtMCo8nG5C04ePyV7SUA==", - "dev": true, - "requires": { - "loader-utils": "^1.0.2", - "lodash.clonedeep": "^4.5.0", - "when": "~3.6.x" - } - }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -10262,18 +10322,17 @@ "dev": true }, "svgo": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/svgo/-/svgo-1.2.2.tgz", - "integrity": "sha512-rAfulcwp2D9jjdGu+0CuqlrAUin6bBWrpoqXWwKDZZZJfXcUXQSxLJOFJCQCSA0x0pP2U0TxSlJu2ROq5Bq6qA==", + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/svgo/-/svgo-1.3.2.tgz", + "integrity": "sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw==", "dev": true, "requires": { "chalk": "^2.4.1", "coa": "^2.0.2", "css-select": "^2.0.0", "css-select-base-adapter": "^0.1.1", - "css-tree": "1.0.0-alpha.28", - "css-url-regex": "^1.1.0", - "csso": "^3.5.1", + "css-tree": "1.0.0-alpha.37", + "csso": "^4.0.2", "js-yaml": "^3.13.1", "mkdirp": "~0.5.1", "object.values": "^1.1.0", @@ -10284,53 +10343,48 @@ } }, "table": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/table/-/table-4.0.2.tgz", - "integrity": "sha512-UUkEAPdSGxtRpiV9ozJ5cMTtYiqz7Ni1OGqLXRCynrvzdtR1p+cfOWe2RJLwvUG8hNanaSRjecIqwOjqeatDsA==", + "version": "5.4.6", + "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz", + "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==", "dev": true, - "optional": true, "requires": { - "ajv": "^5.2.3", - "ajv-keywords": "^2.1.0", - "chalk": "^2.1.0", - "lodash": "^4.17.4", - "slice-ansi": "1.0.0", - "string-width": "^2.1.1" + "ajv": "^6.10.2", + "lodash": "^4.17.14", + "slice-ansi": "^2.1.0", + "string-width": "^3.0.0" }, "dependencies": { - "ajv": { - "version": "5.5.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz", - "integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=", - "dev": true, - "optional": true, - "requires": { - "co": "^4.6.0", - "fast-deep-equal": "^1.0.0", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.3.0" - } + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true }, - "ajv-keywords": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-2.1.1.tgz", - "integrity": "sha1-YXmX/F9gV2iUxDX5QNgZ4TW4B2I=", - "dev": true, - "optional": true + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true }, - "fast-deep-equal": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-1.1.0.tgz", - "integrity": "sha1-wFNHeBfIa1HaqFPIHgWbcz0CNhQ=", + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", "dev": true, - "optional": true + "requires": { + "emoji-regex": 
"^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } }, - "json-schema-traverse": { - "version": "0.3.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz", - "integrity": "sha1-NJptRMU6Ud6JtAgFxdXlm0F9M0A=", + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", "dev": true, - "optional": true + "requires": { + "ansi-regex": "^4.1.0" + } } } }, @@ -10341,9 +10395,9 @@ "dev": true }, "terser": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/terser/-/terser-4.1.2.tgz", - "integrity": "sha512-jvNoEQSPXJdssFwqPSgWjsOrb+ELoE+ILpHPKXC83tIxOlh2U75F1KuB2luLD/3a6/7K3Vw5pDn+hvu0C4AzSw==", + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/terser/-/terser-4.8.0.tgz", + "integrity": "sha512-EAPipTNeWsb/3wLPeup1tVPaXfIaU68xMnVdPafIL1TV05OhASArYyIfFvnvJCNrR2NIOvDVNNTFRa+Re2MWyw==", "dev": true, "requires": { "commander": "^2.20.0", @@ -10360,23 +10414,86 @@ } }, "terser-webpack-plugin": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.3.0.tgz", - "integrity": "sha512-W2YWmxPjjkUcOWa4pBEv4OP4er1aeQJlSo2UhtCFQCuRXEHjOFscO8VyWHj9JLlA0RzQb8Y2/Ta78XZvT54uGg==", + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-1.4.5.tgz", + "integrity": "sha512-04Rfe496lN8EYruwi6oPQkG0vo8C+HT49X687FZnpPF0qMAIHONI6HEXYPKDOE8e5HjXTyKfqRd/agHtH0kOtw==", "dev": true, "requires": { - "cacache": "^11.3.2", - "find-cache-dir": "^2.0.0", + "cacache": "^12.0.2", + "find-cache-dir": "^2.1.0", "is-wsl": "^1.1.0", - "loader-utils": "^1.2.3", "schema-utils": "^1.0.0", - "serialize-javascript": "^1.7.0", + "serialize-javascript": "^4.0.0", "source-map": "^0.6.1", - "terser": "^4.0.0", - "webpack-sources": "^1.3.0", + "terser": "^4.1.2", + "webpack-sources": "^1.4.0", "worker-farm": "^1.7.0" }, "dependencies": { + "find-cache-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-2.1.0.tgz", + "integrity": "sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==", + "dev": true, + "requires": { + "commondir": "^1.0.1", + "make-dir": "^2.0.0", + "pkg-dir": "^3.0.0" + } + }, + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "requires": { + "pify": "^4.0.1", + "semver": "^5.6.0" + } + }, + "p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": 
"sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", + "dev": true + }, + "pkg-dir": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-3.0.0.tgz", + "integrity": "sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw==", + "dev": true, + "requires": { + "find-up": "^3.0.0" + } + }, "schema-utils": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", @@ -10403,9 +10520,9 @@ "dev": true }, "thenify": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.0.tgz", - "integrity": "sha1-5p44obq+lpsBCCB5eLn2K4hgSDk=", + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", "dev": true, "requires": { "any-promise": "^1.0.0" @@ -10421,9 +10538,9 @@ } }, "thread-loader": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/thread-loader/-/thread-loader-2.1.2.tgz", - "integrity": "sha512-7xpuc9Ifg6WU+QYw/8uUqNdRwMD+N5gjwHKMqETrs96Qn+7BHwECpt2Brzr4HFlf4IAkZsayNhmGdbkBsTJ//w==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/thread-loader/-/thread-loader-2.1.3.tgz", + "integrity": "sha512-wNrVKH2Lcf8ZrWxDF/khdlLlsTMczdcwPA9VEK4c2exlEPynYWxi9op3nPTo5lAnDIkE0rQEB3VBP+4Zncc9Hg==", "dev": true, "requires": { "loader-runner": "^2.3.1", @@ -10448,15 +10565,15 @@ } }, "thunky": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.0.3.tgz", - "integrity": "sha512-YwT8pjmNcAXBZqrubu22P4FYsh2D4dxRmnWBOL8Jk8bUcRUtc5326kx32tuTmFDAZtLOGEVNl8POAR8j896Iow==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", + "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==", "dev": true }, "timers-browserify": { - "version": "2.0.10", - "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.10.tgz", - "integrity": "sha512-YvC1SV1XdOUaL6gx5CoGroT3Gu49pK9+TZ38ErPldOWW4j49GI1HKs9DV+KGq/w6y+LZ72W1c8cKz2vzY+qpzg==", + "version": "2.0.12", + "resolved": "https://registry.npmjs.org/timers-browserify/-/timers-browserify-2.0.12.tgz", + "integrity": "sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ==", "dev": true, "requires": { "setimmediate": "^1.0.4" @@ -10544,39 +10661,31 @@ "dev": true }, "tough-cookie": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", - "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz", + "integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==", "dev": true, "requires": { - "psl": "^1.1.24", - "punycode": "^1.4.1" - }, - "dependencies": { - "punycode": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", - "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=", - "dev": true - } + "psl": "^1.1.28", + 
"punycode": "^2.1.1" } }, - "trim-right": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/trim-right/-/trim-right-1.0.1.tgz", - "integrity": "sha1-yy4SAwZ+DI3h9hQJS5/kVwTqYAM=", - "dev": true - }, "tryer": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/tryer/-/tryer-1.0.1.tgz", "integrity": "sha512-c3zayb8/kWWpycWYg87P71E1S1ZL6b6IJxfb5fvsUgsf0S2MVGaDhDXXjDMpdCpfWXqptc+4mXwmiy1ypXqRAA==", "dev": true }, + "ts-pnp": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/ts-pnp/-/ts-pnp-1.2.0.tgz", + "integrity": "sha512-csd+vJOb/gkzvcCHgTGSChYpy5f1/XKNsmvBGO4JXS+z1v2HobugDz4s1IeFXM3wZB44uczs+eazB5Q/ccdhQw==", + "dev": true + }, "tslib": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.10.0.tgz", - "integrity": "sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==", + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", "dev": true }, "tty-browserify": { @@ -10672,15 +10781,15 @@ } }, "unicode-match-property-value-ecmascript": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.1.0.tgz", - "integrity": "sha512-hDTHvaBk3RmFzvSl0UVrUmC3PuW9wKVnpoUDYH0JDkSIovzw+J5viQmeYHxVSBptubnr7PbH2e0fnpDRQnQl5g==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.2.0.tgz", + "integrity": "sha512-wjuQHGQVofmSJv1uVISKLE5zO2rNGzM/KCYZch/QQvez7C1hUhBIuZ701fYXExuufJFMPhv2SyL8CyoIfMLbIQ==", "dev": true }, "unicode-property-aliases-ecmascript": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.0.5.tgz", - "integrity": "sha512-L5RAqCfXqAwR3RriF8pM0lU0w4Ryf/GgzONwi6KnL1taJQa7x1TCxdJnILX59WIGOwR57IVxn7Nej0fz1Ny6fw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.1.0.tgz", + "integrity": "sha512-PqSoPh/pWetQ2phoj5RLiaqIk4kCNwoV3CI+LfGmWLKI3rE3kl1h59XpX2BjgDrmbxD9ARtQobPGU1SguCYuQg==", "dev": true }, "union-value": { @@ -10784,9 +10893,9 @@ } }, "upath": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/upath/-/upath-1.1.2.tgz", - "integrity": "sha512-kXpym8nmDmlCBr7nKdIx8P2jNBa+pBpIUFRnKJ4dr8htyYGJFokkr2ZvERRtUN+9SY+JqXouNgUPtv6JQva/2Q==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", + "integrity": "sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==", "dev": true }, "upper-case": { @@ -10796,9 +10905,9 @@ "dev": true }, "uri-js": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", - "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", "dev": true, "requires": { "punycode": "^2.1.0" @@ -10829,27 +10938,14 @@ } }, "url-loader": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/url-loader/-/url-loader-1.1.2.tgz", - "integrity": 
"sha512-dXHkKmw8FhPqu8asTc1puBfe3TehOCo2+RmOOev5suNCIYBcT626kxiWg1NBVkwc4rO8BGa7gP70W7VXuqHrjg==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/url-loader/-/url-loader-2.3.0.tgz", + "integrity": "sha512-goSdg8VY+7nPZKUEChZSEtW5gjbS66USIGCeSJ1OVOJ7Yfuh/36YxCwMi5HVEJh6mqUYOoy3NJ0vlOMrWsSHog==", "dev": true, "requires": { - "loader-utils": "^1.1.0", - "mime": "^2.0.3", - "schema-utils": "^1.0.0" - }, - "dependencies": { - "schema-utils": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", - "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", - "dev": true, - "requires": { - "ajv": "^6.1.0", - "ajv-errors": "^1.0.0", - "ajv-keywords": "^3.1.0" - } - } + "loader-utils": "^1.2.3", + "mime": "^2.4.4", + "schema-utils": "^2.5.0" } }, "url-parse": { @@ -10892,13 +10988,36 @@ "dev": true }, "util.promisify": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/util.promisify/-/util.promisify-1.0.0.tgz", - "integrity": "sha512-i+6qA2MPhvoKLuxnJNpXAGhg7HphQOSUq2LKMZD0m15EiskXUkMvKdF4Uui0WYeCUGea+o2cw/ZuwehtfsrNkA==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/util.promisify/-/util.promisify-1.0.1.tgz", + "integrity": "sha512-g9JpC/3He3bm38zsLupWryXHoEcS22YHthuPQSJdMy6KNrzIRzWqcsHzD/WUnqe45whVou4VIsPew37DoXWNrA==", "dev": true, "requires": { - "define-properties": "^1.1.2", - "object.getownpropertydescriptors": "^2.0.3" + "define-properties": "^1.1.3", + "es-abstract": "^1.17.2", + "has-symbols": "^1.0.1", + "object.getownpropertydescriptors": "^2.1.0" + }, + "dependencies": { + "es-abstract": { + "version": "1.17.7", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.7.tgz", + "integrity": "sha512-VBl/gnfcJ7OercKA9MVaegWsBHFjV492syMudcnQZvt/Dw8ezpcOHYZXa/J96O8vx+g4x65YKhxOwDUh63aS5g==", + "dev": true, + "requires": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + } + } } }, "utila": { @@ -10914,9 +11033,15 @@ "dev": true }, "uuid": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", - "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "dev": true + }, + "v8-compile-cache": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.2.0.tgz", + "integrity": "sha512-gTpR5XQNKFwOd4clxfnhaqvfqMpqEwr4tOtCyz4MtYZX2JYhfr1JvBFKdS+7K/9rfpZR3VLX+YWBbKoxCgS43Q==", "dev": true }, "validate-npm-package-license": { @@ -10936,9 +11061,9 @@ "dev": true }, "vendors": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/vendors/-/vendors-1.0.3.tgz", - "integrity": "sha512-fOi47nsJP5Wqefa43kyWSg80qF+Q3XA6MUkgi7Hp1HQaKDQW4cQrK2D0P7mmbFtsV1N89am55Yru/nyEwRubcw==", + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/vendors/-/vendors-1.0.4.tgz", + "integrity": "sha512-/juG65kTL4Cy2su4P8HjtkTxk6VmJDiOPBufWniqQ6wknac6jNiXS9vU+hO3wgusiyqWlzTbVHi0dyJqRONg3w==", "dev": true }, "verror": { @@ -10953,15 
+11078,15 @@ } }, "vm-browserify": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.0.tgz", - "integrity": "sha512-iq+S7vZJE60yejDYM0ek6zg308+UZsdtPExWP9VZoCFCz1zkJoXFnAX7aZfd/ZwrkidzdUZL0C/ryW+JwAiIGw==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.2.tgz", + "integrity": "sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==", "dev": true }, "vue": { - "version": "2.6.10", - "resolved": "https://registry.npmjs.org/vue/-/vue-2.6.10.tgz", - "integrity": "sha512-ImThpeNU9HbdZL3utgMCq0oiMzAkt1mcgy3/E6zWC/G6AaQoeuFdsl9nDhTDU3X1R6FK7nsIUuRACVcjI+A2GQ==" + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/vue/-/vue-2.6.12.tgz", + "integrity": "sha512-uhmLFETqPPNyuLLbsKz6ioJ4q7AZHzD8ZVFNATNyICSZouqP2Sz0rotWQC8UNBF6VGSCs5abnKJoStA6JbCbfg==" }, "vue-cli-plugin-axios": { "version": "0.0.4", @@ -10970,72 +11095,99 @@ "dev": true }, "vue-cli-plugin-vuetify": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/vue-cli-plugin-vuetify/-/vue-cli-plugin-vuetify-0.5.0.tgz", - "integrity": "sha512-TigfiZUs7SN3Z6uxKilqJUtYxte8vp0F4QxabCli6hkKPqU97JzAZc3P7AL6omkRAd2DMI26fOrIGjuALTvXww==", - "dev": true - }, - "vue-eslint-parser": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-2.0.3.tgz", - "integrity": "sha512-ZezcU71Owm84xVF6gfurBQUGg8WQ+WZGxgDEQu1IHFBZNx7BFZg3L1yHxrCBNNwbwFtE1GuvfJKMtb6Xuwc/Bw==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/vue-cli-plugin-vuetify/-/vue-cli-plugin-vuetify-2.1.0.tgz", + "integrity": "sha512-cvJR2+6U1PS4UUP7NnuylWfxM3LrzKnusOgrCZUyzr5abyDxf/t0TZy5EqfJwAa9/TsIO0W4gOoaoy/f4Yw0aQ==", "dev": true, - "optional": true, "requires": { - "debug": "^3.1.0", - "eslint-scope": "^3.7.1", - "eslint-visitor-keys": "^1.0.0", - "espree": "^3.5.2", - "esquery": "^1.0.0", - "lodash": "^4.17.4" + "null-loader": "^3.0.0", + "semver": "^7.1.2", + "shelljs": "^0.8.3" }, "dependencies": { - "debug": { - "version": "3.2.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz", - "integrity": "sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==", + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", "dev": true, - "optional": true, "requires": { - "ms": "^2.1.1" + "yallist": "^4.0.0" + } + }, + "semver": { + "version": "7.3.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.4.tgz", + "integrity": "sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" } }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + } + } + }, + "vue-eslint-parser": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/vue-eslint-parser/-/vue-eslint-parser-7.4.1.tgz", + "integrity": "sha512-AFvhdxpFvliYq1xt/biNBslTHE/zbEvSnr1qfHA/KxRIpErmEDrQZlQnvEexednRHmLfDNOMuDYwZL5xkLzIXQ==", + "dev": true, + "requires": { + "debug": "^4.1.1", + "eslint-scope": "^5.0.0", + "eslint-visitor-keys": "^1.1.0", + "espree": "^6.2.1", + "esquery": "^1.0.1", + "lodash": 
"^4.17.15" + }, + "dependencies": { "eslint-scope": { - "version": "3.7.3", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-3.7.3.tgz", - "integrity": "sha512-W+B0SvF4gamyCTmUc+uITPY0989iXVfKvhwtmJocTaYoc/3khEHmEmvfY/Gn9HA9VV75jrQECsHizkNw1b68FA==", + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", "dev": true, - "optional": true, "requires": { - "esrecurse": "^4.1.0", + "esrecurse": "^4.3.0", "estraverse": "^4.1.1" } } } }, "vue-hot-reload-api": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/vue-hot-reload-api/-/vue-hot-reload-api-2.3.3.tgz", - "integrity": "sha512-KmvZVtmM26BQOMK1rwUZsrqxEGeKiYSZGA7SNWE6uExx8UX/cj9hq2MRV/wWC3Cq6AoeDGk57rL9YMFRel/q+g==", + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/vue-hot-reload-api/-/vue-hot-reload-api-2.3.4.tgz", + "integrity": "sha512-BXq3jwIagosjgNVae6tkHzzIk6a8MHFtzAdwhnV5VlvPTFxDCvIttgSiHWjdGoTJvXtmRu5HacExfdarRcFhog==", "dev": true }, "vue-loader": { - "version": "15.7.0", - "resolved": "https://registry.npmjs.org/vue-loader/-/vue-loader-15.7.0.tgz", - "integrity": "sha512-x+NZ4RIthQOxcFclEcs8sXGEWqnZHodL2J9Vq+hUz+TDZzBaDIh1j3d9M2IUlTjtrHTZy4uMuRdTi8BGws7jLA==", + "version": "15.9.6", + "resolved": "https://registry.npmjs.org/vue-loader/-/vue-loader-15.9.6.tgz", + "integrity": "sha512-j0cqiLzwbeImIC6nVIby2o/ABAWhlppyL/m5oJ67R5MloP0hj/DtFgb0Zmq3J9CG7AJ+AXIvHVnJAPBvrLyuDg==", "dev": true, "requires": { - "@vue/component-compiler-utils": "^2.5.1", + "@vue/component-compiler-utils": "^3.1.0", "hash-sum": "^1.0.2", "loader-utils": "^1.1.0", "vue-hot-reload-api": "^2.3.0", "vue-style-loader": "^4.1.0" + }, + "dependencies": { + "hash-sum": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/hash-sum/-/hash-sum-1.0.2.tgz", + "integrity": "sha1-M7QHd3VMZDJXPBIMw4CLvRDUfwQ=", + "dev": true + } } }, "vue-router": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/vue-router/-/vue-router-3.0.7.tgz", - "integrity": "sha512-utJ+QR3YlIC/6x6xq17UMXeAfxEvXA0VKD3PiSio7hBOZNusA1jXcbxZxVEfJunLp48oonjTepY8ORoIlRx/EQ==" + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/vue-router/-/vue-router-3.5.1.tgz", + "integrity": "sha512-RRQNLT8Mzr8z7eL4p7BtKvRaTSGdCbTy2+Mm5HTJvLGYSSeG9gDzNasJPP/yOYKLy+/cLG/ftrqq5fvkFwBJEw==" }, "vue-style-loader": { "version": "4.1.2", @@ -11045,12 +11197,20 @@ "requires": { "hash-sum": "^1.0.2", "loader-utils": "^1.0.2" + }, + "dependencies": { + "hash-sum": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/hash-sum/-/hash-sum-1.0.2.tgz", + "integrity": "sha1-M7QHd3VMZDJXPBIMw4CLvRDUfwQ=", + "dev": true + } } }, "vue-template-compiler": { - "version": "2.6.10", - "resolved": "https://registry.npmjs.org/vue-template-compiler/-/vue-template-compiler-2.6.10.tgz", - "integrity": "sha512-jVZkw4/I/HT5ZMvRnhv78okGusqe0+qH2A0Em0Cp8aq78+NK9TII263CDVz2QXZsIT+yyV/gZc/j/vlwa+Epyg==", + "version": "2.6.12", + "resolved": "https://registry.npmjs.org/vue-template-compiler/-/vue-template-compiler-2.6.12.tgz", + "integrity": "sha512-OzzZ52zS41YUbkCBfdXShQTe69j1gQDZ9HIX8miuC9C3rBCk9wIRjLiZZLrmX9V+Ftq/YEyv1JaVr5Y/hNtByg==", "dev": true, "requires": { "de-indent": "^1.0.2", @@ -11064,28 +11224,180 @@ "dev": true }, "vuetify": { - "version": "1.5.16", - "resolved": "https://registry.npmjs.org/vuetify/-/vuetify-1.5.16.tgz", - "integrity": 
"sha512-yBgOsfurKQkeS+l+rrTQZ2bFk0D9ezjHhkuVM5A/yVzcg62sY2nfYaq/H++uezBWC9WYFrp/5OmSocJQcWn9Qw==" + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/vuetify/-/vuetify-2.4.3.tgz", + "integrity": "sha512-i2/Df0U0sedlaCbft4NMbna7WXbTCBhKVYTMjBrLVzrYTTWqzSO7ZCxLuDRY7MjwQhn7AOec7ent9U/NyIICqA==" }, "vuetify-loader": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/vuetify-loader/-/vuetify-loader-1.2.2.tgz", - "integrity": "sha512-j6KWPuwQ4xTxPDksbFwbSJle7+3oSjNvJ/CViTEgbPLFPqnNsR8JUtRldURIQ0cmAmr0/CoLSZkj0B8JKqOBMA==", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/vuetify-loader/-/vuetify-loader-1.7.1.tgz", + "integrity": "sha512-zRfgNxi/SeE8Nh4Vhw3aIJftYrcJWd3PqPn8+cB/F9CgBVhJo5qp2BuFL70k33G1kTaBvcjYgM+vZc9nvvU3xg==", "dev": true, "requires": { - "loader-utils": "^1.1.0" + "decache": "^4.6.0", + "file-loader": "^6.2.0", + "loader-utils": "^2.0.0" + }, + "dependencies": { + "file-loader": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz", + "integrity": "sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==", + "dev": true, + "requires": { + "loader-utils": "^2.0.0", + "schema-utils": "^3.0.0" + } + }, + "loader-utils": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.0.tgz", + "integrity": "sha512-rP4F0h2RaWSvPEkD7BLDFQnvSf+nK+wr3ESUjNTyAGobqrijmW92zc+SO6d4p4B1wh7+B/Jg1mkQe5NYUEHtHQ==", + "dev": true, + "requires": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^2.1.2" + } + }, + "schema-utils": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.0.0.tgz", + "integrity": "sha512-6D82/xSzO094ajanoOSbe4YvXWMfn2A//8Y1+MUqFAJul5Bs+yn36xbK9OtNDcRVSBJ9jjeoXftM6CfztsjOAA==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.6", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + } + } } }, "watchpack": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.6.0.tgz", - "integrity": "sha512-i6dHe3EyLjMmDlU1/bGQpEw25XSjkJULPuAVKCbNRefQVq48yXKUpwg538F7AZTf9kyr57zj++pQFltUa5H7yA==", + "version": "1.7.5", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-1.7.5.tgz", + "integrity": "sha512-9P3MWk6SrKjHsGkLT2KHXdQ/9SNkyoJbabxnKOoJepsvJjJG8uYTR3yTPxPQvNDI3w4Nz1xnE0TLHK4RIVe/MQ==", "dev": true, "requires": { - "chokidar": "^2.0.2", + "chokidar": "^3.4.1", "graceful-fs": "^4.1.2", - "neo-async": "^2.5.0" + "neo-async": "^2.5.0", + "watchpack-chokidar2": "^2.0.1" + } + }, + "watchpack-chokidar2": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/watchpack-chokidar2/-/watchpack-chokidar2-2.0.1.tgz", + "integrity": "sha512-nCFfBIPKr5Sh61s4LPpy1Wtfi0HE8isJ3d2Yb5/Ppw2P2B/3eVSEBjKfN0fmHJSK14+31KwMKmcrzs2GM4P0Ww==", + "dev": true, + "optional": true, + "requires": { + "chokidar": "^2.1.8" + }, + "dependencies": { + "anymatch": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", + "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", + "dev": true, + "optional": true, + "requires": { + "micromatch": "^3.1.4", + "normalize-path": "^2.1.1" + }, + "dependencies": { + "normalize-path": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", + "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", + "dev": true, + "optional": true, + "requires": { + 
"remove-trailing-separator": "^1.0.1" + } + } + } + }, + "binary-extensions": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", + "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==", + "dev": true, + "optional": true + }, + "chokidar": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", + "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", + "dev": true, + "optional": true, + "requires": { + "anymatch": "^2.0.0", + "async-each": "^1.0.1", + "braces": "^2.3.2", + "fsevents": "^1.2.7", + "glob-parent": "^3.1.0", + "inherits": "^2.0.3", + "is-binary-path": "^1.0.0", + "is-glob": "^4.0.0", + "normalize-path": "^3.0.0", + "path-is-absolute": "^1.0.0", + "readdirp": "^2.2.1", + "upath": "^1.1.1" + } + }, + "fsevents": { + "version": "1.2.13", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", + "integrity": "sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==", + "dev": true, + "optional": true + }, + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "dev": true, + "optional": true, + "requires": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + }, + "dependencies": { + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "dev": true, + "optional": true, + "requires": { + "is-extglob": "^2.1.0" + } + } + } + }, + "is-binary-path": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", + "integrity": "sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=", + "dev": true, + "optional": true, + "requires": { + "binary-extensions": "^1.0.0" + } + }, + "readdirp": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", + "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", + "dev": true, + "optional": true, + "requires": { + "graceful-fs": "^4.1.11", + "micromatch": "^3.1.10", + "readable-stream": "^2.0.2" + } + } } }, "wbuf": { @@ -11107,45 +11419,57 @@ } }, "webpack": { - "version": "4.28.4", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-4.28.4.tgz", - "integrity": "sha512-NxjD61WsK/a3JIdwWjtIpimmvE6UrRi3yG54/74Hk9rwNj5FPkA4DJCf1z4ByDWLkvZhTZE+P3C/eh6UD5lDcw==", - "dev": true, - "requires": { - "@webassemblyjs/ast": "1.7.11", - "@webassemblyjs/helper-module-context": "1.7.11", - "@webassemblyjs/wasm-edit": "1.7.11", - "@webassemblyjs/wasm-parser": "1.7.11", - "acorn": "^5.6.2", - "acorn-dynamic-import": "^3.0.0", - "ajv": "^6.1.0", - "ajv-keywords": "^3.1.0", - "chrome-trace-event": "^1.0.0", - "enhanced-resolve": "^4.1.0", - "eslint-scope": "^4.0.0", + "version": "4.46.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-4.46.0.tgz", + "integrity": "sha512-6jJuJjg8znb/xRItk7bkT0+Q7AHCYjjFnvKIWQPkNIOyRqoCGvkOs0ipeQzrqz4l5FtN5ZI/ukEHroeX/o1/5Q==", + "dev": true, + "requires": { + "@webassemblyjs/ast": "1.9.0", + "@webassemblyjs/helper-module-context": "1.9.0", + "@webassemblyjs/wasm-edit": "1.9.0", + "@webassemblyjs/wasm-parser": "1.9.0", + "acorn": "^6.4.1", + "ajv": "^6.10.2", + "ajv-keywords": "^3.4.1", + 
"chrome-trace-event": "^1.0.2", + "enhanced-resolve": "^4.5.0", + "eslint-scope": "^4.0.3", "json-parse-better-errors": "^1.0.2", - "loader-runner": "^2.3.0", - "loader-utils": "^1.1.0", - "memory-fs": "~0.4.1", - "micromatch": "^3.1.8", - "mkdirp": "~0.5.0", - "neo-async": "^2.5.0", - "node-libs-browser": "^2.0.0", - "schema-utils": "^0.4.4", - "tapable": "^1.1.0", - "terser-webpack-plugin": "^1.1.0", - "watchpack": "^1.5.0", - "webpack-sources": "^1.3.0" + "loader-runner": "^2.4.0", + "loader-utils": "^1.2.3", + "memory-fs": "^0.4.1", + "micromatch": "^3.1.10", + "mkdirp": "^0.5.3", + "neo-async": "^2.6.1", + "node-libs-browser": "^2.2.1", + "schema-utils": "^1.0.0", + "tapable": "^1.1.3", + "terser-webpack-plugin": "^1.4.3", + "watchpack": "^1.7.4", + "webpack-sources": "^1.4.1" + }, + "dependencies": { + "schema-utils": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-1.0.0.tgz", + "integrity": "sha512-i27Mic4KovM/lnGsy8whRCHhc7VicJajAjTrYg11K9zfZXnYIt4k5F+kZkwjnrhKzLic/HLU4j11mjsz2G/75g==", + "dev": true, + "requires": { + "ajv": "^6.1.0", + "ajv-errors": "^1.0.0", + "ajv-keywords": "^3.1.0" + } + } } }, "webpack-bundle-analyzer": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.3.2.tgz", - "integrity": "sha512-7qvJLPKB4rRWZGjVp5U1KEjwutbDHSKboAl0IfafnrdXMrgC0tOtZbQD6Rw0u4cmpgRN4O02Fc0t8eAT+FgGzA==", + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.9.0.tgz", + "integrity": "sha512-Ob8amZfCm3rMB1ScjQVlbYYUEJyEjdEtQ92jqiFUYt5VkEeO2v5UMbv49P/gnmCZm3A6yaFQzCBvpZqN4MUsdA==", "dev": true, "requires": { - "acorn": "^6.0.7", - "acorn-walk": "^6.1.1", + "acorn": "^7.1.1", + "acorn-walk": "^7.1.1", "bfj": "^6.1.1", "chalk": "^2.4.1", "commander": "^2.18.0", @@ -11153,79 +11477,82 @@ "express": "^4.16.3", "filesize": "^3.6.1", "gzip-size": "^5.0.0", - "lodash": "^4.17.10", + "lodash": "^4.17.19", "mkdirp": "^0.5.1", "opener": "^1.5.1", "ws": "^6.0.0" }, "dependencies": { "acorn": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-6.2.0.tgz", - "integrity": "sha512-8oe72N3WPMjA+2zVG71Ia0nXZ8DpQH+QyyHO+p06jT8eg8FGG3FbcUIi8KziHlAfheJQZeoqbvq1mQSQHXKYLw==", + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", "dev": true } } }, "webpack-chain": { - "version": "4.12.1", - "resolved": "https://registry.npmjs.org/webpack-chain/-/webpack-chain-4.12.1.tgz", - "integrity": "sha512-BCfKo2YkDe2ByqkEWe1Rw+zko4LsyS75LVr29C6xIrxAg9JHJ4pl8kaIZ396SUSNp6b4815dRZPSTAS8LlURRQ==", + "version": "6.5.1", + "resolved": "https://registry.npmjs.org/webpack-chain/-/webpack-chain-6.5.1.tgz", + "integrity": "sha512-7doO/SRtLu8q5WM0s7vPKPWX580qhi0/yBHkOxNkv50f6qB76Zy9o2wRTrrPULqYTvQlVHuvbA8v+G5ayuUDsA==", "dev": true, "requires": { "deepmerge": "^1.5.2", - "javascript-stringify": "^1.6.0" + "javascript-stringify": "^2.0.1" } }, "webpack-dev-middleware": { - "version": "3.7.0", - "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-3.7.0.tgz", - "integrity": "sha512-qvDesR1QZRIAZHOE3iQ4CXLZZSQ1lAUsSpnQmlB1PBfoN/xdRjmge3Dok0W4IdaVLJOGJy3sGI4sZHwjRU0PCA==", + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-3.7.3.tgz", + "integrity": 
"sha512-djelc/zGiz9nZj/U7PTBi2ViorGJXEWo/3ltkPbDyxCXhhEXkW0ce99falaok4TPj+AsxLiXJR0EBOb0zh9fKQ==", "dev": true, "requires": { "memory-fs": "^0.4.1", - "mime": "^2.4.2", + "mime": "^2.4.4", + "mkdirp": "^0.5.1", "range-parser": "^1.2.1", "webpack-log": "^2.0.0" } }, "webpack-dev-server": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-3.7.2.tgz", - "integrity": "sha512-mjWtrKJW2T9SsjJ4/dxDC2fkFVUw8jlpemDERqV0ZJIkjjjamR2AbQlr3oz+j4JLhYCHImHnXZK5H06P2wvUew==", + "version": "3.11.2", + "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-3.11.2.tgz", + "integrity": "sha512-A80BkuHRQfCiNtGBS1EMf2ChTUs0x+B3wGDFmOeT4rmJOHhHTCH2naNxIHhmkr0/UillP4U3yeIyv1pNp+QDLQ==", "dev": true, "requires": { "ansi-html": "0.0.7", "bonjour": "^3.5.0", - "chokidar": "^2.1.6", + "chokidar": "^2.1.8", "compression": "^1.7.4", "connect-history-api-fallback": "^1.6.0", "debug": "^4.1.1", "del": "^4.1.1", "express": "^4.17.1", - "html-entities": "^1.2.1", - "http-proxy-middleware": "^0.19.1", + "html-entities": "^1.3.1", + "http-proxy-middleware": "0.19.1", "import-local": "^2.0.0", "internal-ip": "^4.3.0", "ip": "^1.1.5", + "is-absolute-url": "^3.0.3", "killable": "^1.0.1", - "loglevel": "^1.6.3", + "loglevel": "^1.6.8", "opn": "^5.5.0", "p-retry": "^3.0.1", - "portfinder": "^1.0.20", + "portfinder": "^1.0.26", "schema-utils": "^1.0.0", - "selfsigned": "^1.10.4", - "semver": "^6.1.1", + "selfsigned": "^1.10.8", + "semver": "^6.3.0", "serve-index": "^1.9.1", - "sockjs": "0.3.19", - "sockjs-client": "1.3.0", - "spdy": "^4.0.0", + "sockjs": "^0.3.21", + "sockjs-client": "^1.5.0", + "spdy": "^4.0.2", "strip-ansi": "^3.0.1", "supports-color": "^6.1.0", "url": "^0.11.0", - "webpack-dev-middleware": "^3.7.0", + "webpack-dev-middleware": "^3.7.2", "webpack-log": "^2.0.0", - "yargs": "12.0.5" + "ws": "^6.2.1", + "yargs": "^13.3.2" }, "dependencies": { "ansi-regex": { @@ -11234,34 +11561,93 @@ "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", "dev": true }, + "anymatch": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", + "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", + "dev": true, + "requires": { + "micromatch": "^3.1.4", + "normalize-path": "^2.1.1" + }, + "dependencies": { + "normalize-path": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", + "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", + "dev": true, + "requires": { + "remove-trailing-separator": "^1.0.1" + } + } + } + }, + "binary-extensions": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", + "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==", + "dev": true + }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true + }, + "chokidar": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", + "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", + "dev": true, + "requires": { + "anymatch": "^2.0.0", + "async-each": "^1.0.1", + "braces": "^2.3.2", + "fsevents": "^1.2.7", + "glob-parent": "^3.1.0", + 
"inherits": "^2.0.3", + "is-binary-path": "^1.0.0", + "is-glob": "^4.0.0", + "normalize-path": "^3.0.0", + "path-is-absolute": "^1.0.0", + "readdirp": "^2.2.1", + "upath": "^1.1.1" + } + }, "cliui": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.1.0.tgz", - "integrity": "sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz", + "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==", "dev": true, "requires": { - "string-width": "^2.1.1", - "strip-ansi": "^4.0.0", - "wrap-ansi": "^2.0.0" + "string-width": "^3.1.0", + "strip-ansi": "^5.2.0", + "wrap-ansi": "^5.1.0" }, "dependencies": { "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", "dev": true }, "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", "dev": true, "requires": { - "ansi-regex": "^3.0.0" + "ansi-regex": "^4.1.0" } } } }, + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, "find-up": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", @@ -11271,21 +11657,55 @@ "locate-path": "^3.0.0" } }, - "get-caller-file": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", - "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==", + "fsevents": { + "version": "1.2.13", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", + "integrity": "sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==", + "dev": true, + "optional": true + }, + "glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "dev": true, + "requires": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + }, + "dependencies": { + "is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "dev": true, + "requires": { + "is-extglob": "^2.1.0" + } + } + } + }, + "is-absolute-url": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-absolute-url/-/is-absolute-url-3.0.3.tgz", + "integrity": "sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==", "dev": true }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": 
"sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "is-binary-path": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", + "integrity": "sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=", "dev": true, "requires": { - "number-is-nan": "^1.0.0" + "binary-extensions": "^1.0.0" } }, + "is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true + }, "locate-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", @@ -11296,15 +11716,6 @@ "path-exists": "^3.0.0" } }, - "p-limit": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", - "integrity": "sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", - "dev": true, - "requires": { - "p-try": "^2.0.0" - } - }, "p-locate": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", @@ -11314,17 +11725,22 @@ "p-limit": "^2.0.0" } }, - "p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU=", "dev": true }, - "require-main-filename": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", - "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=", - "dev": true + "readdirp": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", + "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.11", + "micromatch": "^3.1.10", + "readable-stream": "^2.0.2" + } }, "schema-utils": { "version": "1.0.0", @@ -11338,11 +11754,39 @@ } }, "semver": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.2.0.tgz", - "integrity": "sha512-jdFC1VdUGT/2Scgbimf7FSx9iJLXoqfglSF+gJeuNWVpiE37OIbc1jywR/GJyFdz3mnkz2/id0L0J/cr0izR5A==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", "dev": true }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + }, + "dependencies": { + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "dev": true, + "requires": { + "ansi-regex": "^4.1.0" + } 
+ } + } + }, "strip-ansi": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", @@ -11362,52 +11806,55 @@ } }, "wrap-ansi": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", - "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", "dev": true, "requires": { - "string-width": "^1.0.1", - "strip-ansi": "^3.0.1" + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" }, "dependencies": { - "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "ansi-regex": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", + "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", + "dev": true + }, + "strip-ansi": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", + "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", "dev": true, "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" + "ansi-regex": "^4.1.0" } } } }, "yargs": { - "version": "12.0.5", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-12.0.5.tgz", - "integrity": "sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw==", + "version": "13.3.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.3.2.tgz", + "integrity": "sha512-AX3Zw5iPruN5ie6xGRIDgqkT+ZhnRlZMLMHAs8tg7nRruy2Nb+i5o9bwghAogtM08q1dpr2LVoS8KSTMYpWXUw==", "dev": true, "requires": { - "cliui": "^4.0.0", - "decamelize": "^1.2.0", + "cliui": "^5.0.0", "find-up": "^3.0.0", - "get-caller-file": "^1.0.1", - "os-locale": "^3.0.0", + "get-caller-file": "^2.0.1", "require-directory": "^2.1.1", - "require-main-filename": "^1.0.1", + "require-main-filename": "^2.0.0", "set-blocking": "^2.0.0", - "string-width": "^2.0.0", + "string-width": "^3.0.0", "which-module": "^2.0.0", - "y18n": "^3.2.1 || ^4.0.0", - "yargs-parser": "^11.1.1" + "y18n": "^4.0.0", + "yargs-parser": "^13.1.2" } }, "yargs-parser": { - "version": "11.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-11.1.1.tgz", - "integrity": "sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ==", + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.2.tgz", + "integrity": "sha512-3lbsNRf/j+A4QuSZfDRA7HRSfWrzO0YjqTJd5kjAq37Zep1CEgaYmrH9Q3GwPiB9cHyd1Y1UwggGhJGoxipbzg==", "dev": true, "requires": { "camelcase": "^5.0.0", @@ -11427,18 +11874,18 @@ } }, "webpack-merge": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-4.2.1.tgz", - "integrity": "sha512-4p8WQyS98bUJcCvFMbdGZyZmsKuWjWVnVHnAS3FFg0HDaRVrPbkivx2RYCre8UiemD67RsiFFLfn4JhLAin8Vw==", + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-4.2.2.tgz", + "integrity": "sha512-TUE1UGoTX2Cd42j3krGYqObZbOD+xF7u28WB7tfUordytSjbWTIjK/8V0amkBfTYN4/pB/GIDlJZZ657BGG19g==", "dev": true, "requires": { - "lodash": "^4.17.5" + "lodash": "^4.17.15" } }, 
"webpack-sources": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.3.0.tgz", - "integrity": "sha512-OiVgSrbGu7NEnEvQJJgdSFPl2qWKkWq5lHMhgiToIiN9w34EBnjYzSYs+VbL5KoYiLNtFFa7BZIKxRED3I32pA==", + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-1.4.3.tgz", + "integrity": "sha512-lgTS3Xhv1lCOKo7SA5TjKXMjpSM4sBjNV5+q2bqesbSPs5FjGmU6jjtBSkX9b4qW87vDIsCIlUPOEhbZrMdjeQ==", "dev": true, "requires": { "source-list-map": "^2.0.0", @@ -11454,12 +11901,12 @@ } }, "websocket-driver": { - "version": "0.7.3", - "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.3.tgz", - "integrity": "sha512-bpxWlvbbB459Mlipc5GBzzZwhoZgGEZLuqPaR0INBGnPAY1vdBX6hPnoFXiw+3yWxDuHyQjO2oXTMyS8A5haFg==", + "version": "0.7.4", + "resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz", + "integrity": "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==", "dev": true, "requires": { - "http-parser-js": ">=0.4.0 <0.4.11", + "http-parser-js": ">=0.5.1", "safe-buffer": ">=5.1.0", "websocket-extensions": ">=0.1.1" } @@ -11470,12 +11917,6 @@ "integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==", "dev": true }, - "when": { - "version": "3.6.4", - "resolved": "https://registry.npmjs.org/when/-/when-3.6.4.tgz", - "integrity": "sha1-RztRfsFZ4rhQBUl6E5g/CVQS404=", - "dev": true - }, "which": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", @@ -11491,10 +11932,10 @@ "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", "dev": true }, - "wordwrap": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", - "integrity": "sha1-J1hIEIkUVqQXHI0CJkQa3pDLyus=", + "word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", "dev": true }, "worker-farm": { @@ -11507,26 +11948,39 @@ } }, "wrap-ansi": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", - "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dev": true, "requires": { - "ansi-styles": "^3.2.0", - "string-width": "^3.0.0", - "strip-ansi": "^5.0.0" + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" }, "dependencies": { - "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" + "color-convert": "^2.0.1" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true } } }, @@ -11537,11 +11991,10 @@ "dev": true }, "write": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/write/-/write-0.2.1.tgz", - "integrity": "sha1-X8A4KOJkzqP+kUVUdvejxWbLB1c=", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz", + "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", "dev": true, - "optional": true, "requires": { "mkdirp": "^0.5.1" } @@ -11562,101 +12015,56 @@ "dev": true }, "y18n": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", - "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz", + "integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==", "dev": true }, "yallist": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", - "integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", "dev": true }, "yargs": { - "version": "13.2.4", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-13.2.4.tgz", - "integrity": "sha512-HG/DWAJa1PAnHT9JAhNa8AbAv3FPaiLzioSjCcmuXXhP8MlpHO5vwls4g4j6n30Z74GVQj8Xa62dWVx1QCGklg==", + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", "dev": true, "requires": { - "cliui": "^5.0.0", - "find-up": "^3.0.0", - "get-caller-file": "^2.0.1", - "os-locale": "^3.1.0", + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", - "require-main-filename": "^2.0.0", - "set-blocking": "^2.0.0", - "string-width": "^3.0.0", - "which-module": "^2.0.0", - "y18n": "^4.0.0", - "yargs-parser": "^13.1.0" + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" }, "dependencies": { - "find-up": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", - "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", - "dev": true, - "requires": { - "locate-path": "^3.0.0" - } - }, - "locate-path": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", - "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", - "dev": true, - "requires": { - "p-locate": "^3.0.0", - "path-exists": "^3.0.0" - } - }, - "p-limit": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.0.tgz", - "integrity": 
"sha512-pZbTJpoUsCzV48Mc9Nh51VbwO0X9cuPFE8gYwx9BTCt9SF8/b7Zljd2fVgOxhIF/HDTKgpVzs+GPhyKfjLLFRQ==", - "dev": true, - "requires": { - "p-try": "^2.0.0" - } - }, - "p-locate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", - "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", "dev": true, "requires": { - "p-limit": "^2.0.0" + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" } }, - "p-try": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", - "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "y18n": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", + "integrity": "sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg==", "dev": true - }, - "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - } } } }, "yargs-parser": { - "version": "13.1.1", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-13.1.1.tgz", - "integrity": "sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ==", - "dev": true, - "requires": { - "camelcase": "^5.0.0", - "decamelize": "^1.2.0" - } + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true }, "yorkie": { "version": "2.0.0", diff --git a/flowman-ui/package.json b/flowman-ui/package.json index 99e7dcdec..c73d47f3e 100644 --- a/flowman-ui/package.json +++ b/flowman-ui/package.json @@ -8,24 +8,38 @@ "lint": "vue-cli-service lint" }, "dependencies": { - "core-js": "^2.6.5", - "vue": "^2.6.10", + "core-js": "^3.6.5", + "vue": "^2.6.11", "vue-router": "^3.0.3", - "vuetify": "^1.5.5" + "vuetify": "^2.4.0" }, "devDependencies": { - "@vue/cli-plugin-babel": "^3.9.0", - "@vue/cli-plugin-eslint": "^3.9.0", - "@vue/cli-service": "^3.9.0", + "@vue/cli-plugin-babel": "^4.5.0", + "@vue/cli-plugin-eslint": "^4.5.0", + "@vue/cli-service": "^4.5.0", "axios": "^0.18.0", - "babel-eslint": "^10.0.1", - "eslint": "^5.16.0", - "eslint-plugin-vue": "^5.0.0", - "stylus": "^0.54.5", - "stylus-loader": "^3.0.1", + "babel-eslint": "^10.1.0", + "eslint": "^6.7.2", + "eslint-plugin-vue": "^6.2.2", + "sass": "^1.32.0", + "sass-loader": "^10.0.0", "vue-cli-plugin-axios": "0.0.4", - "vue-cli-plugin-vuetify": "^0.5.0", - "vue-template-compiler": "^2.6.10", - "vuetify-loader": "^1.0.5" + "vue-cli-plugin-vuetify": "^2.1.0", + "vue-template-compiler": "^2.6.11", + "vuetify-loader": "^1.7.0" + }, + "eslintConfig": { + "root": true, + "env": { + "node": true + }, + "extends": [ + "plugin:vue/essential", + "eslint:recommended" + ], + "parserOptions": { + "parser": "babel-eslint" + }, + "rules": {} } } diff --git a/flowman-ui/pom.xml 
b/flowman-ui/pom.xml index fbfb4afbf..6d5cf4973 100644 --- a/flowman-ui/pom.xml +++ b/flowman-ui/pom.xml @@ -9,7 +9,7 @@ com.dimajix.flowman flowman-root - 0.14.2 + 0.15.0 .. @@ -49,20 +49,6 @@ - - org.apache.maven.plugins - maven-compiler-plugin - - - default-compile - none - - - test-compile - none - - - diff --git a/flowman-ui/src/App.vue b/flowman-ui/src/App.vue index e5da3f25e..8af1038e4 100644 --- a/flowman-ui/src/App.vue +++ b/flowman-ui/src/App.vue @@ -1,17 +1,19 @@