From 5b80bae48dc1b52b03bb0af5d7e61a895da47c2d Mon Sep 17 00:00:00 2001 From: Marc Wodahl Date: Wed, 27 Mar 2024 13:53:30 -0600 Subject: [PATCH 01/18] Update devcontainer to use Java 21 --- .devcontainer/Dockerfile | 44 +++++++++++++++++---------------- .devcontainer/devcontainer.json | 15 ++++------- 2 files changed, 28 insertions(+), 31 deletions(-) diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index d668daf..8482b7c 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,28 +1,30 @@ -# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.217.4/containers/java/.devcontainer/base.Dockerfile +# Install openJDK version 21 (includes maven, gradle, and node) +FROM cimg/openjdk:21.0.2-node -# [Choice] Java version (use -bullseye variants on local arm64/Apple Silicon): 11, 17, 11-bullseye, 17-bullseye, 11-buster, 17-buster -ARG VARIANT="17" -FROM mcr.microsoft.com/vscode/devcontainers/java:0-${VARIANT} +# set user to root to allow apt-get to run +USER root -# [Option] Install Maven -ARG INSTALL_MAVEN="true" -ARG MAVEN_VERSION="3.6.3" -# [Option] Install Gradle -ARG INSTALL_GRADLE="false" -ARG GRADLE_VERSION="" -RUN if [ "${INSTALL_MAVEN}" = "true" ]; then su vscode -c "umask 0002 && . /usr/local/sdkman/bin/sdkman-init.sh && sdk install maven \"${MAVEN_VERSION}\""; fi \ - && if [ "${INSTALL_GRADLE}" = "true" ]; then su vscode -c "umask 0002 && . /usr/local/sdkman/bin/sdkman-init.sh && sdk install gradle \"${GRADLE_VERSION}\""; fi +ARG USERNAME=vscode +ARG USER_UID=1000 +ARG USER_GID=$USER_UID -# [Choice] Node.js version: none, lts/*, 16, 14, 12, 10 -ARG NODE_VERSION="none" -RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi - -# [Optional] Uncomment this section to install additional OS packages. 
-# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ -# && apt-get -y install --no-install-recommends +# Create non-root user vscode with sudo support +ENV DEBIAN_FRONTEND=noninteractive +RUN apt-get update \ + # + # Create a non-root user to use if preferred - see https://aka.ms/vscode-remote/containers/non-root-user. + && groupadd --gid $USER_GID $USERNAME \ + && useradd -s /bin/bash --uid $USER_UID --gid $USER_GID -m $USERNAME \ + && apt-get install -y sudo \ + && echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME\ + && chmod 0440 /etc/sudoers.d/$USERNAME # [Optional] Uncomment this line to install global node packages. -# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g " 2>&1 +# RUN npm install -g # install kafkacat for testing purposes -RUN apt-get update && apt-get install -y kafkacat \ No newline at end of file +RUN apt-get update && apt-get install -y kafkacat + +# [Optional] Uncomment this section to install additional OS packages. +# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ +# && apt-get -y install --no-install-recommends \ No newline at end of file diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 7356ea1..d36b057 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -4,16 +4,6 @@ "name": "Java", "build": { "dockerfile": "Dockerfile", - "args": { - // Update the VARIANT arg to pick a Java version: 11, 17 - // Append -bullseye or -buster to pin to an OS version. - // Use the -bullseye variants on local arm64/Apple Silicon. - "VARIANT": "11", - // Options - "INSTALL_MAVEN": "true", - "INSTALL_GRADLE": "false", - "NODE_VERSION": "none" - } }, // Set *default* container specific settings.json values on container create. @@ -26,6 +16,11 @@ "vscjava.vscode-java-pack" ], + + "containerEnv": { + "SHELL": "/bin/bash" + }, + // Use 'forwardPorts' to make a list of ports inside the container available locally. 
// "forwardPorts": [], From 1f9a7a0f7a19670a40c278c5cb017eb814c0eaed Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Tue, 7 May 2024 16:05:18 -0600 Subject: [PATCH 02/18] Removed travis build status from README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index fada90d..ec689c7 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# jpo-sdw-depositor [![Build Status](https://travis-ci.org/usdot-jpo-ode/jpo-sdw-depositor.svg?branch=dev)](https://travis-ci.org/usdot-jpo-ode/jpo-sdw-depositor) [![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=usdot.jpo.ode%3Ajpo-sdw-depositor&metric=alert_status)](https://sonarcloud.io/dashboard?id=usdot.jpo.ode%3Ajpo-sdw-depositor) +# jpo-sdw-depositor [![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=usdot.jpo.ode%3Ajpo-sdw-depositor&metric=alert_status)](https://sonarcloud.io/dashboard?id=usdot.jpo.ode%3Ajpo-sdw-depositor) Subscribes to a Kafka topic and deposits messages to the Situation Data Warehouse (SDW). From d825b7d7e8562c51b21dcd4a65322eb08158a20a Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Tue, 7 May 2024 16:06:08 -0600 Subject: [PATCH 03/18] Removed quality gate status from README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index ec689c7..a8159e8 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# jpo-sdw-depositor [![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=usdot.jpo.ode%3Ajpo-sdw-depositor&metric=alert_status)](https://sonarcloud.io/dashboard?id=usdot.jpo.ode%3Ajpo-sdw-depositor) +# jpo-sdw-depositor Subscribes to a Kafka topic and deposits messages to the Situation Data Warehouse (SDW). 
From c4af8cfb9c45d56781683eb41178bf5c4f1b69bb Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Tue, 7 May 2024 16:12:59 -0600 Subject: [PATCH 04/18] Changed 'Situation Data Warehouse' to 'Situational Data Exchange' in README & added hyperlink --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a8159e8..533cbdc 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # jpo-sdw-depositor -Subscribes to a Kafka topic and deposits messages to the Situation Data Warehouse (SDW). +Subscribes to a Kafka topic and deposits messages to the [Situational Data Exchange (SDX)](https://sdx.trihydro.com/). # Overview From 63d53e7dc04cc1f54b55b23295eb83af2832d357 Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Tue, 7 May 2024 16:16:15 -0600 Subject: [PATCH 05/18] Adjusted headers in README to increase clarity --- README.md | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 533cbdc..efa3fb2 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ Subscribes to a Kafka topic and deposits messages to the [Situational Data Exchange (SDX)](https://sdx.trihydro.com/). -# Overview +## Overview This is a submodule of the [jpo-ode](https://github.com/usdot-jpo-ode/jpo-ode) repository. It subscribes to a Kafka topic and listens for incoming messages. Upon message arrival, this application deposits it over REST to the SDX. @@ -10,7 +10,7 @@ This is a submodule of the [jpo-ode](https://github.com/usdot-jpo-ode/jpo-ode) r ## Release Notes The current version and release history of the Jpo-sdw-depositor: [Jpo-sdw-depositor Release Notes]() -# Installation and Operation +## Installation and Operation ### Requirements @@ -39,7 +39,7 @@ Use this option when you want to run this module in conjuction with the [jpo-ode -# Configuration Reference +## Configuration Reference **SOME OF THESE PROPERTIES ARE SENSITIVE. 
DO NOT PUBLISH THEM TO VERSION CONTROL** @@ -58,28 +58,28 @@ You may configure these values in `jpo-sdw-depositor/src/main/resources/applicat | sdw.emailList | SDW_EMAIL_LIST | Comma-delimited email list to send error emails to | error@email.com,test@test.com | sdw.emailFrom | SDW_EMAIL_FROM | Support email to send from | error@email.com -# Confluent Cloud Integration +## Confluent Cloud Integration Rather than using a local kafka instance, this project can utilize an instance of kafka hosted by Confluent Cloud via SASL. -## Environment variables -### Purpose & Usage +### Environment variables +#### Purpose & Usage - The DOCKER_HOST_IP environment variable is used to communicate with the bootstrap server that the instance of Kafka is running on. - The KAFKA_TYPE environment variable specifies what type of kafka connection will be attempted and is used to check if Confluent should be utilized. - The CONFLUENT_KEY and CONFLUENT_SECRET environment variables are used to authenticate with the bootstrap server. -### Values +#### Values - DOCKER_HOST_IP must be set to the bootstrap server address (excluding the port) - KAFKA_TYPE must be set to "CONFLUENT" - CONFLUENT_KEY must be set to the API key being utilized for CC - CONFLUENT_SECRET must be set to the API secret being utilized for CC -## CC Docker Compose File +### CC Docker Compose File There is a provided docker-compose file (docker-compose-confluent-cloud.yml) that passes the above environment variables into the container that gets created. Further, this file doesn't spin up a local kafka instance since it is not required. -## Note +### Note This has only been tested with Confluent Cloud but technically all SASL authenticated Kafka brokers can be reached using this method. 
-# Unit Testing +## Unit Testing The unit tests can be run by executing the following command from the root directory of the project: ``` mvn test @@ -87,7 +87,7 @@ mvn test It should be noted that Maven & Java are required to run the unit tests. If you do not have Maven or Java installed, you can reopen the project in the provided dev container and run the tests from there. -# Object data consumption +## Object data consumption The KafkaConsumerRestDepositor will accept any string as input to be passed into the SDW. If provided a JSON object, the tokens of "encodedMsg" and "estimatedRemovalDate" will be passed through directly to the SDW in the form of the following: {depositRequests:[{"encodeType": STRING ,"encodedMsg": STRING, "estimatedRemovalDate": STRING}]} From c0608e4fb1447df3885059d49926cea1b1cd8b34 Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Tue, 7 May 2024 16:21:04 -0600 Subject: [PATCH 06/18] Reworded 'Overview' section of README & added link to SDX REST API documentation --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index efa3fb2..628a159 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ Subscribes to a Kafka topic and deposits messages to the [Situational Data Excha ## Overview -This is a submodule of the [jpo-ode](https://github.com/usdot-jpo-ode/jpo-ode) repository. It subscribes to a Kafka topic and listens for incoming messages. Upon message arrival, this application deposits it over REST to the SDX. +This is a submodule of the [jpo-ode](https://github.com/usdot-jpo-ode/jpo-ode) repository. It subscribes to a Kafka topic and listens for incoming messages. Upon message arrival, this application deposits the message to the SDX via [REST API](https://sdx-service.trihydro.com/index.html). 
## Release Notes From f8a0d74ef5faaeef505283fe5874ca82c63f59d3 Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Tue, 7 May 2024 16:24:43 -0600 Subject: [PATCH 07/18] Corrected capitalization in 'Release Notes' section of README --- README.md | 3 +-- docs/Release_notes.md | 2 +- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 628a159..1ddbbaf 100644 --- a/README.md +++ b/README.md @@ -6,9 +6,8 @@ Subscribes to a Kafka topic and deposits messages to the [Situational Data Excha This is a submodule of the [jpo-ode](https://github.com/usdot-jpo-ode/jpo-ode) repository. It subscribes to a Kafka topic and listens for incoming messages. Upon message arrival, this application deposits the message to the SDX via [REST API](https://sdx-service.trihydro.com/index.html). - ## Release Notes -The current version and release history of the Jpo-sdw-depositor: [Jpo-sdw-depositor Release Notes]() +The current version and release history of the jpo-sdw-depositor project: [jpo-sdw-depositor Release Notes]() ## Installation and Operation diff --git a/docs/Release_notes.md b/docs/Release_notes.md index dd65ff4..42a8d7a 100644 --- a/docs/Release_notes.md +++ b/docs/Release_notes.md @@ -1,4 +1,4 @@ -Jpo-sdw-depositor Release Notes +jpo-sdw-depositor Release Notes ---------------------------- Version 1.6.0, released February 2024 From 509cfb20d1a4d4c85de763c501088dc28fd99fb0 Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Tue, 7 May 2024 16:53:27 -0600 Subject: [PATCH 08/18] Revised 'Installation and Operation' section of README --- README.md | 38 ++++++++++++++++++++++++++++---------- 1 file changed, 28 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index 1ddbbaf..8b6dfda 100644 --- a/README.md +++ b/README.md @@ -13,30 +13,48 @@ The current version and release history of the jpo-sdw-depositor project: [jpo-s ### Requirements -- Docker +- [Kafka](https://kafka.apache.org/) +- [Docker](https://www.docker.com/) -### 
Option 1: Standalone +### Option 1: As ODE submodule +The jpo-sdw-depositor is intended to be run as a submodule of the [jpo-ode](https://github.com/usdot-jpo-ode/jpo-ode) project. The ODE project repository includes a docker-compose file that will run the depositor in conjunction with the ODE by default. The same environment variables mentioned in the [Configuration Reference](#configuration-reference) below will need to be set in the `.env` file in the root of the ODE project. -Use this option when you want to run the depositor by itself. This will listen to any Kafka topic you specify and deposit messages to the Situation Data Exchange. +### Option 2: Standalone (Depositor Only) with Remote Kafka -1. Configure your desired properties. See **Configuration Reference** at the bottom of this README. -2. Rename your `sample.env` file to `.env` if you haven't already done so -3. Execute the `run.sh` script OR execute these commands: +Use this option when you want to run the depositor by itself and you already have a Kafka cluster running remotely. This option will run the depositor in a Docker container and connect to a remote Kafka cluster to listen for messages. The depositor will then deposit these messages to the SDX. + +1. Rename your `sample.env` file to `.env`. This file contains the environment variables that the application will use to connect to Kafka and the SDX. +1. Configure your environment variables in the `.env` file. See the [Configuration Reference](#configuration-reference) below. +1. Execute the `run.sh` script OR execute these commands: ``` -docker build -t jpo-sdw-depositor . +docker build -t jpo-sdw-depositor . docker run --rm --env-file .env jpo-sdw-depositor:latest ``` +### Option 3: With Local Kafka +Use this option when you want to run the depositor and you want to run a local Kafka cluster alongside it. This option will run the depositor and a Kafka cluster in Docker containers. 
The depositor will listen for messages on the local Kafka cluster and deposit them to the SDX. -### Option 2: As ODE submodule +1. Rename your `sample.env` file to `.env`. This file contains the environment variables that the application will use to connect to Kafka and the SDX. +1. Configure your environment variables in the `.env` file. See the [Configuration Reference](#configuration-reference) below. +1. Run the following command: -** IN PROGRESS! Further instructions pending ODE compatibility. ** +``` +docker compose -f docker-compose-confluent-cloud.yml up --build +``` -Use this option when you want to run this module in conjuction with the [jpo-ode](https://github.com/usdot-jpo-ode/jpo-ode). The only action you must take here is to set the configuration properties in the env file. See the bottom of this README for a reference. +### Option 4: With Confluent Cloud Kafka +Use this option when you want to run the depositor and you want to connect to a Kafka cluster hosted by Confluent Cloud. This option will run the depositor in a Docker container and connect to a Kafka cluster hosted by Confluent Cloud to listen for messages. The depositor will then deposit these messages to the SDX. +1. Rename your `sample.env` file to `.env`. This file contains the environment variables that the application will use to connect to Kafka and the SDX. +1. Configure your environment variables in the `.env` file. See the [Configuration Reference](#configuration-reference) below. +1. Run the following command: +``` +docker compose -f docker-compose-confluent-cloud.yml up --build +``` +See the [Confluent Cloud Integration](#confluent-cloud-integration) section for more information. 
## Configuration Reference From c970dddedb0a76dd81c581754ef9caa0eb626999 Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Wed, 8 May 2024 09:22:47 -0600 Subject: [PATCH 09/18] Revised 'Configuration Reference' section of README --- README.md | 12 ++++++++---- sample.env | 1 - src/main/resources/application.properties | 1 - 3 files changed, 8 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 8b6dfda..80fd936 100644 --- a/README.md +++ b/README.md @@ -60,20 +60,24 @@ **SOME OF THESE PROPERTIES ARE SENSITIVE. DO NOT PUBLISH THEM TO VERSION CONTROL** -You may configure these values in `jpo-sdw-depositor/src/main/resources/application.properties` or by editing them in the `sample.env` file. +It is recommended to use environment variables to configure the application, rather than hardcoding values in the `application.properties` file. This allows for easier configuration management and better security. -**IMPORTANT** When using the env file method, you must You must rename or duplicate the `sample.env` file to `.env` +Alternatively, you can configure the application by editing the [application.properties](src\main\resources\application.properties) file. + +**IMPORTANT** When using the env file method, you must rename or duplicate the `sample.env` file to `.env` and fill in the values for the environment variables. The `.env` file is used to pass environment variables to the Docker container. 
| Value in `application.properties` | Value as env var (in sample.env) | Description | Example Value | |-----------------------------------|----------------------------------|-------------------------------------------------------|-----------------------------| | sdw.kafkaBrokers | DOCKER_HOST_IP | Host IP ([instructions](https://github.com/usdot-jpo-ode/jpo-ode/wiki/Docker-management#obtaining-docker_host_ip)) | 10.1.2.3 || sdw.groupId | SDW_GROUP_ID | The Kafka group id to be used for message consumption | usdot.jpo.sdw | | -| sdw.kafkaPort | SDW_KAFKA_PORT | Port of the Kafka instance | 9092 | -| sdw.subscriptionTopic | SDW_SUBSCRIPTION_TOPIC | Kafka topic to listen to | topic.J2735TimBroadcastJson | +| sdw.subscriptionTopics | SDW_SUBSCRIPTION_TOPIC | Kafka topic to listen to | topic.J2735TimBroadcastJson | | sdw.destinationUrl | SDW_DESTINATION_URL | Full path of the SDX server address | 127.0.0.1 | | sdw.apikey | SDW_API_KEY | SDX API Key (generated by [SDX](https://sdx.trihydro.com)) | (n/a) | sdw.emailList | SDW_EMAIL_LIST | Comma-delimited email list to send error emails to | error@email.com,test@test.com | sdw.emailFrom | SDW_EMAIL_FROM | Support email to send from | error@email.com +N/A | KAFKA_TYPE | Type of Kafka connection to be used. Options are "LOCAL" or "CONFLUENT" | LOCAL +N/A | CONFLUENT_KEY | Confluent Cloud API Key | (n/a) +N/A | CONFLUENT_SECRET | Confluent Cloud API Secret | (n/a) ## Confluent Cloud Integration Rather than using a local kafka instance, this project can utilize an instance of kafka hosted by Confluent Cloud via SASL. 
diff --git a/sample.env b/sample.env index a4181a2..f402694 100644 --- a/sample.env +++ b/sample.env @@ -1,6 +1,5 @@ DOCKER_HOST_IP= #SDW_GROUP_ID=usdot.jpo.sdw -#SDW_KAFKA_PORT=9092 #SDW_DESTINATION_URL=https://webapp-integration.cvmvp.com/whtools/rest/v2/ SDW_SUBSCRIPTION_TOPIC= SDW_API_KEY= diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index 917bdc7..643cf73 100644 --- a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -12,7 +12,6 @@ version=${project.version} #Input Properties #================ #sdw.kafkaBrokers=localhost -#sdw.kafkaPort=9092 #sdw.subscriptionTopics = topic.example1 topic.example2 #Output Properties From 00ca2abe870700ab7afd11b09508f824f4598e3f Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Wed, 8 May 2024 09:24:22 -0600 Subject: [PATCH 10/18] Moved 'Confluent Cloud Integration' section to bottom of README --- README.md | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/README.md b/README.md index 80fd936..d90a31a 100644 --- a/README.md +++ b/README.md @@ -79,6 +79,21 @@ N/A | KAFKA_TYPE | Type of Kafka connection to be used. Options are "LOCAL" or " N/A | CONFLUENT_KEY | Confluent Cloud API Key | (n/a) N/A | CONFLUENT_SECRET | Confluent Cloud API Secret | (n/a) +## Unit Testing +The unit tests can be run by executing the following command from the root directory of the project: +``` +mvn test +``` + +It should be noted that Maven & Java are required to run the unit tests. If you do not have Maven or Java installed, you can reopen the project in the provided dev container and run the tests from there. + +## Object data consumption +The KafkaConsumerRestDepositor will accept any string as input to be passed into the SDW. 
If provided a JSON object, the tokens of "encodedMsg" and "estimatedRemovalDate" will be passed through directly to the SDW in the form of the following: +{depositRequests:[{"encodeType": STRING ,"encodedMsg": STRING, "estimatedRemovalDate": STRING}]} + +If provided a string of non-json form, the value of "encodedMsg" will inherit the passed value and information will be passed to the SDW in the form of the following: +{depositRequests:[{"encodeType": STRING ,"encodedMsg": STRING}]} + ## Confluent Cloud Integration Rather than using a local kafka instance, this project can utilize an instance of kafka hosted by Confluent Cloud via SASL. @@ -98,19 +113,4 @@ Rather than using a local kafka instance, this project can utilize an instance o There is a provided docker-compose file (docker-compose-confluent-cloud.yml) that passes the above environment variables into the container that gets created. Further, this file doesn't spin up a local kafka instance since it is not required. ### Note -This has only been tested with Confluent Cloud but technically all SASL authenticated Kafka brokers can be reached using this method. - -## Unit Testing -The unit tests can be run by executing the following command from the root directory of the project: -``` -mvn test -``` - -It should be noted that Maven & Java are required to run the unit tests. If you do not have Maven or Java installed, you can reopen the project in the provided dev container and run the tests from there. - -## Object data consumption -The KafkaConsumerRestDepositor will accept any string as input to be passed into the SDW. 
If provided a JSON object, the tokens of "encodedMsg" and "estimatedRemovalDate" will be passed through directly to the SDW in the form of the following: -{depositRequests:[{"encodeType": STRING ,"encodedMsg": STRING, "estimatedRemovalDate": STRING}]} - -If provided a string of non-json form, the value of "encodedMsg" will inherit the passed value and information will be passed to the SDW in the form of the following: -{depositRequests:[{"encodeType": STRING ,"encodedMsg": STRING}]} +This has only been tested with Confluent Cloud but technically all SASL authenticated Kafka brokers can be reached using this method. \ No newline at end of file From 440ea3d8598a9aa0347d6273b87d03e9c3b8014f Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Wed, 8 May 2024 09:28:50 -0600 Subject: [PATCH 11/18] Revised 'Object Data Consumption' section of README --- README.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index d90a31a..7a0a5b1 100644 --- a/README.md +++ b/README.md @@ -87,12 +87,12 @@ mvn test It should be noted that Maven & Java are required to run the unit tests. If you do not have Maven or Java installed, you can reopen the project in the provided dev container and run the tests from there. -## Object data consumption -The KafkaConsumerRestDepositor will accept any string as input to be passed into the SDW. If provided a JSON object, the tokens of "encodedMsg" and "estimatedRemovalDate" will be passed through directly to the SDW in the form of the following: -{depositRequests:[{"encodeType": STRING ,"encodedMsg": STRING, "estimatedRemovalDate": STRING}]} +## Object Data Consumption +The KafkaConsumerRestDepositor will accept any string as input to be passed into the SDX. 
If provided a JSON object, the tokens of "encodedMsg" and "estimatedRemovalDate" will be passed through directly to the SDX in the form of the following: +> {depositRequests:[{"encodeType": STRING ,"encodedMsg": STRING, "estimatedRemovalDate": STRING}]} -If provided a string of non-json form, the value of "encodedMsg" will inherit the passed value and information will be passed to the SDW in the form of the following: -{depositRequests:[{"encodeType": STRING ,"encodedMsg": STRING}]} +If provided a string of non-json form, the value of "encodedMsg" will inherit the passed value and information will be passed to the SDX in the form of the following: +> {depositRequests:[{"encodeType": STRING ,"encodedMsg": STRING}]} ## Confluent Cloud Integration Rather than using a local kafka instance, this project can utilize an instance of kafka hosted by Confluent Cloud via SASL. From 77cfdac66bfe86f7591f14edb94cfbac05b77904 Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Wed, 8 May 2024 09:41:05 -0600 Subject: [PATCH 12/18] Corrected KAFKA_TYPE description --- README.md | 4 ++-- sample.env | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 7a0a5b1..0c1feb6 100644 --- a/README.md +++ b/README.md @@ -75,7 +75,7 @@ Alternatively, you can configure the application by editing the [application.pro | sdw.apikey | SDW_API_KEY | SDX API Key (generated by [SDX](https://sdx.trihydro.com)) | (n/a) | sdw.emailList | SDW_EMAIL_LIST | Comma-delimited email list to send error emails to | error@email.com,test@test.com | sdw.emailFrom | SDW_EMAIL_FROM | Support email to send from | error@email.com -N/A | KAFKA_TYPE | Type of Kafka connection to be used. Options are "LOCAL" or "CONFLUENT" | LOCAL +N/A | KAFKA_TYPE | Type of Kafka connection to be used. 
Must be set to CONFLUENT, otherwise the application will default to a non-Confluent connection | CONFLUENT N/A | CONFLUENT_KEY | Confluent Cloud API Key | (n/a) N/A | CONFLUENT_SECRET | Confluent Cloud API Secret | (n/a) @@ -105,7 +105,7 @@ Rather than using a local kafka instance, this project can utilize an instance o #### Values - DOCKER_HOST_IP must be set to the bootstrap server address (excluding the port) -- KAFKA_TYPE must be set to "CONFLUENT" +- KAFKA_TYPE must be set to "CONFLUENT", otherwise the application will default to a non-Confluent connection - CONFLUENT_KEY must be set to the API key being utilized for CC - CONFLUENT_SECRET must be set to the API secret being utilized for CC diff --git a/sample.env b/sample.env index f402694..7c8f751 100644 --- a/sample.env +++ b/sample.env @@ -7,6 +7,8 @@ SDW_EMAIL_LIST= SDW_EMAIL_FROM= SPRING_MAIL_HOST= SPRING_MAIL_PORT= + +# Type of Kafka connection to be used. Must be set to CONFLUENT, otherwise the application will default to a non-Confluent connection KAFKA_TYPE= CONFLUENT_KEY= CONFLUENT_SECRET= \ No newline at end of file From 7f53b991d343bba0081db2bcb5f8ae2a1b248a25 Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Wed, 15 May 2024 16:08:02 -0600 Subject: [PATCH 13/18] Fixed local kafka installation instructions referencing CC docker-compose.yml --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0c1feb6..4ddccd6 100644 --- a/README.md +++ b/README.md @@ -40,7 +40,7 @@ Use this option when you want to run the depositor and you want to run a local K 1. 
Run the following command: ``` -docker compose -f docker-compose-confluent-cloud.yml up --build +docker compose up --build ``` ### Option 4: With Confluent Cloud Kafka From cd4c73167ba404c817b5796a901e8c4f57628cbb Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Wed, 15 May 2024 16:21:16 -0600 Subject: [PATCH 14/18] Removed SDW_GROUP_ID from sample.env & README --- README.md | 2 +- sample.env | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/README.md b/README.md index 4ddccd6..3903eeb 100644 --- a/README.md +++ b/README.md @@ -69,7 +69,7 @@ Alternatively, you can configure the application by editing the [application.pro | Value in `application.properties` | Value as env var (in sample.env) | Description | Example Value | |-----------------------------------|----------------------------------|-------------------------------------------------------|-----------------------------| -| sdw.kafkaBrokers | DOCKER_HOST_IP | Host IP ([instructions](https://github.com/usdot-jpo-ode/jpo-ode/wiki/Docker-management#obtaining-docker_host_ip)) | 10.1.2.3 || sdw.groupId | SDW_GROUP_ID | The Kafka group id to be used for message consumption | usdot.jpo.sdw | | +| sdw.kafkaBrokers | DOCKER_HOST_IP | Host IP ([instructions](https://github.com/usdot-jpo-ode/jpo-ode/wiki/Docker-management#obtaining-docker_host_ip)) | 10.1.2.3 | | sdw.subscriptionTopics | SDW_SUBSCRIPTION_TOPIC | Kafka topic to listen to | topic.J2735TimBroadcastJson | | sdw.destinationUrl | SDW_DESTINATION_URL | Full path of the SDX server address | 127.0.0.1 | | sdw.apikey | SDW_API_KEY | SDX API Key (generated by [SDX](https://sdx.trihydro.com)) | (n/a) diff --git a/sample.env b/sample.env index 7c8f751..143466c 100644 --- a/sample.env +++ b/sample.env @@ -1,5 +1,4 @@ DOCKER_HOST_IP= -#SDW_GROUP_ID=usdot.jpo.sdw #SDW_DESTINATION_URL=https://webapp-integration.cvmvp.com/whtools/rest/v2/ SDW_SUBSCRIPTION_TOPIC= SDW_API_KEY= From 5a0cb8820807394714a28dc9a3b287259504444a Mon Sep 17 00:00:00 2001 
From: dmccoystephenson Date: Fri, 24 May 2024 13:15:45 -0600 Subject: [PATCH 15/18] Updated `Release_notes.md` for 1.7.0 release --- docs/Release_notes.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/Release_notes.md b/docs/Release_notes.md index 42a8d7a..be6c04f 100644 --- a/docs/Release_notes.md +++ b/docs/Release_notes.md @@ -1,6 +1,16 @@ jpo-sdw-depositor Release Notes ---------------------------- +Version 1.7.0, released June 2024 +---------------------------------------- +### **Summary** +The changes for the jpo-sdw-depositor 1.7.0 release include a Java update for the dev container, as well as revised documentation for accuracy and improved clarity/readability. + +Enhancements in this release +- CDOT PR 19: Updated dev container to use Java 21 +- CDOT PR 20: Revised documentation for accuracy & improved clarity/readability + + Version 1.6.0, released February 2024 ---------------------------------------- From 8d1086ed6433ef868f0043cda5971d2ff373b43e Mon Sep 17 00:00:00 2001 From: dmccoystephenson Date: Tue, 28 May 2024 11:33:40 -0600 Subject: [PATCH 16/18] Changed version to 1.7.0-SNAPSHOT --- Dockerfile | 4 ++-- pom.xml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index d5ba77b..65c6338 100644 --- a/Dockerfile +++ b/Dockerfile @@ -14,8 +14,8 @@ RUN mvn clean package -DskipTests FROM eclipse-temurin:21-jre-alpine WORKDIR /home -COPY --from=builder /home/target/jpo-sdw-depositor-1.6.0-SNAPSHOT.jar /home +COPY --from=builder /home/target/jpo-sdw-depositor-1.7.0-SNAPSHOT.jar /home ENTRYPOINT ["java", \ "-jar", \ - "/home/jpo-sdw-depositor-1.6.0-SNAPSHOT.jar"] + "/home/jpo-sdw-depositor-1.7.0-SNAPSHOT.jar"] diff --git a/pom.xml b/pom.xml index 6e0657d..2f3eb89 100644 --- a/pom.xml +++ b/pom.xml @@ -13,7 +13,7 @@ usdot.jpo.ode jpo-sdw-depositor - 1.6.0-SNAPSHOT + 1.7.0-SNAPSHOT jar jpo-sdw-depositor From 6dd4020907736e8088fba8029b4d271da725a7a2 Mon Sep 17 00:00:00 2001 From: dmccoystephenson 
Date: Tue, 11 Jun 2024 07:44:11 -0600 Subject: [PATCH 17/18] Uncommented SDW_DESTINATION_URL in `sample.env` file --- sample.env | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sample.env b/sample.env index 143466c..aedc5ec 100644 --- a/sample.env +++ b/sample.env @@ -1,5 +1,5 @@ DOCKER_HOST_IP= -#SDW_DESTINATION_URL=https://webapp-integration.cvmvp.com/whtools/rest/v2/ +SDW_DESTINATION_URL=https://sdx-service.trihydro.com/api/deposit-multi SDW_SUBSCRIPTION_TOPIC= SDW_API_KEY= SDW_EMAIL_LIST= From e0cefd2ff128d45bd14f8711b23bcaa04eb640bc Mon Sep 17 00:00:00 2001 From: Saikrishna Bairamoni <84093461+SaikrishnaBairamoni@users.noreply.github.com> Date: Wed, 12 Jun 2024 12:13:34 -0400 Subject: [PATCH 18/18] Update dockerhub.yml --- .github/workflows/dockerhub.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/dockerhub.yml b/.github/workflows/dockerhub.yml index c7c00ea..8089e18 100644 --- a/.github/workflows/dockerhub.yml +++ b/.github/workflows/dockerhub.yml @@ -20,8 +20,13 @@ jobs: with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Replace Docker tag + id: set_tag + run: echo "TAG=$(echo ${GITHUB_REF##*/} | sed 's/\//-/g')" >> $GITHUB_ENV + - name: Build uses: docker/build-push-action@v5 with: push: true - tags: usdotjpoode/jpo-sdw-depositor:${{ github.ref_name }} + tags: usdotjpoode/jpo-sdw-depositor:${{ env.TAG }}