Skip to content

Commit 21229ed

Browse files
committed
Setup CI for adapter with service dependencies
Signed-off-by: Tim Paine <[email protected]>
1 parent 1ce6f25 commit 21229ed

File tree

11 files changed

+148
-236
lines changed

11 files changed

+148
-236
lines changed

.github/workflows/build.yml

+63-3
Original file line number | Diff line number | Diff line change
@@ -636,7 +636,6 @@ jobs:
636636
env:
637637
CSP_TEST_SKIP_EXAMPLES: "1"
638638

639-
640639
####################################################
641640
#..................................................#
642641
#..|########|..|########|..../####\....|########|..#
@@ -736,7 +735,68 @@ jobs:
736735
###########################################################################################################
737736
# Test Service Adapters #
738737
###########################################################################################################
739-
# Coming soon!
738+
test_adapters:
739+
needs:
740+
- initialize
741+
- build
742+
743+
strategy:
744+
matrix:
745+
os:
746+
- ubuntu-24.04
747+
python-version:
748+
- 3.11
749+
adapter:
750+
- kafka
751+
752+
runs-on: ${{ matrix.os }}
753+
754+
steps:
755+
- name: Checkout
756+
uses: actions/checkout@v4
757+
with:
758+
submodules: recursive
759+
760+
- name: Set up Python ${{ matrix.python-version }}
761+
uses: ./.github/actions/setup-python
762+
with:
763+
version: '${{ matrix.python-version }}'
764+
765+
- name: Install python dependencies
766+
run: make requirements
767+
768+
- name: Install test dependencies
769+
shell: bash
770+
run: sudo apt-get install graphviz
771+
772+
# Download artifact
773+
- name: Download wheel
774+
uses: actions/download-artifact@v4
775+
with:
776+
name: csp-dist-${{ runner.os }}-${{ runner.arch }}-${{ matrix.python-version }}
777+
778+
- name: Install wheel
779+
run: |
780+
python -m pip install -U *manylinux*.whl
781+
python -m pip install -U --no-deps *manylinux*.whl --target .
782+
783+
- name: Spin up adapter service
784+
run: make dockerup ADAPTER=${{ matrix.adapter }} DOCKERARGS="--wait --wait-timeout 30"
785+
786+
- name: Wait a few seconds after docker images have been spun up
787+
run: sleep 30
788+
789+
# Run tests
790+
- name: Setup test flags
791+
shell: bash
792+
run: echo "CSP_TEST_$( echo ${{ matrix.adapter }} | awk '{print toupper($0)}' )=1" >> $GITHUB_ENV
793+
794+
- name: Python Test Steps
795+
run: make test-py TEST_ARGS="-k ${{ matrix.adapter }}"
796+
797+
- name: Spin down adapter service
798+
run: make dockerdown ADAPTER=${{ matrix.adapter }}
799+
if: ${{ always() }}
740800

741801
############################################################################################
742802
#..........................................................................................#
@@ -751,7 +811,6 @@ jobs:
751811
############################################################################################
752812
# Upload Release Artifacts #
753813
############################################################################################
754-
755814
# only publish artifacts on tags, but otherwise this always runs
756815
# Note this whole workflow only triggers on release tags (e.g. "v0.1.0")
757816
publish_release_artifacts:
@@ -763,6 +822,7 @@ jobs:
763822
- test
764823
- test_sdist
765824
- test_dependencies
825+
- test_adapters
766826

767827
if: startsWith(github.ref, 'refs/tags/v')
768828
runs-on: ubuntu-24.04

Makefile

+7-6
Original file line number | Diff line number | Diff line change
@@ -105,21 +105,22 @@ tests: test
105105

106106
.PHONY: dockerup dockerps dockerdown initpodmanmac
107107
ADAPTER := kafka
108-
DOCKER := podman
108+
DOCKER_COMPOSE := docker compose # or podman-compose
109+
DOCKERARGS :=
109110

110111
initpodmanmac:
111112
podman machine stop
112113
podman machine set --cpus 4 --memory 8096
113114
podman machine start
114115

115116
dockerup: ## spin up docker compose services for adapter testing
116-
$(DOCKER) compose -f ci/$(ADAPTER)/docker-compose.yml up -d
117+
$(DOCKER_COMPOSE) -f ci/$(ADAPTER)/docker-compose.yml up -d $(DOCKERARGS)
117118

118-
dockerps: ## spin up docker compose services for adapter testing
119-
$(DOCKER) compose -f ci/$(ADAPTER)/docker-compose.yml ps
119+
dockerps: ## get status of current docker compose services
120+
$(DOCKER_COMPOSE) -f ci/$(ADAPTER)/docker-compose.yml ps
120121

121-
dockerdown: ## spin up docker compose services for adapter testing
122-
$(DOCKER) compose -f ci/$(ADAPTER)/docker-compose.yml down
122+
dockerdown: ## spin down docker compose services for adapter testing
123+
$(DOCKER_COMPOSE) -f ci/$(ADAPTER)/docker-compose.yml down
123124

124125
###########
125126
# VERSION #

ci/kafka/docker-compose.yml

+26-167
Original file line number | Diff line number | Diff line change
@@ -1,181 +1,40 @@
1-
# https://docs.confluent.io/platform/current/platform-quickstart.html
2-
# https://raw.githubusercontent.com/confluentinc/cp-all-in-one/7.5.3-post/cp-all-in-one-kraft/docker-compose.yml
1+
# https://github.com/conduktor/kafka-stack-docker-compose
32
---
4-
version: '2'
3+
version: '2.1'
4+
55
services:
6-
zookeeper:
7-
image: confluentinc/cp-zookeeper:7.5.3
8-
hostname: zookeeper
9-
container_name: zookeeper
6+
zoo1:
7+
image: confluentinc/cp-zookeeper:7.3.2
8+
hostname: zoo1
9+
container_name: zoo1
1010
ports:
1111
- "2181:2181"
1212
environment:
1313
ZOOKEEPER_CLIENT_PORT: 2181
14-
ZOOKEEPER_TICK_TIME: 2000
14+
ZOOKEEPER_SERVER_ID: 1
15+
ZOOKEEPER_SERVERS: zoo1:2888:3888
1516

16-
broker:
17-
image: confluentinc/cp-server:7.5.3
18-
hostname: broker
19-
container_name: broker
20-
depends_on:
21-
- zookeeper
17+
kafka1:
18+
image: confluentinc/cp-kafka:7.3.2
19+
hostname: kafka1
20+
container_name: kafka1
2221
ports:
2322
- "9092:9092"
24-
- "9101:9101"
23+
- "29092:29092"
24+
- "9999:9999"
2525
environment:
26+
KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka1:19092,EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092,DOCKER://host.docker.internal:29092
27+
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,DOCKER:PLAINTEXT
28+
KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
29+
KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
2630
KAFKA_BROKER_ID: 1
27-
KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
28-
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
29-
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092
30-
KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter
31+
KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
3132
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
32-
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
33-
KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1
34-
KAFKA_CONFLUENT_BALANCER_TOPIC_REPLICATION_FACTOR: 1
35-
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
3633
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
37-
KAFKA_JMX_PORT: 9101
38-
KAFKA_JMX_HOSTNAME: localhost
39-
KAFKA_CONFLUENT_SCHEMA_REGISTRY_URL: http://schema-registry:8081
40-
CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: broker:29092
41-
CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1
42-
CONFLUENT_METRICS_ENABLE: 'true'
43-
CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous'
44-
KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
45-
TOPIC_AUTO_CREATE: 'true'
46-
47-
schema-registry:
48-
image: confluentinc/cp-schema-registry:7.5.3
49-
hostname: schema-registry
50-
container_name: schema-registry
51-
depends_on:
52-
- broker
53-
ports:
54-
- "8081:8081"
55-
environment:
56-
SCHEMA_REGISTRY_HOST_NAME: schema-registry
57-
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:29092'
58-
SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
59-
60-
connect:
61-
image: cnfldemos/cp-server-connect-datagen:0.6.2-7.5.0
62-
hostname: connect
63-
container_name: connect
64-
depends_on:
65-
- broker
66-
- schema-registry
67-
ports:
68-
- "8083:8083"
69-
environment:
70-
CONNECT_BOOTSTRAP_SERVERS: 'broker:29092'
71-
CONNECT_REST_ADVERTISED_HOST_NAME: connect
72-
CONNECT_GROUP_ID: compose-connect-group
73-
CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
74-
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
75-
CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
76-
CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
77-
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
78-
CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
79-
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
80-
CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
81-
CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
82-
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
83-
# CLASSPATH required due to CC-2422
84-
CLASSPATH: /usr/share/java/monitoring-interceptors/monitoring-interceptors-7.5.3.jar
85-
CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor"
86-
CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor"
87-
CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
88-
CONNECT_LOG4J_LOGGERS: org.apache.zookeeper=ERROR,org.I0Itec.zkclient=ERROR,org.reflections=ERROR
89-
90-
control-center:
91-
image: confluentinc/cp-enterprise-control-center:7.5.3
92-
hostname: control-center
93-
container_name: control-center
94-
depends_on:
95-
- broker
96-
- schema-registry
97-
- connect
98-
- ksqldb-server
99-
ports:
100-
- "9021:9021"
101-
environment:
102-
CONTROL_CENTER_BOOTSTRAP_SERVERS: 'broker:29092'
103-
CONTROL_CENTER_CONNECT_CONNECT-DEFAULT_CLUSTER: 'connect:8083'
104-
CONTROL_CENTER_KSQL_KSQLDB1_URL: "http://ksqldb-server:8088"
105-
CONTROL_CENTER_KSQL_KSQLDB1_ADVERTISED_URL: "http://localhost:8088"
106-
CONTROL_CENTER_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
107-
CONTROL_CENTER_REPLICATION_FACTOR: 1
108-
CONTROL_CENTER_INTERNAL_TOPICS_PARTITIONS: 1
109-
CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_PARTITIONS: 1
110-
CONFLUENT_METRICS_TOPIC_REPLICATION: 1
111-
PORT: 9021
112-
113-
ksqldb-server:
114-
image: confluentinc/cp-ksqldb-server:7.5.3
115-
hostname: ksqldb-server
116-
container_name: ksqldb-server
117-
depends_on:
118-
- broker
119-
- connect
120-
ports:
121-
- "8088:8088"
122-
environment:
123-
KSQL_CONFIG_DIR: "/etc/ksql"
124-
KSQL_BOOTSTRAP_SERVERS: "broker:29092"
125-
KSQL_HOST_NAME: ksqldb-server
126-
KSQL_LISTENERS: "http://0.0.0.0:8088"
127-
KSQL_CACHE_MAX_BYTES_BUFFERING: 0
128-
KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
129-
KSQL_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor"
130-
KSQL_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor"
131-
KSQL_KSQL_CONNECT_URL: "http://connect:8083"
132-
KSQL_KSQL_LOGGING_PROCESSING_TOPIC_REPLICATION_FACTOR: 1
133-
KSQL_KSQL_LOGGING_PROCESSING_TOPIC_AUTO_CREATE: 'true'
134-
KSQL_KSQL_LOGGING_PROCESSING_STREAM_AUTO_CREATE: 'true'
135-
136-
# ksqldb-cli:
137-
# image: confluentinc/cp-ksqldb-cli:7.5.3
138-
# container_name: ksqldb-cli
139-
# depends_on:
140-
# - broker
141-
# - connect
142-
# - ksqldb-server
143-
# entrypoint: /bin/sh
144-
# tty: true
145-
146-
# ksql-datagen:
147-
# image: confluentinc/ksqldb-examples:7.5.3
148-
# hostname: ksql-datagen
149-
# container_name: ksql-datagen
150-
# depends_on:
151-
# - ksqldb-server
152-
# - broker
153-
# - schema-registry
154-
# - connect
155-
# command: "bash -c 'echo Waiting for Kafka to be ready... && \
156-
# cub kafka-ready -b broker:29092 1 40 && \
157-
# echo Waiting for Confluent Schema Registry to be ready... && \
158-
# cub sr-ready schema-registry 8081 40 && \
159-
# echo Waiting a few seconds for topic creation to finish... && \
160-
# sleep 11 && \
161-
# tail -f /dev/null'"
162-
# environment:
163-
# KSQL_CONFIG_DIR: "/etc/ksql"
164-
# STREAMS_BOOTSTRAP_SERVERS: broker:29092
165-
# STREAMS_SCHEMA_REGISTRY_HOST: schema-registry
166-
# STREAMS_SCHEMA_REGISTRY_PORT: 8081
167-
168-
rest-proxy:
169-
image: confluentinc/cp-kafka-rest:7.5.3
34+
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
35+
KAFKA_JMX_PORT: 9999
36+
KAFKA_JMX_HOSTNAME: ${DOCKER_HOST_IP:-127.0.0.1}
37+
KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.authorizer.AclAuthorizer
38+
KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
17039
depends_on:
171-
- broker
172-
- schema-registry
173-
ports:
174-
- 8082:8082
175-
hostname: rest-proxy
176-
container_name: rest-proxy
177-
environment:
178-
KAFKA_REST_HOST_NAME: rest-proxy
179-
KAFKA_REST_BOOTSTRAP_SERVERS: 'broker:29092'
180-
KAFKA_REST_LISTENERS: "http://0.0.0.0:8082"
181-
KAFKA_REST_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
40+
- zoo1

conda/dev-environment-unix.yml

-1
Original file line number | Diff line number | Diff line change
@@ -15,7 +15,6 @@ dependencies:
1515
- flex
1616
- graphviz
1717
- gtest
18-
- httpx>=0.20,<1
1918
- isort>=5,<6
2019
- libarrow=16
2120
- libboost>=1.80.0

conda/dev-environment-win.yml

-1
Original file line number | Diff line number | Diff line change
@@ -13,7 +13,6 @@ dependencies:
1313
- exprtk
1414
- graphviz
1515
- gtest
16-
- httpx>=0.20,<1
1716
- isort>=5,<6
1817
- libarrow=16
1918
- libboost>=1.80.0

csp/adapters/kafka.py

+4-10
Original file line number | Diff line number | Diff line change
@@ -6,18 +6,12 @@
66
import csp
77
from csp import ts
88
from csp.adapters.status import Status
9-
from csp.adapters.utils import (
10-
BytesMessageProtoMapper,
11-
DateTimeType,
12-
JSONTextMessageMapper,
13-
MsgMapper,
14-
RawBytesMessageMapper,
15-
RawTextMessageMapper,
16-
)
9+
from csp.adapters.utils import MsgMapper
1710
from csp.impl.wiring import input_adapter_def, output_adapter_def, status_adapter_def
1811
from csp.lib import _kafkaadapterimpl
1912

20-
_ = BytesMessageProtoMapper, DateTimeType, JSONTextMessageMapper, RawBytesMessageMapper, RawTextMessageMapper
13+
__all__ = ("KafkaStatusMessageType", "KafkaStartOffset", "KafkaAdapterManager")
14+
2115
T = TypeVar("T")
2216

2317

@@ -73,7 +67,7 @@ def __init__(
7367

7468
consumer_properties = {
7569
"group.id": group_id,
76-
# To get end of parition notification for live / not live flag
70+
# To get end of partition notification for live / not live flag
7771
"enable.partition.eof": "true",
7872
}
7973

csp/tests/adapters/conftest.py

+2-3
Original file line number | Diff line number | Diff line change
@@ -5,13 +5,12 @@
55

66
@pytest.fixture(scope="module", autouse=True)
77
def kafkabroker():
8+
# Defined in ci/kafka/docker-compose.yml
89
return "localhost:9092"
910

1011

1112
@pytest.fixture(scope="module", autouse=True)
1213
def kafkaadapter(kafkabroker):
1314
group_id = "group.id123"
14-
_kafkaadapter = KafkaAdapterManager(
15-
broker=kafkabroker, group_id=group_id, rd_kafka_conf_options={"allow.auto.create.topics": "true"}
16-
)
15+
_kafkaadapter = KafkaAdapterManager(broker=kafkabroker, group_id=group_id)
1716
return _kafkaadapter

0 commit comments

Comments (0)