Mirror of https://github.com/pcvolkmer/etl-processor.git (synced 2025-04-19 17:26:51 +00:00)
test: add kafka test/dev environment docker-compose.dev.yml; add README_TEST_WITH_GPAS.md
parent 4c0a444725
commit e9e7139ca4
dev/README_TEST_WITH_GPAS.md (new file, 11 lines)
@@ -0,0 +1,11 @@
# Test with gPAS

1. Download the [latest Docker Compose version of gPAS](https://www.ths-greifswald.de/gpas/#_download)
2. Copy `./demo/demo_gpas.sql` into the `./sqls` folder
3. Change the port mapping if needed
4. Start up via `docker compose up -d`

By default, PSNs are created via the `localhost:8080/ttp-fhir/fhir/gpas/$pseudonymizeAllowCreate` endpoint.
You can review created PSNs via the gPAS web interface running at `http://localhost:8080/gpas-web/`.
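To check that pseudonym creation works, you can call the endpoint directly. The snippet below is only a minimal sketch: it assumes the usual gPAS FHIR Gateway request layout, a `Parameters` resource carrying a `target` (domain name) and an `original` value, and the domain name `demo` is a placeholder for whatever domain `demo_gpas.sql` actually creates in your installation.

```sh
# Minimal sketch of a pseudonymization request (layout and domain name are assumptions;
# check the gPAS FHIR Gateway documentation for your version).
curl -s -X POST 'http://localhost:8080/ttp-fhir/fhir/gpas/$pseudonymizeAllowCreate' \
  -H 'Content-Type: application/fhir+json' \
  -d '{
        "resourceType": "Parameters",
        "parameter": [
          { "name": "target",   "valueString": "demo" },
          { "name": "original", "valueString": "1234567890" }
        ]
      }'
```

The response should be a `Parameters` resource containing the created pseudonym, which should then also show up in the gPAS web interface.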
dev/docker-compose.dev.yml (new file, 90 lines)
@@ -0,0 +1,90 @@
version: '3.7'

services:

  zoo1:
    image: zookeeper:3.8.0
    hostname: zoo1
    ports:
      - "2181:2181"
    environment:
      ZOO_MY_ID: 1
      ZOO_PORT: 2181
      ZOO_SERVERS: server.1=zoo1:2888:3888;2181

  kafka1:
    image: confluentinc/cp-kafka:7.2.1
    hostname: kafka1
    ports:
      - "9092:9092"
    environment:
      KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka1:19092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
      KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
      KAFKA_BROKER_ID: 1
      KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
    depends_on:
      - zoo1

  kafka-rest-proxy:
    image: confluentinc/cp-kafka-rest:7.2.1
    hostname: kafka-rest-proxy
    ports:
      - "8082:8082"
    environment:
      # KAFKA_REST_ZOOKEEPER_CONNECT: zoo1:2181
      KAFKA_REST_LISTENERS: http://0.0.0.0:8082/
      KAFKA_REST_SCHEMA_REGISTRY_URL: http://kafka-schema-registry:8081/
      KAFKA_REST_HOST_NAME: kafka-rest-proxy
      KAFKA_REST_BOOTSTRAP_SERVERS: PLAINTEXT://kafka1:19092
    depends_on:
      - zoo1
      - kafka1

  kafka-connect:
    image: confluentinc/cp-kafka-connect:7.2.1
    hostname: kafka-connect
    ports:
      - "8083:8083"
    environment:
      CONNECT_BOOTSTRAP_SERVERS: "kafka1:19092"
      CONNECT_REST_PORT: 8083
      CONNECT_GROUP_ID: compose-connect-group
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      CONNECT_KEY_CONVERTER: "org.apache.kafka.connect.storage.StringConverter"
      CONNECT_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect"
      CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO"
      CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR"
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_PLUGIN_PATH: "/usr/share/java,/etc/kafka-connect/jars"
    #volumes:
    #  - ./connectors:/etc/kafka-connect/jars/
    depends_on:
      - zoo1
      - kafka1
      - kafka-rest-proxy

  akhq:
    image: tchiotludo/akhq:0.21.0
    environment:
      AKHQ_CONFIGURATION: |
        akhq:
          connections:
            docker-kafka-server:
              properties:
                bootstrap.servers: "kafka1:19092"
              connect:
                - name: "kafka-connect"
                  url: "http://kafka-connect:8083"
    ports:
      - "8084:8080"
    depends_on:
      - kafka1
      - kafka-connect
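Once the stack is up, a quick way to confirm that the services came up is to hit them on their mapped host ports. The commands below are only a sketch of such a smoke test, using standard endpoints of the Confluent REST proxy and the Kafka Connect REST API.

```sh
# Start the dev stack defined above.
docker compose -f docker-compose.dev.yml up -d

# Kafka REST proxy (host port 8082): list topics to confirm the broker is reachable.
curl -s http://localhost:8082/topics

# Kafka Connect REST API (host port 8083): list the installed connector plugins.
curl -s http://localhost:8083/connector-plugins

# AKHQ web UI is mapped to host port 8084: open http://localhost:8084 in a browser.
```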