I am trying to run a simple Spring Boot app with PostgreSQL via docker-compose. Here is my docker-compose.yml file:
version: "3.9"
services:
persons-java-api:
container_name: persons-java-api
build: .
ports:
- "8081:8081"
depends_on:
- persons-db-service
persons-db-service:
container_name: persons-db-service
image: postgres:14.4
environment:
- POSTGRES_USER:postgres
- POSTGRES_PASSWORD:123456
- POSTGRES_DB:persons-db
ports:
- "5432:5432"
Spring application Dockerfile
FROM openjdk:11
WORKDIR /service
COPY target/person-0.0.1-SNAPSHOT.jar /service/app.jar
CMD ["java", "-jar", "app.jar"]
And here is the application.properties file of the Spring app:
server.port=8081
spring.application.name=person
spring.jpa.properties.hibernate.dialect=org.hibernate.dialect.PostgreSQLDialect
spring.jpa.hibernate.ddl-auto=update
spring.datasource.url=jdbc:postgresql://persons-db-service:5432/persons-db
spring.datasource.username=postgres
spring.datasource.password=123456
#spring.h2.console.enabled=true
spring.jpa.properties.hibernate.format_sql=true
When I run docker-compose up, it fails with the following error:
Attaching to persons-db-service, persons-java-api
persons-db-service | Error: Database is uninitialized and superuser password is not specified.
persons-db-service | You must specify POSTGRES_PASSWORD to a non-empty value for the
persons-db-service | superuser. For example, "-e POSTGRES_PASSWORD=password" on "docker run".
persons-db-service |
persons-db-service | You may also use "POSTGRES_HOST_AUTH_METHOD=trust" to allow all
persons-db-service | connections without a password. This is *not* recommended.
persons-db-service |
persons-db-service | See PostgreSQL documentation about "trust":
persons-db-service | https://www.postgresql.org/docs/current/auth-trust.html
persons-db-service exited with code 1
org.postgresql.util.PSQLException: The connection attempt failed.
persons-java-api | at org.postgresql.core.v3.ConnectionFactoryImpl.openConnectionImpl(ConnectionFactoryImpl.java:331) ~[postgresql-42.3.5.jar!/:42.3.5]
persons-java-api | at org.postgresql.core.ConnectionFactory.openConnection(ConnectionFactory.java:49) ~[postgresql-42.3.5.jar!/:42.3.5]
persons-java-api | at org.postgresql.jdbc.PgConnection.<init>(PgConnection.java:223) ~[postgresql-42.3.5.jar!/:42.3.5]
persons-java-api | at org.postgresql.Driver.makeConnection(Driver.java:402) ~[postgresql-42.3.5.jar!/:42.3.5]
persons-java-api | at org.postgresql.Driver.connect(Driver.java:261) ~[postgresql-42.3.5.jar!/:42.3.5]
persons-java-api | at com.zaxxer.hikari.util.DriverDataSource.getConnection(DriverDataSource.java:138) ~[HikariCP-4.0.3.jar!/:na]
persons-java-api | at com.zaxxer.hikari.pool.PoolBase.newConnection(PoolBase.java:364) ~[HikariCP-4.0.3.jar!/:na]
persons-java-api | at com.zaxxer.hikari.pool.PoolBase.newPoolEntry(PoolBase.java:206) ~[HikariCP-4.0.3.jar!/:na]
persons-java-api | at com.zaxxer.hikari.pool.HikariPool.createPoolEntry(HikariPool.java:476) ~[HikariCP-4.0.3.jar!/:na]
persons-java-api | at com.zaxxer.hikari.pool.HikariPool.checkFailFast(HikariPool.java:561) ~[HikariCP-4.0.3.jar!/:na]
persons-java-api | at com.zaxxer.hikari.pool.HikariPool.<init>(HikariPool.java:115) ~[HikariCP-4.0.3.jar!/:na]
persons-java-api | at com.zaxxer.hikari.HikariDataSource.getConnection(HikariDataSource.java:112) ~[HikariCP-4.0.3.jar!/:na]
persons-java-api | at org.hibernate.engine.jdbc.connections.internal.DatasourceConnectionProviderImpl.getConnection(DatasourceConnectionProviderImpl.java:122) ~[hibernate-core-5.6.9.Final.jar!/:5.6.9.Final]
persons-java-api | at org.hibernate.engine.jdbc.env.internal.JdbcEnvironmentInitiator$ConnectionProviderJdbcConnectionAccess.obtainConnection(JdbcEnvironmentInitiator.java:181) ~[hibernate-core-5.6.9.Final.jar!/:5.6.9.Final]
persons-java-api | Caused by: java.net.UnknownHostException: persons-db
persons-java-api | at java.base/java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:229) ~[na:na]
persons-java-api | at java.base/java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[na:na]
persons-java-api | at java.base/java.net.Socket.connect(Socket.java:609) ~[na:na]
persons-java-api | at org.postgresql.core.PGStream.createSocket(PGStream.java:241) ~[postgresql-42.3.5.jar!/:42.3.5]
persons-java-api | at org.postgresql.core.PGStream.<init>(PGStream.java:98) ~[postgresql-42.3.5.jar!/:42.3.5]
persons-java-api | at org.postgresql.core.v3.ConnectionFactoryImpl.tryConnect(ConnectionFactoryImpl.java:109) ~[postgresql-42.3.5.jar!/:42.3.5]
persons-java-api | at org.postgresql.core.v3.ConnectionFactoryImpl.openConnectionImpl(ConnectionFactoryImpl.java:235) ~[postgresql-42.3.5.jar!/:42.3.5]
persons-java-api | ... 57 common frames omitted
Can someone kindly tell me what I am doing incorrectly here?
It seems that I also have to add spring.datasource.url to the compose file:
spring.datasource.url=jdbc:postgresql://persons-db-service:5432/persons-db
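Note that thanks to Spring Boot's relaxed binding, the same override can also be written as a conventional uppercase environment variable, which is often easier to read in a compose file. A minimal equivalent, using the same service and database names as above:

environment:
  - SPRING_DATASOURCE_URL=jdbc:postgresql://persons-db-service:5432/persons-db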
The whole working compose file is:
version: "3.9"
services:
persons-java-api:
container_name: persons-java-api
build: .
ports:
- "8081:8081"
environment:
- spring.datasource.url=jdbc:postgresql://persons-db-service:5432/persons-db
depends_on:
- persons-db-service
persons-db-service:
container_name: persons-db-service
image: postgres:${POSTGRES_VERSION}
volumes:
- ./db:/var/lib/postgresql/data
environment:
- POSTGRES_USER=${DB_USER}
- POSTGRES_PASSWORD=${DB_USER_PASSWORD}
- POSTGRES_DB=${DB_NAME}
ports:
- "5432:5432"
The environment variables come from a .env file.
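For reference, a matching .env file would look something like this (the values here are placeholders, substitute your own):

POSTGRES_VERSION=14.4
DB_USER=postgres
DB_USER_PASSWORD=123456
DB_NAME=persons-db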
I am running Airflow locally based on a Dockerfile, .env, docker-compose.yaml and entrypoint.sh, started with docker-compose -f docker-compose.yaml up.
Just after "airflow init db" runs in entrypoint.sh, I get the following error.
(Afterwards everything is fine and I can run Airflow, but this drives me crazy. Can anyone help me resolve it, please?)
What's strange is that the service queries the tables even before the DB has been initialized:
airflow_webserver | initiating db
airflow_webserver | DB: postgresql://airflow:***@airflow_metadb:5432/airflow
airflow_webserver | [2022-02-22 13:52:26,318] {db.py:929} INFO - Dropping tables that exist
airflow_webserver | [2022-02-22 13:52:26,570] {migration.py:201} INFO - Context impl PostgresqlImpl.
airflow_webserver | [2022-02-22 13:52:26,570] {migration.py:204} INFO - Will assume transactional DDL.
airflow_metadb | 2022-02-22 13:52:26.712 UTC [71] ERROR: relation "connection" does not exist at character 55
airflow_metadb | 2022-02-22 13:52:26.712 UTC [71] STATEMENT: SELECT connection.conn_id AS connection_conn_id
airflow_metadb | FROM connection GROUP BY connection.conn_id
airflow_metadb | HAVING count(*) > 1
airflow_metadb | 2022-02-22 13:52:26.714 UTC [72] ERROR: relation "connection" does not exist at character 55
airflow_metadb | 2022-02-22 13:52:26.714 UTC [72] STATEMENT: SELECT connection.conn_id AS connection_conn_id
airflow_metadb | FROM connection
airflow_metadb | WHERE connection.conn_type IS NULL
airflow_webserver | [2022-02-22 13:52:26,733] {db.py:921} INFO - Creating tables
Airflow 2.2.3, Postgres 13.
In the Dockerfile:
ENTRYPOINT ["/entrypoint.sh"]
in docker-compose.yaml:
webserver:
  env_file: ./.env
  image: airflow
  container_name: airflow_webserver
  restart: always
  depends_on:
    - postgres
  environment:
    <<: *env_common
    AIRFLOW__CORE__LOAD_EXAMPLES: ${AIRFLOW__CORE__LOAD_EXAMPLES}
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: ${AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION}
    EXECUTOR: ${EXECUTOR}
    _AIRFLOW_DB_UPGRADE: ${_AIRFLOW_DB_UPGRADE}
    _AIRFLOW_WWW_USER_CREATE: ${_AIRFLOW_WWW_USER_CREATE}
    _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME}
    _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD}
    _AIRFLOW_WWW_USER_ROLE: ${_AIRFLOW_WWW_USER_ROLE}
    _AIRFLOW_WWW_USER_EMAIL: ${_AIRFLOW_WWW_USER_EMAIL}
  logging:
    options:
      max-size: 10m
      max-file: "3"
  volumes:
    - ./dags:bla-bla
    - ./logs:bla-bla
  ports:
    - ${AIRFLOW_WEBSERVER_PORT}:${AIRFLOW_WEBSERVER_PORT}
  command: webserver
  healthcheck:
    test: ["CMD-SHELL", "[ -f /usr/local/airflow/airflow-webserver.pid ]"]
    interval: 30s
    timeout: 30s
    retries: 3
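As an aside: if the goal is simply to keep the webserver from touching Postgres before the database is accepting connections, a common pattern is to gate it on a healthcheck rather than a bare depends_on. A sketch (the long depends_on form with condition requires a Compose version that supports it, e.g. Compose v2 or file format 2.1+; the user name is taken from the connection string above):

postgres:
  image: postgres:13
  healthcheck:
    # pg_isready exits 0 once Postgres accepts connections
    test: ["CMD", "pg_isready", "-U", "airflow"]
    interval: 5s
    retries: 5

webserver:
  depends_on:
    postgres:
      condition: service_healthy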
I am trying to learn app deployment with Docker. My configurations are as below:
application.properties
####### Mongo Properties ###########
spring.data.mongodb.uri=mongodb://mongo/locationsdb
Dockerfile
FROM openjdk:14-alpine
ARG JAR_FILE=./target/*jar
COPY ${JAR_FILE} jarapp.jar
EXPOSE 8080
ENTRYPOINT ["java", "-Dspring.profiles.active=docker", "-jar", "jarapp.jar"]
docker-compose.yml
version: "3"
services:
mongodb-container:
image: mongo:latest
container_name: "mongodb-container"
restart: always
ports:
- 27017:27017
server-container:
image: server_side
container_name: "server-container"
restart: always
ports:
- 8080:8080
links:
- mongodb-container
depends_on:
- mongodb-container
After the above, I ran the following:
docker-compose config
docker-compose up --build
But I was getting the error below:
server-container | 2021-09-02 09:44:41.253 INFO 1 --- [localhost:27017] org.mongodb.driver.cluster : Exception in monitor thread while connecting to server localhost:27017
server-container |
server-container | com.mongodb.MongoSocketOpenException: Exception opening socket
server-container | at com.mongodb.internal.connection.SocketStream.open(SocketStream.java:70) ~[mongodb-driver-core-4.2.3.jar!/:na]
server-container | at com.mongodb.internal.connection.InternalStreamConnection.open(InternalStreamConnection.java:143) ~[mongodb-driver-core-4.2.3.jar!/:na]
server-container | at com.mongodb.internal.connection.DefaultServerMonitor$ServerMonitorRunnable.lookupServerDescription(DefaultServerMonitor.java:188) ~[mongodb-driver-core-4.2.3.jar!/:na]
server-container | at com.mongodb.internal.connection.DefaultServerMonitor$ServerMonitorRunnable.run(DefaultServerMonitor.java:144) ~[mongodb-driver-core-4.2.3.jar!/:na]
server-container | at java.base/java.lang.Thread.run(Thread.java:832) ~[na:na]
server-container | Caused by: java.net.ConnectException: Connection refused
server-container | at java.base/sun.nio.ch.Net.pollConnect(Native Method) ~[na:na]
server-container | at java.base/sun.nio.ch.Net.pollConnectNow(Net.java:589) ~[na:na]
server-container | at java.base/sun.nio.ch.NioSocketImpl.timedFinishConnect(NioSocketImpl.java:542) ~[na:na]
server-container | at java.base/sun.nio.ch.NioSocketImpl.connect(NioSocketImpl.java:597) ~[na:na]
server-container | at java.base/java.net.SocksSocketImpl.connect(SocksSocketImpl.java:333) ~[na:na]
server-container | at java.base/java.net.Socket.connect(Socket.java:648) ~[na:na]
server-container | at com.mongodb.internal.connection.SocketStreamHelper.initialize(SocketStreamHelper.java:107) ~[mongodb-driver-core-4.2.3.jar!/:na]
server-container | at com.mongodb.internal.connection.SocketStream.initializeSocket(SocketStream.java:79) ~[mongodb-driver-core-4.2.3.jar!/:na]
server-container | at com.mongodb.internal.connection.SocketStream.open(SocketStream.java:65) ~[mongodb-driver-core-4.2.3.jar!/:na]
server-container | ... 4 common frames omitted
server-container |
server-container | 2021-09-02 09:44:43.395 INFO 1 --- [ main] o.s.b.w.embedded.tomcat.TomcatWebServer : Tomcat started on port(s): 8080 (http) with context path ''
server-container | 2021-09-02 09:44:43.429 INFO 1 --- [ main] c.f.virtuallab.VirtuallabApplication : Started VirtuallabApplication in 26.943 seconds (JVM running for 28.445)
mongodb-container | {"t":{"$date":"2021-09-02T09:45:13.967+00:00"},"s":"I", "c":"STORAGE", "id":22430, "ctx":"Checkpointer","msg":"WiredTiger message","attr":{"message":"[1630575913:967258][1:0x7fef40740700], WT_SESSION.checkpoint: [WT_VERB_CHECKPOINT_PROGRESS] saving checkpoint snapshot min: 34, snapshot max: 34 snapshot count: 0, oldest timestamp: (0, 0) , meta checkpoint timestamp: (0, 0) base write gen: 1"}}
As the log shows, there was an "Exception opening socket" problem, and yet afterwards it says: server-container | 2021-09-02 09:44:43.429 INFO 1 --- [ main] c.f.virtuallab.VirtuallabApplication : Started VirtuallabApplication in 26.943 seconds (JVM running for 28.445).
When I tried my endpoint, localhost:8080/api/v1/locations, I only got an Internal Server Error (500).
Could someone guide me on how to properly connect the mongodb and get the application started?
Try changing the Mongo URI to:
####### Mongo Properties ###########
spring.data.mongodb.uri=mongodb://mongodb-container/locationsdb
You are using mongo as your MongoDB host, but you have declared the MongoDB container as mongodb-container in your docker-compose file, so your MongoDB instance should be reached via the hostname mongodb-container, not mongo.
There is also no need to publish the MongoDB port to the host machine if only other containers connect to it. You do, however, have to use the same username and password, along with the hostname (that is, the container name) of the MongoDB container.
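Since the Dockerfile in the question already activates a docker profile (-Dspring.profiles.active=docker), another option is to keep localhost in the default properties and put the container hostname only in a profile-specific file. A sketch, following Spring's application-{profile}.properties convention:

# application-docker.properties - loaded only when the docker profile is active
spring.data.mongodb.uri=mongodb://mongodb-container:27017/locationsdb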
I have dockerized Kafka and Postgres. I use the JDBC Sink connector to load data from a Kafka topic into a Postgres table. First I create a topic and a stream on top of it with the "AVRO" value format.
CREATE STREAM TEST01 (ROWKEY VARCHAR KEY, COL1 INT, COL2 VARCHAR)
WITH (KAFKA_TOPIC='test01', PARTITIONS=1, VALUE_FORMAT='AVRO');
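For context, once the stream exists, test rows can be produced from the ksqlDB CLI; the values here are arbitrary examples, not from the original post:

INSERT INTO TEST01 (ROWKEY, COL1, COL2) VALUES ('X', 1, 'FOO');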
This is the code that creates the Sink Connector:
curl -X PUT http://localhost:8083/connectors/sink-jdbc-postgre-01/config \
-H "Content-Type: application/json" -d '{
"connector.class" : "io.confluent.connect.jdbc.JdbcSinkConnector",
"connection.url" : "jdbc:postgresql://postgres:5432/",
"topics" : "test01",
"key.converter" : "org.apache.kafka.connect.storage.StringConverter",
"value.converter" : "io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url": "http://schema-registry:8081",
"connection.user" : "postgres",
"connection.password" : "********",
"auto.create" : true,
"auto.evolve" : true,
"insert.mode" : "insert",
"pk.mode" : "record_key",
"pk.fields" : "MESSAGE_KEY"
}'
Then I check in Postgres whether any data came from Kafka by using the \dt command, and it returns the following: Did not find any relations.
Then I check the kafka-connect logs, and they show the following:
[2021-03-30 10:05:07,546] INFO Attempting to open connection #2 to PostgreSql (io.confluent.connect.jdbc.util.CachedConnectionProvider)
connect | [2021-03-30 10:05:07,577] INFO Unable to connect to database on attempt 2/3. Will retry in 10000 ms. (io.confluent.connect.jdbc.util.CachedConnectionProvider)
connect | org.postgresql.util.PSQLException: The connection attempt failed.
connect | at org.postgresql.core.v3.ConnectionFactoryImpl.openConnectionImpl(ConnectionFactoryImpl.java:296)
connect | at org.postgresql.core.ConnectionFactory.openConnection(ConnectionFactory.java:49)
connect | at org.postgresql.jdbc.PgConnection.<init>(PgConnection.java:211)
connect | at org.postgresql.Driver.makeConnection(Driver.java:459)
connect | at org.postgresql.Driver.connect(Driver.java:261)
connect | at java.sql.DriverManager.getConnection(DriverManager.java:664)
connect | at java.sql.DriverManager.getConnection(DriverManager.java:208)
connect | at io.confluent.connect.jdbc.dialect.GenericDatabaseDialect.getConnection(GenericDatabaseDialect.java:224)
connect | at io.confluent.connect.jdbc.util.CachedConnectionProvider.newConnection(CachedConnectionProvider.java:93)
connect | at io.confluent.connect.jdbc.util.CachedConnectionProvider.getConnection(CachedConnectionProvider.java:62)
connect | at io.confluent.connect.jdbc.sink.JdbcDbWriter.write(JdbcDbWriter.java:56)
connect | at io.confluent.connect.jdbc.sink.JdbcSinkTask.put(JdbcSinkTask.java:74)
connect | at org.apache.kafka.connect.runtime.WorkerSinkTask.deliverMessages(WorkerSinkTask.java:546)
connect | at org.apache.kafka.connect.runtime.WorkerSinkTask.poll(WorkerSinkTask.java:326)
connect | at org.apache.kafka.connect.runtime.WorkerSinkTask.iteration(WorkerSinkTask.java:228)
connect | at org.apache.kafka.connect.runtime.WorkerSinkTask.execute(WorkerSinkTask.java:196)
connect | at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:184)
connect | at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:234)
connect | at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
connect | at java.util.concurrent.FutureTask.run(FutureTask.java:266)
connect | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
connect | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
connect | at java.lang.Thread.run(Thread.java:748)
connect | Caused by: java.net.UnknownHostException: postgres
connect | at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:184)
connect | at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
connect | at java.net.Socket.connect(Socket.java:589)
connect | at org.postgresql.core.PGStream.<init>(PGStream.java:81)
connect | at org.postgresql.core.v3.ConnectionFactoryImpl.tryConnect(ConnectionFactoryImpl.java:92)
connect | at org.postgresql.core.v3.ConnectionFactoryImpl.openConnectionImpl(ConnectionFactoryImpl.java:196)
connect | ... 22 more
connect | [2021-03-30 10:05:17,578] INFO Attempting to open connection #3 to PostgreSql (io.confluent.connect.jdbc.util.CachedConnectionProvider)
connect | [2021-03-30 10:05:17,732] ERROR WorkerSinkTask{id=sink-jdbc-postgre-01-0} Task threw an uncaught and unrecoverable exception. Task is being killed and will not recover until manually restarted. Error: org.postgresql.util.PSQLException: The connection attempt failed. (org.apache.kafka.connect.runtime.WorkerSinkTask)
connect | org.apache.kafka.connect.errors.ConnectException: org.postgresql.util.PSQLException: The connection attempt failed.
connect | at io.confluent.connect.jdbc.util.CachedConnectionProvider.getConnection(CachedConnectionProvider.java:69)
connect | at io.confluent.connect.jdbc.sink.JdbcDbWriter.write(JdbcDbWriter.java:56)
connect | at io.confluent.connect.jdbc.sink.JdbcSinkTask.put(JdbcSinkTask.java:74)
connect | at org.apache.kafka.connect.runtime.WorkerSinkTask.deliverMessages(WorkerSinkTask.java:546)
connect | at org.apache.kafka.connect.runtime.WorkerSinkTask.poll(WorkerSinkTask.java:326)
connect | at org.apache.kafka.connect.runtime.WorkerSinkTask.iteration(WorkerSinkTask.java:228)
connect | at org.apache.kafka.connect.runtime.WorkerSinkTask.execute(WorkerSinkTask.java:196)
connect | at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:184)
connect | at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:234)
connect | at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
connect | at java.util.concurrent.FutureTask.run(FutureTask.java:266)
connect | at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
connect | at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
connect | at java.lang.Thread.run(Thread.java:748)
connect | Caused by: org.postgresql.util.PSQLException: The connection attempt failed.
connect | at org.postgresql.core.v3.ConnectionFactoryImpl.openConnectionImpl(ConnectionFactoryImpl.java:296)
connect | at org.postgresql.core.ConnectionFactory.openConnection(ConnectionFactory.java:49)
connect | at org.postgresql.jdbc.PgConnection.<init>(PgConnection.java:211)
connect | at org.postgresql.Driver.makeConnection(Driver.java:459)
connect | at org.postgresql.Driver.connect(Driver.java:261)
connect | at java.sql.DriverManager.getConnection(DriverManager.java:664)
connect | at java.sql.DriverManager.getConnection(DriverManager.java:208)
connect | at io.confluent.connect.jdbc.dialect.GenericDatabaseDialect.getConnection(GenericDatabaseDialect.java:224)
connect | at io.confluent.connect.jdbc.util.CachedConnectionProvider.newConnection(CachedConnectionProvider.java:93)
connect | at io.confluent.connect.jdbc.util.CachedConnectionProvider.getConnection(CachedConnectionProvider.java:62)
connect | ... 13 more
connect | Caused by: java.net.UnknownHostException: postgres
connect | at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:184)
connect | at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
connect | at java.net.Socket.connect(Socket.java:589)
connect | at org.postgresql.core.PGStream.<init>(PGStream.java:81)
connect | at org.postgresql.core.v3.ConnectionFactoryImpl.tryConnect(ConnectionFactoryImpl.java:92)
connect | at org.postgresql.core.v3.ConnectionFactoryImpl.openConnectionImpl(ConnectionFactoryImpl.java:196)
connect | ... 22 more
connect | [2021-03-30 10:05:17,734] ERROR WorkerSinkTask{id=sink-jdbc-postgre-01-0} Task threw an uncaught and unrecoverable exception (org.apache.kafka.connect.runtime.WorkerTask)
I supposed that the problem might be a missing PostgreSQL connector .jar file in /usr/share/java/kafka-connect-jdbc, but it is there:
root@connect:/usr/share/java/kafka-connect-jdbc# ls -l
total 8412
-rw-r--r-- 1 root root 17555 Apr 18 2020 common-utils-5.5.0.jar
-rw-r--r-- 1 root root 317816 Apr 18 2020 jtds-1.3.1.jar
-rw-r--r-- 1 root root 230113 Apr 18 2020 kafka-connect-jdbc-5.5.0.jar
-rw-r--r-- 1 root root 927447 Apr 18 2020 postgresql-42.2.10.jar
-rw-r--r-- 1 root root 41139 Apr 18 2020 slf4j-api-1.7.26.jar
-rw-r--r-- 1 root root 7064881 Apr 18 2020 sqlite-jdbc-3.25.2.jar
What could be the solution for that problem?
Thanks to @Robin Moffatt's tutorial and @OneCricketeer's tip, I found the way to solve this problem: Kafka Connect and Postgres should be in one docker-compose.yml file. I attach the docker-compose.yml below. Hope this helps anyone who faces the same problem:
---
version: '3'
services:
  zookeeper:
    image: confluentinc/cp-zookeeper:5.5.0
    hostname: zookeeper
    container_name: zookeeper
    ports:
      - "2181:2181"
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
  broker:
    image: confluentinc/cp-server:5.5.0
    hostname: broker
    container_name: broker
    depends_on:
      - zookeeper
    ports:
      - "9092:9092"
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092
      KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
      KAFKA_CONFLUENT_LICENSE_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
      CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: broker:29092
      CONFLUENT_METRICS_REPORTER_ZOOKEEPER_CONNECT: zookeeper:2181
      CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1
      CONFLUENT_METRICS_ENABLE: 'true'
      CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous'
  schema-registry:
    image: confluentinc/cp-schema-registry:5.5.0
    hostname: schema-registry
    container_name: schema-registry
    depends_on:
      - zookeeper
      - broker
    ports:
      - "8081:8081"
    environment:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  connect:
    image: cnfldemos/cp-server-connect-datagen:0.3.2-5.5.0
    hostname: connect
    container_name: connect
    depends_on:
      - zookeeper
      - broker
      - schema-registry
    ports:
      - "8083:8083"
    environment:
      CONNECT_BOOTSTRAP_SERVERS: 'broker:29092'
      CONNECT_REST_ADVERTISED_HOST_NAME: connect
      CONNECT_REST_PORT: 8083
      CONNECT_GROUP_ID: compose-connect-group
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
      CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      # CLASSPATH required due to CC-2422
      CLASSPATH: /usr/share/java/monitoring-interceptors/monitoring-interceptors-5.5.0.jar
      CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor"
      CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor"
      CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
      CONNECT_LOG4J_LOGGERS: org.apache.zookeeper=ERROR,org.I0Itec.zkclient=ERROR,org.reflections=ERROR
  ksqldb-server:
    image: confluentinc/cp-ksqldb-server:5.5.0
    hostname: ksqldb-server
    container_name: ksqldb-server
    depends_on:
      - broker
      - connect
    ports:
      - "8088:8088"
    environment:
      KSQL_CONFIG_DIR: "/etc/ksql"
      KSQL_BOOTSTRAP_SERVERS: "broker:29092"
      KSQL_HOST_NAME: ksqldb-server
      KSQL_LISTENERS: "http://0.0.0.0:8088"
      KSQL_CACHE_MAX_BYTES_BUFFERING: 0
      KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
      KSQL_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor"
      KSQL_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor"
      KSQL_KSQL_CONNECT_URL: "http://connect:8083"
  ksqldb-cli:
    image: confluentinc/cp-ksqldb-cli:5.5.0
    container_name: ksqldb-cli
    depends_on:
      - broker
      - connect
      - ksqldb-server
    entrypoint: /bin/sh
    tty: true
  rest-proxy:
    image: confluentinc/cp-kafka-rest:5.5.0
    depends_on:
      - zookeeper
      - broker
      - schema-registry
    ports:
      - 8082:8082
    hostname: rest-proxy
    container_name: rest-proxy
    environment:
      KAFKA_REST_HOST_NAME: rest-proxy
      KAFKA_REST_BOOTSTRAP_SERVERS: 'broker:29092'
      KAFKA_REST_LISTENERS: "http://0.0.0.0:8082"
      KAFKA_REST_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
  postgres:
    image: postgres
    restart: always
    environment:
      POSTGRES_PASSWORD: postgres
    ports:
      - 5432:5432
Here is the error from your stack trace:
java.net.UnknownHostException: postgres
This means that your Kafka Connect worker machine cannot resolve the hostname postgres, which typically happens when the two containers are not attached to the same Docker network.
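A quick way to confirm this from the host is to check which networks each container is attached to and whether the hostname resolves from inside the Connect container. A sketch (container names taken from the logs above; the Postgres container name may differ if it was started from a separate compose project, and getent assumes a glibc-based image like the Confluent ones):

# which Docker networks is each container on?
docker inspect -f '{{json .NetworkSettings.Networks}}' connect
docker inspect -f '{{json .NetworkSettings.Networks}}' postgres

# does "postgres" resolve from inside the Connect container?
docker exec connect getent hosts postgres

If the two containers do not share a network, the lookup fails with exactly this UnknownHostException.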
I am trying to start up a Spring Boot application that connects to two PostgreSQL databases using docker-compose, and I get a connection refused error when Spring tries to connect to either of these databases.
My configuration is as follows:
docker-compose.yml
version: '3.2'
services:
  mydb-1:
    container_name: mydb-1
    image: mydb-1
    ports:
      - '5432:5432'
    environment:
      - POSTGRES_PASSWORD=postgres
      - POSTGRES_USER=postgres
      - POSTGRES_DB=testdb1
  mydb-2:
    container_name: mydb-2
    image: mydb-2
    ports:
      - '5433:5432'
    environment:
      - POSTGRES_PASSWORD=postgres
      - POSTGRES_USER=postgres
      - POSTGRES_DB=testdb2
  my-server:
    image: my-server-spring
    restart: on-failure
    depends_on:
      - mydb-1
      - mydb-2
    environment:
      - SPRING_DB1-DATASOURCE_JDBC-URL=jdbc:postgresql://mydb-1:5432/testdb1
      - SPRING_DB2-DATASOURCE_JDBC-URL=jdbc:postgresql://mydb-2:5433/testdb2
    expose:
      - '8080'
    ports:
      - '8080:8080'
Spring application.properties
spring.db1-datasource.jdbc-url= jdbc:postgresql://localhost:5432/testdb1
spring.db1-datasource.username= postgres
spring.db1-datasource.password= postgres
spring.db1-datasource.driverClassName= org.postgresql.Driver
spring.db2-datasource.jdbc-url= jdbc:postgresql://localhost:5433/testdb2
spring.db2-datasource.username= postgres
spring.db2-datasource.password= postgres
spring.db2-datasource.driverClassName= org.postgresql.Driver
Stacktrace (Part of)
org.postgresql.util.PSQLException: Connection to testdb2:5433 refused. Check that the hostname and port are correct and that the postmaster is accepting TCP/IP connections.
advidi-server_1 | at org.postgresql.core.v3.ConnectionFactoryImpl.openConnectionImpl(ConnectionFactoryImpl.java:285) ~[postgresql-42.2.14.jar!/:42.2.14]
advidi-server_1 | at org.postgresql.core.ConnectionFactory.openConnection(ConnectionFactory.java:49) ~[postgresql-42.2.14.jar!/:42.2.14]
advidi-server_1 | at org.postgresql.jdbc.PgConnection.<init>(PgConnection.java:217) ~[postgresql-42.2.14.jar!/:42.2.14]
advidi-server_1 | at org.postgresql.Driver.makeConnection(Driver.java:458) ~[postgresql-42.2.14.jar!/:42.2.14]
advidi-server_1 | at org.postgresql.Driver.connect(Driver.java:260) ~[postgresql-42.2.14.jar!/:42.2.14]
advidi-server_1 | at com.zaxxer.hikari.util.DriverDataSource.getConnection(DriverDataSource.java:138) ~[HikariCP-3.4.5.jar!/:na]
advidi-server_1 | at com.zaxxer.hikari.pool.PoolBase.newConnection(PoolBase.java:358) ~[HikariCP-3.4.5.jar!/:na]
advidi-server_1 | at com.zaxxer.hikari.pool.PoolBase.newPoolEntry(PoolBase.java:206) ~[HikariCP-3.4.5.jar!/:na]
advidi-server_1 | at com.zaxxer.hikari.pool.HikariPool.createPoolEntry(HikariPool.java:477) ~[HikariCP-3.4.5.jar!/:na]
advidi-server_1 | at com.zaxxer.hikari.pool.HikariPool.checkFailFast(HikariPool.java:560) ~[HikariCP-3.4.5.jar!/:na]
advidi-server_1 | at com.zaxxer.hikari.pool.HikariPool.<init>(HikariPool.java:115) ~[HikariCP-3.4.5.jar!/:na]
advidi-server_1 | at com.zaxxer.hikari.HikariDataSource.getConnection(HikariDataSource.java:112) ~[HikariCP-3.4.5.jar!/:na]
advidi-server_1 | at org.hibernate.engine.jdbc.connections.internal.DatasourceConnectionProviderImpl.getConnection(DatasourceConnectionProviderImpl.java:122) ~[hibernate-core-5.4.18.Final.jar!/:5.4.18.Final]
advidi-server_1 | at org.hibernate.engine.jdbc.env.internal.JdbcEnvironmentInitiator$ConnectionProviderJdbcConnectionAccess.obtainConnection(JdbcEnvironmentInitiator.java:180) ~[hibernate-core-5.4.18.Final.jar!/:5.4.18.Final]
Initially I thought the problem was that the Spring application was trying to connect to the DBs before their bootstrapping was complete, but that doesn't seem to be the case, since with restart: on-failure it should manage to connect at some point.
The default localhost values should also not be a problem since these are replaced by the environment variables in my docker-compose file.
Any ideas?
You're not showing the mydb-1 and mydb-2 image configurations, so I'm practically guessing here.
No need to map the ports to the host. Remove the ports: entries.
The spring container should use port 5432 in order to connect to both DB containers. Each DB container has its own IP so there's no problem with that. Don't connect to localhost because that's the container itself.
No need to specify :5432 in the connection strings because that's the default.
Seems like you have connection settings both in application.properties and in the environment: entry of the server configuration. Which one is the application actually using? In any case, your connection strings should end with mydb-1/testdb1 and mydb-2/testdb2 - nothing more complicated than that.
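Concretely, with those points applied, the server's environment would point at the service names on the in-network port 5432 (the 5433 mapping only exists on the host); variable names kept as in the question:

environment:
  - SPRING_DB1-DATASOURCE_JDBC-URL=jdbc:postgresql://mydb-1:5432/testdb1
  - SPRING_DB2-DATASOURCE_JDBC-URL=jdbc:postgresql://mydb-2:5432/testdb2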
You're passing SPRING_DB1-DATASOURCE_JDBC-URL in your environment but not using it in application.properties, so the variables are not overriding anything.
You need to either reference it in application.properties with something like
spring.db1-datasource.jdbc-url=${SPRING_DB1-DATASOURCE_JDBC-URL}
OR
in the environment: section, set the exact names of the properties from your application.properties:
- spring.db1-datasource.jdbc-url=jdbc:postgresql://mydb-1:5432/testdb1
I would suggest the second option, which still allows you to use the application.properties values as defaults when running locally.
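For completeness: prefixes like spring.db1-datasource are custom, so they only take effect if something binds them to a DataSource bean. A minimal sketch of what that configuration class usually looks like (class and method names here are illustrative, not taken from the question):

import javax.sql.DataSource;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

@Configuration
public class DataSourceConfig {

    // Binds spring.db1-datasource.* (jdbc-url, username, password, driverClassName)
    @Bean
    @Primary
    @ConfigurationProperties("spring.db1-datasource")
    public DataSource db1DataSource() {
        return DataSourceBuilder.create().build();
    }

    // Binds spring.db2-datasource.*
    @Bean
    @ConfigurationProperties("spring.db2-datasource")
    public DataSource db2DataSource() {
        return DataSourceBuilder.create().build();
    }
}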
My docker-compose.yml:
version: "3.7"
networks:
kong-net:
volumes:
kong_data: {}
pghr:
external: true
pginv:
external: true
services:
#######################################
# Postgres: The database used by Kong
#######################################
kong-database:
image: postgres:11
container_name: kong-postgres
restart: on-failure
networks:
- kong-net
volumes:
- kong_data:/var/lib/postgresql/data
environment:
POSTGRES_USER: kong
POSTGRES_PASSWORD: kong
POSTGRES_DB: kong
ports:
- "5434:5434"
healthcheck:
test: ["CMD", "pg_isready", "-U", "kong"]
interval: 30s
timeout: 30s
retries: 3
#######################################
# Kong database migration
#######################################
kong-migration:
image: kong:2.0.3-alpine
command: kong migrations bootstrap
networks:
- kong-net
restart: on-failure
environment:
KONG_DATABASE: postgres
KONG_PG_HOST: kong-database
KONG_PG_DATABASE: kong
KONG_PG_USER: kong
KONG_PG_PASSWORD: kong
depends_on:
- kong-database
#######################################
# Kong: The API Gateway
#######################################
kong:
image: kong:2.0.3-alpine
restart: on-failure
container_name: kong
networks:
- kong-net
environment:
KONG_DATABASE: postgres
KONG_PG_HOST: kong-database
KONG_PG_DATABASE: kong
KONG_PG_USER: kong
KONG_PG_PASSWORD: kong
KONG_PROXY_LISTEN: 0.0.0.0:8000
KONG_PROXY_LISTEN_SSL: 0.0.0.0:8443
KONG_ADMIN_LISTEN: 0.0.0.0:8001
depends_on:
- kong-database
healthcheck:
test: ["CMD", "kong", "health"]
interval: 10s
timeout: 10s
retries: 10
ports:
- "8000:8000"
- "8001:8001"
- "8443:8443"
- "8444:8444"
#######################################
# Konga database prepare
#######################################
konga-prepare:
image: pantsel/konga:latest
command: "-c prepare -a postgres -u postgresql://kong:kong#kong-database:5434/konga"
networks:
- kong-net
restart: on-failure
links:
- kong-database
depends_on:
- kong-database
#######################################
# Konga: Kong GUI
#######################################
konga:
image: pantsel/konga:latest
container_name: konga
restart: always
networks:
- kong-net
environment:
DB_ADAPTER: postgres
DB_HOST: kong-database
DB_USER: kong
TOKEN_SECRET: FUEDASHFUAEHFEUAHFEU;
DB_DATABASE: kong
NODE_ENV: production
depends_on:
- kong-database
ports:
- "1337:1337"
But I get this in the docker-compose logs for my kong container:
kong | 2020/09/02 21:51:04 [error] 1#0: init_by_lua error: /usr/local/share/lua/5.1/kong/cmd/utils/migrations.lua:20: New migrations available; run 'kong migrations up' to proceed
kong | stack traceback:
kong | [C]: in function 'error'
kong | /usr/local/share/lua/5.1/kong/cmd/utils/migrations.lua:20: in function 'check_state'
kong | /usr/local/share/lua/5.1/kong/init.lua:392: in function 'init'
kong | init_by_lua:3: in main chunk
kong | nginx: [error] init_by_lua error: /usr/local/share/lua/5.1/kong/cmd/utils/migrations.lua:20: New migrations available; run 'kong migrations up' to proceed
kong | stack traceback:
kong | [C]: in function 'error'
kong | /usr/local/share/lua/5.1/kong/cmd/utils/migrations.lua:20: in function 'check_state'
kong | /usr/local/share/lua/5.1/kong/init.lua:392: in function 'init'
kong | init_by_lua:3: in main chunk
kong | 2020/09/02 21:51:08 [notice] 1#0: using the "epoll" event method
kong | 2020/09/02 21:51:08 [notice] 1#0: openresty/1.15.8.3
kong | 2020/09/02 21:51:08 [notice] 1#0: built by gcc 9.2.0 (Alpine 9.2.0)
kong | 2020/09/02 21:51:08 [notice] 1#0: OS: Linux 5.4.0-45-generic
kong | 2020/09/02 21:51:08 [notice] 1#0: getrlimit(RLIMIT_NOFILE): 1048576:1048576
kong | 2020/09/02 21:51:08 [notice] 1#0: start worker processes
kong | 2020/09/02 21:51:08 [notice] 1#0: start worker process 22
kong | 2020/09/02 21:51:08 [notice] 1#0: start worker process 23
kong | 2020/09/02 21:51:08 [notice] 1#0: start worker process 24
kong | 2020/09/02 21:51:08 [notice] 1#0: start worker process 25
kong | 2020/09/02 21:51:08 [notice] 1#0: start worker process 26
kong | 2020/09/02 21:51:08 [notice] 1#0: start worker process 27
kong | 2020/09/02 21:51:08 [notice] 1#0: start worker process 28
kong | 2020/09/02 21:51:08 [notice] 1#0: start worker process 29
kong | 2020/09/02 21:51:08 [notice] 1#0: start worker process 30
kong | 2020/09/02 21:51:08 [notice] 1#0: start worker process 31
kong | 2020/09/02 21:51:08 [notice] 1#0: start worker process 32
kong | 2020/09/02 21:51:08 [notice] 1#0: start worker process 33
kong | 2020/09/02 21:51:08 [notice] 22#0: *5 [lua] cache_warmup.lua:46: cache_warmup_single_entity(): Preloading 'services' into the core_cache..., context: init_worker_by_lua*
kong | 2020/09/02 21:51:08 [notice] 22#0: *5 [lua] cache_warmup.lua:85: cache_warmup_single_entity(): finished preloading 'services' into the core_cache (in 0ms), context: init_worker_by_lua*
kong | 2020/09/02 21:51:08 [notice] 22#0: *5 [lua] cache_warmup.lua:46: cache_warmup_single_entity(): Preloading 'plugins' into the core_cache..., context: init_worker_by_lua*
kong | 2020/09/02 21:51:08 [notice] 22#0: *5 [lua] cache_warmup.lua:85: cache_warmup_single_entity(): finished preloading 'plugins' into the core_cache (in 0ms), context: init_worker_by_lua*
On my konga-prepare container:
konga-prepare_1 | debug: Preparing database...
konga-prepare_1 | Using postgres DB Adapter.
konga-prepare_1 | Failed to connect to DB Error: connect ECONNREFUSED 192.168.64.2:5434
konga-prepare_1 | at TCPConnectWrap.afterConnect [as oncomplete] (net.js:1141:16) {
konga-prepare_1 | errno: 'ECONNREFUSED',
konga-prepare_1 | code: 'ECONNREFUSED',
konga-prepare_1 | syscall: 'connect',
konga-prepare_1 | address: '192.168.64.2',
konga-prepare_1 | port: 5434
konga-prepare_1 | }
On my konga container:
konga | _e: error: relation "public.konga_users" does not exist
konga | at Connection.parseE (/app/node_modules/sails-postgresql/node_modules/pg/lib/connection.js:539:11)
konga | at Connection.parseMessage (/app/node_modules/sails-postgresql/node_modules/pg/lib/connection.js:366:17)
konga | at Socket.<anonymous> (/app/node_modules/sails-postgresql/node_modules/pg/lib/connection.js:105:22)
konga | at Socket.emit (events.js:310:20)
konga | at Socket.EventEmitter.emit (domain.js:482:12)
konga | at addChunk (_stream_readable.js:286:12)
konga | at readableAddChunk (_stream_readable.js:268:9)
konga | at Socket.Readable.push (_stream_readable.js:209:10)
konga | at TCP.onStreamRead (internal/stream_base_commons.js:186:23) {
konga | length: 118,
konga | severity: 'ERROR',
konga | code: '42P01',
konga | detail: undefined,
konga | hint: undefined,
konga | position: '377',
konga | internalPosition: undefined,
konga | internalQuery: undefined,
konga | where: undefined,
konga | schema: undefined,
konga | table: undefined,
konga | column: undefined,
konga | dataType: undefined,
konga | constraint: undefined,
konga | file: 'parse_relation.c',
konga | line: '1159',
konga | routine: 'parserOpenTable'
konga | },
konga | rawStack: 'error: relation "public.konga_users" does not exist\n' +
konga | ' at Connection.parseE (/app/node_modules/sails-postgresql/node_modules/pg/lib/connection.js:539:11)\n' +
konga | ' at Connection.parseMessage (/app/node_modules/sails-postgresql/node_modules/pg/lib/connection.js:366:17)\n' +
konga | ' at Socket.<anonymous> (/app/node_modules/sails-postgresql/node_modules/pg/lib/connection.js:105:22)\n' +
konga | ' at Socket.emit (events.js:310:20)\n' +
konga | ' at Socket.EventEmitter.emit (domain.js:482:12)\n' +
konga | ' at addChunk (_stream_readable.js:286:12)\n' +
konga | ' at readableAddChunk (_stream_readable.js:268:9)\n' +
konga | ' at Socket.Readable.push (_stream_readable.js:209:10)\n' +
konga | ' at TCP.onStreamRead (internal/stream_base_commons.js:186:23)',
konga | details: 'Details: error: relation "public.konga_users" does not exist\n'
konga | }
I can't see the reason for the error; my docker-compose file seems to be configured correctly.
I configured the right port, but I keep getting these errors and haven't been able to fix them.
Production
In case of MySQL or PostgreSQL adapters, Konga will not perform db migrations when running in production mode.
You can manually perform the migrations by calling $ node ./bin/konga.js prepare , passing the args needed for the database connectivity.
So on the first run, you need to set NODE_ENV to development, not production.
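In compose terms, the adjustments would look roughly like this (a sketch, not a drop-in replacement; note that inside the kong-net network Postgres listens on its default port 5432 regardless of any published host port, which also explains the ECONNREFUSED on 5434):

konga-prepare:
  image: pantsel/konga:latest
  # talk to the container port, not the published host port
  command: "-c prepare -a postgres -u postgresql://kong:kong@kong-database:5432/konga"

konga:
  image: pantsel/konga:latest
  environment:
    DB_ADAPTER: postgres
    DB_HOST: kong-database
    DB_PORT: 5432
    DB_USER: kong
    DB_PASSWORD: kong
    # database name should match the one used in the prepare step
    DB_DATABASE: konga
    # development mode lets Konga run its migrations on first start
    NODE_ENV: development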