1

我正在遵循与此博客文章中类似的示例:

https://rmoff.net/2019/11/12/running-dockerised-kafka-connect-worker-on-gcp/

除了我不是在 GCP 上而是在本地运行 kafka connect worker 之外。

一切都很好,我运行了 docker-compose up 并启动了 Kafka Connect,但是当我尝试通过 CURL 创建源连接器的实例时,我收到以下模棱两可的消息(注意:Kafka Connect 日志中实际上没有输出任何日志):

{"error_code":400,"message":"Connector configuration is invalid and contains the following 1 error(s):\nUnable to connect to the server.\nYou can also find the above list of errors at the endpoint `/{connectorType}/config/validate`"}

我知道我可以连接到 Confluent Cloud,因为我看到以下主题正在被创建:

docker-connect-configs  
docker-connect-offsets  
docker-connect-status  

我的 docker-compose.yml 看起来像这样:

---
# Compose file version '2' kept as a quoted string so it is not parsed as a float.
version: '2'

services:
  # Single Kafka Connect worker pointed at a Confluent Cloud cluster.
  kafka-connect-01:
    image: confluentinc/cp-kafka-connect:5.4.0
    container_name: kafka-connect-01
    restart: always
    depends_on:
      # zookeeper/kafka are intentionally not local dependencies:
      # the broker lives in Confluent Cloud.
      # - zookeeper
      # - kafka
      - schema-registry
    ports:
      # Quoted per Compose convention: colon-separated digit runs can hit
      # YAML 1.1 sexagesimal parsing when left as plain scalars.
      - "8083:8083"
    environment:
      CONNECT_LOG4J_APPENDER_STDOUT_LAYOUT_CONVERSIONPATTERN: "[%d] %p %X{connector.context}%m (%c:%L)%n"
      CONNECT_BOOTSTRAP_SERVERS: "my-server-name.confluent.cloud:9092"
      # Environment values are strings; quote the numeric-looking port so the
      # YAML parser does not type it as an integer.
      CONNECT_REST_PORT: "8083"
      CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect-01"
      CONNECT_GROUP_ID: compose-connect-group
      # Internal topics the worker creates/uses in Confluent Cloud.
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      # Avro converters disabled; JSON converters are set below instead.
      # CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: 'http://my-server-name.confluent.cloud:8081'
      # CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://my-server-name.confluent.cloud:8081'
      CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO"
      CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR"
      # Confluent Cloud requires replication factor 3 for the internal topics.
      # NOTE(review): CONNECT_REPLICATION_FACTOR maps to `replication.factor`,
      # which is not a standard worker property — likely harmless; confirm.
      CONNECT_REPLICATION_FACTOR: "3"
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "3"
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "3"
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "3"
      CONNECT_PLUGIN_PATH: '/usr/share/java'
      CONNECT_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      # --- Confluent Cloud (SASL_SSL) connection for the worker itself ---
      CONNECT_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: "https"
      CONNECT_SASL_MECHANISM: PLAIN
      CONNECT_SECURITY_PROTOCOL: SASL_SSL
      # ${SASL_JAAS_CONFIG} is interpolated by Compose from the host env / .env.
      CONNECT_SASL_JAAS_CONFIG: "${SASL_JAAS_CONFIG}"
      # --- Same credentials for the embedded consumer and producer clients ---
      CONNECT_CONSUMER_SECURITY_PROTOCOL: SASL_SSL
      CONNECT_CONSUMER_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: https
      CONNECT_CONSUMER_SASL_MECHANISM: PLAIN
      CONNECT_CONSUMER_SASL_JAAS_CONFIG: "${SASL_JAAS_CONFIG}"
      CONNECT_PRODUCER_SECURITY_PROTOCOL: SASL_SSL
      CONNECT_PRODUCER_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: https
      CONNECT_PRODUCER_SASL_MECHANISM: PLAIN
      CONNECT_PRODUCER_SASL_JAAS_CONFIG: "${SASL_JAAS_CONFIG}"

    volumes:
      - db-leach:/db-leach/
      - $PWD/connectors:/usr/share/java/kafka-connect-jdbc/jars/
    # NOTE(review): `-c` has no script argument — the command appears truncated
    # in this paste; the container will fail to run a useful entrypoint as-is.
    command:
      - /bin/bash
      - -c

我已经运行了 dockerized mongo 实例,我想创建 mongo 源连接器,这是我的 CURL 请求:

    # Create or update the MongoDB source connector through the Connect REST API.
    # PUT /connectors/<name>/config is idempotent: it creates the connector if
    # absent, otherwise reconfigures it in place.
    # NOTE(review): the '${VAR}' splices close the single-quoted JSON so the
    # calling shell interpolates CCLOUD_BROKER_HOST / CCLOUD_API_KEY /
    # CCLOUD_API_SECRET — these must be exported before running this command.
    curl -X PUT http://localhost:8083/connectors/my-mongo-source-connector/config -H "Content-Type: application/json" -d '{
    "tasks.max":"1",
    "connector.class":"com.mongodb.kafka.connect.MongoSourceConnector",
    "connection.uri":"mongodb://mongo1:27017,mongo2:27017,mongo3:27017",
    "topic.prefix":"topic.prefix",
    "topic.suffix":"mySuffix",
    "database":"myMongoDB",
    "collection":"myMongoCollection",
    "copy.existing": "true",
    "output.format.key": "json",
    "output.format.value": "json",
    "change.stream.full.document": "updateLookup",
    "publish.full.document.only": "false",
    "confluent.topic.bootstrap.servers" : "'${CCLOUD_BROKER_HOST}':9092", 
    "confluent.topic.sasl.jaas.config" : "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"'${CCLOUD_API_KEY}'\" password=\"'${CCLOUD_API_SECRET}'\";", 
    "confluent.topic.security.protocol": "SASL_SSL", 
    "confluent.topic.ssl.endpoint.identification.algorithm": "https", 
    "confluent.topic.sasl.mechanism": "PLAIN" 
    }';

我错过了什么?

4

1 回答 1

1

我设法让它工作,这是一个正确的配置......

消息“无法连接到服务器”是因为我错误地部署了 mongo 实例,因此它与 Kafka Connect 或 Confluent Cloud 无关。

如果将来有人为此苦苦挣扎,我将把这个问题留作一个示例。我花了一段时间才弄清楚如何为连接到 Confluent Cloud 的 Kafka Connect 配置 docker-compose。

于 2021-06-11T14:27:51.633 回答