
Kafka-Elasticsearch Sink Connector not working

I am trying to send data from Kafka to Elasticsearch. I have verified that my Kafka broker is working, because messages I produce to a topic can be read by a Kafka consumer. However, when I try to connect Kafka to Elasticsearch, I get the following error.

Command:

connect-standalone etc/schema-registry/connect-avro-standalone.properties \
etc/kafka-connect-elasticsearch/quickstart-elasticsearch.properties

Error:

ERROR Stopping due to error (org.apache.kafka.connect.cli.ConnectStandalone)
org.apache.kafka.connect.errors.ConnectException: Failed to connect to and describe Kafka cluster. Check worker's broker connection and security properties.
    at org.apache.kafka.connect.util.ConnectUtils.lookupKafkaClusterId(ConnectUtils.java:64)
    at org.apache.kafka.connect.util.ConnectUtils.lookupKafkaClusterId(ConnectUtils.java:45)
    at org.apache.kafka.connect.cli.ConnectStandalone.main(ConnectStandalone.java:83)
Caused by: java.util.concurrent.ExecutionException: org.apache.kafka.common.errors.TimeoutException: Timed out waiting for a node assignment.
    at org.apache.kafka.common.internals.KafkaFutureImpl.wrapAndThrow(KafkaFutureImpl.java:45)
    at org.apache.kafka.common.internals.KafkaFutureImpl.access$000(KafkaFutureImpl.java:32)
    at org.apache.kafka.common.internals.KafkaFutureImpl$SingleWaiter.await(KafkaFutureImpl.java:89)
    at org.apache.kafka.common.internals.KafkaFutureImpl.get(KafkaFutureImpl.java:260)
    at org.apache.kafka.connect.util.ConnectUtils.lookupKafkaClusterId(ConnectUtils.java:58)
    ... 2 more
Caused by: org.apache.kafka.common.errors.TimeoutException: Timed out waiting for a node assignment.

My Docker Compose file:

version: '3'
services:
  zookeeper:
    container_name : zookeeper
    image: zookeeper
    ports:
     - 2181:2181
     - 2888:2888
     - 3888:3888

  kafka:
    container_name : kafka
    image: bitnami/kafka:1.0.0-r5
    depends_on:
      - zookeeper
    ports:
      - "9092:9092"
    environment:
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_BROKER_ID: "42"
      KAFKA_ADVERTISED_HOST_NAME: "kafka"
      ALLOW_PLAINTEXT_LISTENER: "yes" 
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:9092

  elasticsearch:
    container_name : elasticsearch
    image: docker.elastic.co/elasticsearch/elasticsearch:7.8.0
    environment:
      - node.name=elasticsearch
      - cluster.name=es-docker-cluster
      - discovery.seed_hosts=elasticsearch
      - bootstrap.memory_lock=false
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
      - discovery.type=single-node
    ulimits:
      memlock:
        soft: -1
        hard: -1
    volumes:
      - data99:/usr/share/elasticsearch/data
    ports:
      - 9200:9200
  kibana:
    container_name : kibana
    image: docker.elastic.co/kibana/kibana:7.8.0
    # environment:
      # - SERVER_NAME=Local kibana
      # - SERVER_HOST=0.0.0.0
      # - ELASTICSEARCH_URL=elasticsearch:9400
    ports:
      - "5601:5601"
    depends_on:
      - elasticsearch

  kafka-connect:
    container_name : kafka-connect
    image: confluentinc/cp-kafka-connect:5.3.1
    ports:
      - 8083:8083
    depends_on:
      - zookeeper
      - kafka
    volumes:
      - $PWD/connect-plugins:/connect-plugins
    environment:
      CONNECT_BOOTSTRAP_SERVERS: kafka:9092
      CONNECT_REST_ADVERTISED_HOST_NAME: "localhost"
      CONNECT_REST_PORT: 8083
      CONNECT_GROUP_ID: kafka-connect
      CONNECT_CONFIG_STORAGE_TOPIC: docker-kafka-connect-configs
      CONNECT_OFFSET_STORAGE_TOPIC: docker-kafka-connect-offsets
      CONNECT_STATUS_STORAGE_TOPIC: docker-kafka-connect-status
      CONNECT_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_KEY_CONVERTER-SCHEMAS_ENABLE: "false"
      CONNECT_VALUE_CONVERTER-SCHEMAS_ENABLE: "false"
      CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect"
      CONNECT_LOG4J_ROOT_LOGLEVEL: "ERROR"
      CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR"
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_TOPICS: "test-elasticsearch-sink"
      CONNECT_TYPE_NAME: "type.name=kafka-connect"
      CONNECT_PLUGIN_PATH: '/usr/share/java' #'/usr/share/java'
      # Interceptor config
      CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor"
      CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor"
      CLASSPATH: /usr/share/java/monitoring-interceptors/monitoring-interceptors-5.3.1.jar
      CONNECT_KAFKA_HEAP_OPTS: "-Xms256m -Xmx512m"

volumes:
  data99:
    driver: local

I have looked at several other questions and answers but could not find a solution to this problem.

Thanks in advance!

The Connect container already starts a Connect Distributed server. You should configure the Elasticsearch connector over HTTP with JSON properties, rather than exec-ing a shell into the container and running connect-standalone, which by default expects a broker inside the container itself.
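For example, here is a minimal sketch of registering the sink through the Connect REST API, assuming port 8083 is published to the host as in the compose file above, the topic is test-elasticsearch-sink (matching CONNECT_TOPICS), and the connector name elasticsearch-sink is an arbitrary choice:

# Register the Elasticsearch sink connector on the already-running distributed worker
curl -X POST http://localhost:8083/connectors \
  -H "Content-Type: application/json" \
  -d '{
    "name": "elasticsearch-sink",
    "config": {
      "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector",
      "tasks.max": "1",
      "topics": "test-elasticsearch-sink",
      "connection.url": "http://elasticsearch:9200",
      "type.name": "kafka-connect",
      "key.ignore": "true",
      "schema.ignore": "true"
    }
  }'

You can then check whether the connector and its task are running with curl http://localhost:8083/connectors/elasticsearch-sink/status.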

Similarly, the Elastic quickstart file by default expects Elasticsearch to be running within the Connect container.
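In other words, if you did stay with the quickstart properties, its connection.url (which points at a localhost address by default) would need to reference the elasticsearch container instead. A rough sketch of the adjusted quickstart-elasticsearch.properties, with the other values mirroring the standard quickstart defaults:

# quickstart-elasticsearch.properties, pointing at the elasticsearch container
name=elasticsearch-sink
connector.class=io.confluent.connect.elasticsearch.ElasticsearchSinkConnector
tasks.max=1
topics=test-elasticsearch-sink
key.ignore=true
connection.url=http://elasticsearch:9200
type.name=kafka-connect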

