
I can run the Storm Kafka topology with a LocalCluster, but I can't get it to work with StormSubmitter. Here is my topology code.

Can anyone help me figure this out? :)

package com.org.kafka;

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
import org.apache.storm.generated.AlreadyAliveException;
import org.apache.storm.generated.AuthorizationException;
import org.apache.storm.generated.InvalidTopologyException;
import org.apache.storm.kafka.KafkaSpout;
import org.apache.storm.kafka.SpoutConfig;
import org.apache.storm.kafka.StringScheme;
import org.apache.storm.kafka.ZkHosts;
import org.apache.storm.spout.SchemeAsMultiScheme;
import org.apache.storm.topology.TopologyBuilder;

import kafka.api.OffsetRequest;

public class KafkaTopology {

    public static void main(String[] args)
            throws AlreadyAliveException, InvalidTopologyException, AuthorizationException {

        ZkHosts zkHosts = new ZkHosts("localhost:2181");

        // Read from the "secondTest" topic, storing consumer offsets under the id "id7".
        SpoutConfig kafkaConfig = new SpoutConfig(zkHosts, "secondTest", "", "id7");
        kafkaConfig.scheme = new SchemeAsMultiScheme(new StringScheme());
        kafkaConfig.startOffsetTime = OffsetRequest.EarliestTime();

        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("KafkaSpout", new KafkaSpout(kafkaConfig), 1);
        builder.setBolt("SentenceBolt", new SentenceBolt(), 1).globalGrouping("KafkaSpout");
        builder.setBolt("PrinterBolt", new PrinterBolt(), 1).globalGrouping("SentenceBolt");

        // Leftover from the LocalCluster run; it has no effect on a topology
        // submitted through StormSubmitter.
        LocalCluster cluster = new LocalCluster();

        Config conf = new Config();
        StormSubmitter.submitTopology("KafkaStormToplogy", conf, builder.createTopology());

        try {
            System.out.println("Waiting to consume from kafka");
            Thread.sleep(10000);
        } catch (Exception exception) {
            System.out.println("Thread interrupted exception : " + exception);
        }

        cluster.killTopology("KafkaStormToplogy");
        cluster.shutdown();
    }
}

I found the following exception in the worker.log file.

However, the terminal shows "Finished submitting topology: KafkaStormToplogy".

2018-01-24 11:58:38.941 o.a.s.d.worker main [ERROR] Error on initialization of server mk-worker
java.lang.RuntimeException: java.io.InvalidClassException: org.apache.storm.kafka.SpoutConfig; local class incompatible: stream classdesc serialVersionUID = -1247769246497567352, local class serialVersionUID = 6814635004761021338
    at org.apache.storm.utils.Utils.javaDeserialize(Utils.java:254) ~[storm-core-1.0.5.jar:1.0.5]
    at org.apache.storm.utils.Utils.getSetComponentObject(Utils.java:504) ~[storm-core-1.0.5.jar:1.0.5]
    at org.apache.storm.daemon.task$get_task_object.invoke(task.clj:74) ~[storm-core-1.0.5.jar:1.0.5]
    at org.apache.storm.daemon.task$mk_task_data$fn__4609.invoke(task.clj:177) ~[storm-core-1.0.5.jar:1.0.5]
    at org.apache.storm.util$assoc_apply_self.invoke(util.clj:931) ~[storm-core-1.0.5.jar:1.0.5]
    at org.apache.storm.daemon.task$mk_task_data.invoke(task.clj:170) ~[storm-core-1.0.5.jar:1.0.5]
    at org.apache.storm.daemon.task$mk_task.invoke(task.clj:181) ~[storm-core-1.0.5.jar:1.0.5]
    at org.apache.storm.daemon.executor$mk_executor$fn__4830.invoke(executor.clj:371) ~[storm-core-1.0.5.jar:1.0.5]
    at clojure.core$map$fn__4553.invoke(core.clj:2622) ~[clojure-1.7.0.jar:?]
    at clojure.lang.LazySeq.sval(LazySeq.java:40) ~[clojure-1.7.0.jar:?]
    at clojure.lang.LazySeq.seq(LazySeq.java:49) ~[clojure-1.7.0.jar:?]
    at clojure.lang.RT.seq(RT.java:507) ~[clojure-1.7.0.jar:?]
    at clojure.core$seq__4128.invoke(core.clj:137) ~[clojure-1.7.0.jar:?]
    at clojure.core.protocols$seq_reduce.invoke(protocols.clj:30) ~[clojure-1.7.0.jar:?]
    at clojure.core.protocols$fn__6506.invoke(protocols.clj:101) ~[clojure-1.7.0.jar:?]

1 Answer


I think this is either because you have different versions of storm-kafka on your Nimbus classpath and on your worker classpath, or because you are running Nimbus and the workers on different JDKs. SpoutConfig (https://github.com/apache/storm/blob/1.x-branch/external/storm-kafka/src/jvm/org/apache/storm/kafka/SpoutConfig.java) should declare a serialVersionUID, but it doesn't. See https://stackoverflow.com/a/285809/8845188 for reference. As I understand it, when a class doesn't declare one, the serialVersionUID is computed by the JVM at runtime, and different JDKs may compute different values for the same class.
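As a quick way to check that theory (my own sketch, not part of the original answer), you can print the serialVersionUID that a given JVM and storm-kafka jar compute for SpoutConfig; running it once with the JDK/classpath Nimbus uses and once with the worker's should reproduce the two mismatched values from the InvalidClassException:

import java.io.ObjectStreamClass;

import org.apache.storm.kafka.SpoutConfig;

// Prints the default serialVersionUID this particular JVM/classpath computes
// for SpoutConfig. Comparing the output on the Nimbus side and on the worker
// side should show the two different values reported in the stack trace above.
public class SerialVersionCheck {
    public static void main(String[] args) {
        long uid = ObjectStreamClass.lookup(SpoutConfig.class).getSerialVersionUID();
        System.out.println("SpoutConfig serialVersionUID = " + uid);
    }
}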

I would clone storm-kafka, add the missing serialVersionUID field to SpoutConfig, build storm-kafka and try again. I've raised https://issues.apache.org/jira/browse/STORM-2911 to track fixing this. You're welcome to take a look.
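For illustration only, the shape of that change is just an explicit serialVersionUID field on the serializable class. The class below is a made-up stand-in, not the real SpoutConfig, and the value is arbitrary, but once the field is declared, every JVM uses the declared value instead of computing its own:

import java.io.Serializable;

// Hypothetical stand-in for a serializable spout configuration class. With an
// explicit serialVersionUID, the value written by Nimbus's JVM always matches
// what the worker's JVM expects, regardless of JDK or compiler differences.
public class MySpoutSettings implements Serializable {
    private static final long serialVersionUID = 1L; // any fixed value, as long as it never changes

    public String topic;
    public String zkRoot;
    public String clientId;
}

Until a patched build is on both classpaths, the simpler workaround implied by the diagnosis above is to make sure Nimbus and all workers use the exact same storm-kafka jar and the same JDK.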

answered 2018-01-24T18:33:16.170