
I am trying to send messages from my Kafka producer and stream them into Spark Streaming, but when I run my application with spark-submit I get the following error.

Error:

 Exception in thread "main" java.lang.NoClassDefFoundError: kafka/serializer/StringDecoder
        at com.spark_stream.Main.main(Main.java:37)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:736)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:185)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:210)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:124)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.lang.ClassNotFoundException: kafka.serializer.StringDecoder
        at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
        ... 10 more

The application code is as follows:

Main.java

package com.spark_stream;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

import kafka.serializer.StringDecoder;

public class Main {

    public static void main(String[] args) {

        System.out.println("spark started!");

        // Local Spark context and a streaming context with a 2-second batch interval
        SparkConf conf = new SparkConf()
                .setAppName("kafka-sandbox")
                .setMaster("local[*]");
        JavaSparkContext sc = new JavaSparkContext(conf);
        JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(2000));

        // Kafka broker and the topic to read from
        Map<String, String> kafkaParams = new HashMap<String, String>();
        kafkaParams.put("metadata.broker.list", "localhost:9092");
        Set<String> topics = Collections.singleton("speed");

        // Direct (receiver-less) Kafka stream of <key, value> string pairs
        JavaPairInputDStream<String, String> directKafkaStream = KafkaUtils.createDirectStream(ssc,
                String.class, String.class, StringDecoder.class, StringDecoder.class, kafkaParams, topics);

        directKafkaStream.foreachRDD(rdd -> {
            System.out.println("--- New RDD with " + rdd.partitions().size()
                    + " partitions and " + rdd.count() + " records");
            rdd.foreach(record -> System.out.println(record._2));
        });

        System.out.println("connection completed");

        ssc.start();
        ssc.awaitTermination();

        System.out.println("spark ended!");
    }

}

pom.xml

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>com.spark_stream</groupId>
  <artifactId>com.spark_stream</artifactId>
  <version>0.0.1-SNAPSHOT</version>


    <dependencies>

    <dependency> <!-- Spark dependency -->
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.10</artifactId>
        <version>1.6.0</version>
    </dependency>

    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-streaming_2.10</artifactId>
        <version>1.6.0</version>
    </dependency>

    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-streaming-kafka_2.10</artifactId>
        <version>1.6.0</version>
    </dependency>


</dependencies>

    <properties>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
    </properties>
</project>

I cannot find a solution for this error. Any help would be appreciated.


3 Answers


Take a look at the documentation: http://spark.apache.org/docs/latest/submitting-applications.html#launching-applications-with-spark-submit

More specifically, this part:

Path to a bundled jar including your application and all dependencies.

Meanwhile, your pom.xml clearly shows that the jar you are building does not bundle its dependencies. That is why spark-submit cannot find the class kafka.serializer.StringDecoder.

You probably want to use a plugin that packages your dependencies inside your jar; the Maven Assembly Plugin can help you with that, as in the sketch below.
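
A minimal sketch of such a configuration with the maven-assembly-plugin and its standard jar-with-dependencies descriptor; the plugin version is illustrative, and the mainClass value is taken from the question's code rather than from this answer:

<build>
    <plugins>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-assembly-plugin</artifactId>
            <version>2.6</version>
            <configuration>
                <descriptorRefs>
                    <!-- bundle all dependencies into a single uber jar -->
                    <descriptorRef>jar-with-dependencies</descriptorRef>
                </descriptorRefs>
                <archive>
                    <manifest>
                        <!-- main class from the question's application -->
                        <mainClass>com.spark_stream.Main</mainClass>
                    </manifest>
                </archive>
            </configuration>
            <executions>
                <execution>
                    <id>make-assembly</id>
                    <phase>package</phase>
                    <goals>
                        <goal>single</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>

You may also want to mark the spark-core and spark-streaming dependencies as <scope>provided</scope> so they are not packaged into the uber jar, since spark-submit already supplies them at runtime.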

Answered 2017-03-02T23:02:57.450

It seems the compiler cannot find the Kafka classes because the kafka jar is not included in your pom file. Try adding the following dependency to your pom, and check which Kafka version you are actually using so you can adjust it accordingly.

<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka_2.10 -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.10</artifactId>
    <version>0.8.0</version>
</dependency>
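
If you would rather not change the build, another way to get the Kafka classes onto the runtime classpath is spark-submit's --packages flag, which resolves the artifact and its transitive dependencies at submit time. A sketch, assuming the Spark 1.6.0 / Scala 2.10 artifacts from the question's pom and the default jar name that pom would produce:

spark-submit --class com.spark_stream.Main \
    --master local[*] \
    --packages org.apache.spark:spark-streaming-kafka_2.10:1.6.0 \
    target/com.spark_stream-0.0.1-SNAPSHOT.jar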
Answered 2017-03-03T09:43:41.570

This usually happens when you have not bundled all the dependencies your application needs into its assembly; try building an uber jar that contains all of them.

I have added below a part of a sample pom file that will do the same thing.

<build>
        <sourceDirectory>src/main/scala</sourceDirectory>
        <plugins>
            <plugin>
                <groupId>net.alchim31.maven</groupId>
                <artifactId>scala-maven-plugin</artifactId>
                <version>3.1.6</version>
                <executions>
                    <execution>
                        <phase>compile</phase>
                        <goals>
                            <goal>compile</goal>
                            <goal>testCompile</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>

            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>2.3</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <shadedArtifactAttached>true</shadedArtifactAttached>
                            <filters>
                                <filter>
                                    <artifact>*:*</artifact>
                                    <excludes>
                                        <exclude>META-INF/*.SF</exclude>
                                        <exclude>META-INF/*.DSA</exclude>
                                        <exclude>META-INF/*.RSA</exclude>
                                    </excludes>
                                </filter>
                            </filters>
                            <artifactSet>
                                <includes>
                                    <include>*:*</include>
                                </includes>
                            </artifactSet>
                            <transformers>
                                <transformer
                                        implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
                                    <resource>reference.conf</resource>
                                </transformer>
                            </transformers>
                        </configuration>
                    </execution>
                </executions>
            </plugin>

        </plugins>
</build>
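
With the shaded jar built (shadedArtifactAttached attaches it with a -shaded classifier), the build and submit steps would look roughly like this; the jar name is an assumption based on the artifactId and version in the question's pom:

mvn clean package
spark-submit --class com.spark_stream.Main --master local[*] \
    target/com.spark_stream-0.0.1-SNAPSHOT-shaded.jar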
Answered 2018-11-21T05:26:19.530