
java.lang.NoClassDefFoundError: org/apache/spark/Logging


I keep getting the following error. Can someone help me?

Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/spark/Logging
    at java.lang.ClassLoader.defineClass1(Native Method)
    at java.lang.ClassLoader.defineClass(ClassLoader.java:763)
    at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142)
    at java.net.URLClassLoader.defineClass(URLClassLoader.java:467)
    at java.net.URLClassLoader.access$100(URLClassLoader.java:73)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:368)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:362)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:361)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    at com.datastax.spark.connector.japi.DStreamJavaFunctions.<init>(DStreamJavaFunctions.java:24)
    at com.datastax.spark.connector.japi.CassandraStreamingJavaUtil.javaFunctions(CassandraStreamingJavaUtil.java:55)
    at SparkStream.main(SparkStream.java:51)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147)
Caused by: java.lang.ClassNotFoundException: org.apache.spark.Logging
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    ... 20 more

It occurs when I compile the code below. I have searched the web but have not found a solution. The error appears when I add saveToCassandra.

import com.datastax.spark.connector.japi.CassandraStreamingJavaUtil;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.streaming.Duration;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaPairInputDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;
import org.apache.spark.streaming.kafka.KafkaUtils;

import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import static com.datastax.spark.connector.japi.CassandraJavaUtil.mapToRow;

/**
 * Created by jonas on 10/10/16.
 */
public class SparkStream implements Serializable {
    public static void main(String[] args) throws Exception {
        SparkConf conf = new SparkConf(true)
                .setAppName("TwitterToCassandra")
                .setMaster("local[*]")
                .set("spark.cassandra.connection.host", "127.0.0.1")
                .set("spark.cassandra.connection.port", "9042");
        JavaSparkContext sc = new JavaSparkContext(conf);
        JavaStreamingContext ssc = new JavaStreamingContext(sc, new Duration(5000));

        Map<String, String> kafkaParams = new HashMap<>();
        kafkaParams.put("bootstrap.servers", "localhost:9092");
        Set<String> topics = Collections.singleton("Test");

        JavaPairInputDStream<String, String> directKafkaStream = KafkaUtils.createDirectStream(
                ssc,
                String.class,
                String.class,
                kafka.serializer.StringDecoder.class,
                kafka.serializer.StringDecoder.class,
                kafkaParams,
                topics
        );

        JavaDStream<Tweet> createTweet = directKafkaStream.map(s -> createTweet(s._2));


        CassandraStreamingJavaUtil.javaFunctions(createTweet)
                .writerBuilder("mykeyspace", "rawtweet", mapToRow(Tweet.class))
                .saveToCassandra();

        ssc.start();
        ssc.awaitTermination();

    }


    public static Tweet createTweet(String rawKafka){
        String[] splitted = rawKafka.split("\\|");
        Tweet t = new Tweet(splitted[0], splitted[1], splitted[2], splitted[3]);
        return t;
    }
}
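
For context, the Tweet class is not shown in the question. Below is a minimal sketch of what it presumably looks like, assuming four illustrative string fields; mapToRow(Tweet.class) maps JavaBean properties to Cassandra columns by name, so the real field names would have to match the rawtweet table:

    import java.io.Serializable;

    // Hypothetical reconstruction of the Tweet bean (not shown in the question);
    // the field names here are illustrative only.
    public class Tweet implements Serializable {
        private String id;
        private String user;
        private String text;
        private String date;

        public Tweet() {} // no-arg constructor needed for reflection-based mapping

        public Tweet(String id, String user, String text, String date) {
            this.id = id;
            this.user = user;
            this.text = text;
            this.date = date;
        }

        // Getters/setters expose the properties the connector writes to Cassandra.
        public String getId() { return id; }
        public void setId(String id) { this.id = id; }
        public String getUser() { return user; }
        public void setUser(String user) { this.user = user; }
        public String getText() { return text; }
        public void setText(String text) { this.text = text; }
        public String getDate() { return date; }
        public void setDate(String date) { this.date = date; }
    }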

My pom is as follows.

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.company</groupId>
    <artifactId>Sentiment</artifactId>
    <version>1.0-SNAPSHOT</version>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <repositories>
        <repository>
            <id>twitter4j.org</id>
            <name>twitter4j.org Repository</name>
            <url>http://twitter4j.org/maven2</url>
            <releases>
                <enabled>true</enabled>
            </releases>
            <snapshots>
                <enabled>true</enabled>
            </snapshots>
        </repository>
    </repositories>

    <dependencies>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.11</artifactId>
            <version>2.0.1</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.10</artifactId>
            <version>2.0.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.10</artifactId>
            <version>2.0.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming-kafka-0-8_2.11</artifactId>
            <version>2.0.1</version>
        </dependency>


        <!-- https://mvnrepository.com/artifact/org.scala-lang/scala-library -->
        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>2.11.8</version>
        </dependency>



        <!-- https://mvnrepository.com/artifact/com.datastax.spark/spark-cassandra-connector_2.10 -->
        <dependency>
            <groupId>com.datastax.spark</groupId>
            <artifactId>spark-cassandra-connector_2.10</artifactId>
            <version>1.6.2</version>
        </dependency>

        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_2.10</artifactId>
            <version>0.9.0.0</version>
        </dependency>

        <dependency>
            <groupId>org.twitter4j</groupId>
            <artifactId>twitter4j-core</artifactId>
            <version>[4.0,)</version>
        </dependency>

        <dependency>
            <groupId>org.twitter4j</groupId>
            <artifactId>twitter4j-stream</artifactId>
            <version>4.0.4</version>
        </dependency>

        <dependency>
            <groupId>org.twitter4j</groupId>
            <artifactId>twitter4j-async</artifactId>
            <version>4.0.4</version>
        </dependency>
    </dependencies>


</project>

org.apache.spark.Logging is available in Spark 1.5.2 and earlier. It is not in 2.0.0. Please change your versions as follows:

    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-streaming_2.11</artifactId>
        <version>1.5.2</version>
    </dependency>

    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.10</artifactId>
        <version>1.5.2</version>
    </dependency>

    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-sql_2.10</artifactId>
        <version>1.5.2</version>
    </dependency>

    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-streaming-kafka-0-8_2.11</artifactId>
        <version>1.6.2</version>
    </dependency>

The error is because you are using the Spark 2.0 libraries with a connector built for Spark 1.6, which looks for the Spark 1.6 Logging class. Use the 2.0.5 version of the connector instead.
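
For example, something along these lines (a sketch: the 2.0.5 connector with the Scala 2.11 artifact, matching the Spark 2.x dependencies above):

    <dependency>
        <groupId>com.datastax.spark</groupId>
        <artifactId>spark-cassandra-connector_2.11</artifactId>
        <version>2.0.5</version>
    </dependency>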

This happens because the org.apache.spark.Logging class is missing in versions after 1.5.2, as everyone has said (only org.apache.spark.internal.Logging exists in the later versions).

But there does not seem to be any Maven-side solution that repairs this dependency, so I simply tried adding the class to lib manually. Here is how I worked around the problem:

  1. Package the Scala class org.apache.spark.internal.Logging into a public jar, or download it from https://raw.githubusercontent.com/swordsmanliu/SparkStreamingHbase/master/lib/spark-core_2.11-1.5.2.logging.jar (thanks to its uploader).

  2. Move the jar into your Spark cluster's jars directory.

  3. Submit your project again; hopefully it helps. A sketch of steps 2 and 3 follows below.
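
A rough sketch of steps 2 and 3, assuming a standalone Spark installation; the paths, main class, and jar names are illustrative:

    # step 2: copy the jar containing the missing Logging class into Spark's jars directory
    cp spark-core_2.11-1.5.2.logging.jar $SPARK_HOME/jars/

    # step 3: resubmit the application
    spark-submit --class your.main.Class your-project.jar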

I got my solution by changing the jar mentioned above.

Initially, I downgraded the jar for spark-kafka-streaming:

<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming-kafka-0-8_2.11</artifactId>
    <version>2.1.1</version>
</dependency>

I also removed the multiple sl4j-log4j.jars and log4j.jars that I had added externally, outside the Spark and Kafka jar libraries.

Download spark-core_2.11-1.5.2.logging.jar and pass it with the --jars option:

spark-submit --class com.SentimentTwiteer --packages "org.apache.spark:spark-streaming-twitter_2.11:1.6.3" --jars /root/Desktop/spark-core_2.11-1.5.2.logging.jar /root/Desktop/SentimentTwiteer.jar consumerKey consumerSecret accessToken accessTokenSecret yoursearchTag

https://github.com/sinhavicky4/SentimentTwiteer

One possible cause of this problem is a lib and class conflict. I faced this issue and solved it using some Maven exclusions:

<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.11</artifactId>
    <version>2.0.0</version>
    <scope>provided</scope>
    <exclusions>
        <exclusion>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
        </exclusion>
    </exclusions>
</dependency>

<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming_2.11</artifactId>
    <version>2.0.0</version>
    <scope>provided</scope>
</dependency>

<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming-kafka-0-10_2.11</artifactId>
    <version>2.0.0</version>
    <exclusions>
        <exclusion>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
        <exclusion>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
        </exclusion>
    </exclusions>
</dependency>

This pom.xml solved my problem:

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.10</artifactId>
            <version>1.6.1</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.10</artifactId>
            <version>1.6.1</version>
        </dependency>

I downloaded the jar and used --jars in spark-submit, which worked for me:

    spark-submit --class com.SentimentTwiteer --packages "org.apache.spark:spark-streaming-twitter_2.11:1.6.3" --jars /root/Desktop/spark-core_2.11-1.5.2.logging.jar /root/Desktop/SentimentTwiteer.jar XX XX XX XX

Download the jar below, put it in your libraries, and it will work as expected.

https://raw.githubusercontent.com/swordsmanliu/SparkStreamingHbase/master/lib/spark-core_2.11-1.5.2.logging.jar

If you are using IntelliJ, just check the "Include dependencies with Provided scope" box; this will fix the problem for you without having to edit the pom or download files manually.

It is a version issue; try using the latest versions:

<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-sql_2.11</artifactId>
    <version>2.1.0</version>
    <scope>provided</scope>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming-kafka-0-8_2.11</artifactId>
    <version>2.1.1</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-streaming_2.11</artifactId>
    <version>2.1.0</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-core_2.11</artifactId>
    <version>2.1.0</version>
</dependency>

The Logging jar is missing from your list of dependency jars. Try downloading the "spark-core_2.11-1.5.2.logging" jar from the mvn repository, then add it to your Spark project as an external jar, and you will not get the "java.lang.NoClassDefFoundError: org/apache/spark/Logging" error. Depending on your Scala version, you can download the matching jar {2.10, 2.11, etc.}.
