简体   繁体   中英

Exception in thread "main" java.lang.NoSuchMethodError: org.apache.spark.sql.SQLContext.sql(Ljava/lang/String;)Lorg/apache/spark/sql/Dataset;

I am trying to use Spark SQL from Scala IDE, which I set up without Maven. I have Spark 1.5.1 in the production environment and am trying to execute the following code through
spark-submit --class com.dataflair.spark.Wordcount --master cluster /home/appserver/Sparkjob.jar

package com.dataflair.spark
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

import org.apache.spark.SparkContext._

import org.apache.spark.sql.SQLContext

import org.slf4j.Logger
import org.slf4j.LoggerFactory;

object Wordcount {

  /**
   * Application entry point: runs a SQL query against a pre-existing table
   * and re-registers the result as a temp table named "DFUO".
   *
   * @param args command-line arguments (currently unused)
   */
  def main(args: Array[String]): Unit = {
    // Build the Spark configuration; master/deploy settings are supplied by spark-submit.
    val conf = new SparkConf()
      .setAppName("WordCount")

    // Create the Spark context from the configuration.
    val sc = new SparkContext(conf)

    // SQLContext is the Spark 1.x entry point for Spark SQL.
    val sqlcontext = new org.apache.spark.sql.SQLContext(sc)

    // Run the query. NOTE(review): this assumes a table named Temp_UFDR_t1 is
    // already registered (e.g. via a Hive metastore) — confirm it exists, or the
    // query will fail at runtime with an unresolved-relation error.
    val dp_new = sqlcontext.sql("  SELECT * FROM  Temp_UFDR_t1 ")

    // Make the query result queryable again under the name DFUO.
    // registerTempTable is the Spark 1.x API (renamed createOrReplaceTempView in 2.x).
    dp_new.registerTempTable("DFUO")

    // Stop the Spark context; empty parens mark the call as side-effecting.
    sc.stop()
  }
}

I am getting the following issue

Exception in thread "main" java.lang.NoSuchMethodError: org.apache.spark.sql.SQLContext.sql(Ljava/lang/String;)Lorg/apache/spark/sql/Dataset;
        at com.dataflair.spark.Wordcount$.main(Wordcount.scala:26)
        at com.dataflair.spark.Wordcount.main(Wordcount.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:766)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:183)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:208)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:123)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

As you said, you are using Spark 1.5.1, but the Dataset API does not exist in Spark 1.5.1 — in that version, SQLContext.sql returns a DataFrame, not a Dataset.

You have probably compiled your code against Spark 1.6 or above (where SQLContext.sql returns a Dataset) and are deploying/running the application on a Spark 1.5.1 cluster; the binary signature in the NoSuchMethodError confirms the version mismatch.

The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address.Any question please contact:yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM