我有以下代码，并且已经把它打包成一个 jar 文件了。
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.SQLContext
/** Immutable record for one student row read via Spark SQL.
  *
  * @param StudentID    unique identifier of the student
  * @param creationdate presumably the time the record was created — confirm against the Hive schema
  * @param mark1        first mark, kept as a raw string
  * @param mark2        second mark, kept as a raw string
  */
case class Student(
    StudentID: Long,
    creationdate: java.sql.Timestamp,
    mark1: String,
    mark2: String
)
object Main {
def main(args: Array[String]): Unit = {
  // Build exactly ONE SparkSession. All configuration must go through the
  // builder BEFORE getOrCreate(): the original code called System.setProperty
  // and conf.set AFTER the session existed, which has no effect on it.
  val spark = SparkSession
    .builder()
    .appName("Spark In Action")
    .master("local")
    .enableHiveSupport()
    .config("hive.metastore.uris",
      "thrift://internal-shared-hive-metastore-elb-550897717.us-west-2.elb.amazonaws.com:9083")
    .config("hive.metastore.execute.setugi", "false")
    .getOrCreate()

  // BUG FIX: do not construct a second SparkContext (`new SparkContext(conf)`).
  // Two contexts in one JVM are unsupported — that is why the original needed
  // the spark.driver.allowMultipleContexts workaround. Reuse the session's
  // context via spark.sparkContext if a SparkContext is ever needed.

  import spark.implicits._

  // BUG FIX: the original query read "fromstudent" (missing space between the
  // FROM keyword and the table name), which fails to parse at runtime.
  val allRecordsDF = spark.sql("select * from student")
}
}
我得到了下面的异常。我原以为 spark-submit 的 --class 选项指定的就是 jar 包中 Spark 程序的主类。下面是我执行的命令：
spark-submit --class "Main" s3://Users/test/test_2.10-1.0.jar
有人能看出问题出在哪里吗？
java.lang.ClassNotFoundException: Main
at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at org.apache.spark.util.Utils$.classForName(Utils.scala:229)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:695)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Command exiting with ret '101'
（来源：https://stackoverflow.com/questions/47700972）
复制相似问题