import org.apache.spark.sql._
object AOTremote1 {
/**
 * Entry point: builds a YARN-backed SparkSession, reads a headered CSV
 * from HDFS and prints its first rows.
 *
 * @param args command-line arguments (unused)
 *
 * NOTE(review): the UnknownHostException in the attached log names
 * "sandbox-hdp.hortonworks.com", which comes from the cluster XML files on the
 * classpath (core-site.xml / yarn-site.xml), while the local hosts file only
 * maps "sandbox.hortonworks.com" / "sandbox". That hostname must also resolve
 * locally (or fs.defaultFS must be overridden) — TODO confirm against the
 * sandbox's actual config.
 */
def main(args: Array[String]): Unit = {
  val spark = SparkSession.builder()
    .master("yarn")
    .appName("hortonworks job")
    // Pre-staged Spark jars on HDFS avoid re-uploading local jars on every submit.
    .config("spark.yarn.jars", "hdfs://sandbox:8020//usr/hdp/3.0.1.0-187/spark2/jars/*.jar")
    .getOrCreate()
  try {
    // header=true: the first CSV line supplies the column names.
    val df = spark.read.option("header", "true")
      .csv("/hdfs1/samplefile.csv")
    df.show()
  } finally {
    // Fix: the original never stopped the session, leaking the YARN
    // application and driver resources; stop it even if the read fails.
    spark.stop()
  }
}
}
我已经将从 Hortonworks 沙箱下载的 core-site.xml、yarn-site.xml、mapred-site.xml、hdfs-site.xml 放入了本地系统 IntelliJ 项目的 main/resources 文件夹。
我也已经在本地系统中配置了 hosts 文件,内容如下:
# localhost name resolution is handled within DNS itself.
#127.0.0.1 localhost
#::1 localhost
127.0.0.1 sandbox.hortonworks.com sandbox
下面是实际发生的错误:
java.lang.IllegalArgumentException: java.net.UnknownHostException: sandbox-hdp.hortonworks.com
下面是 IntelliJ 的详细错误报告:
/*
* 提示:该行代码过长,系统自动注释不进行高亮。一键复制会移除系统注释
* C:\Java\jdk1.8.0_311\bin\java.exe "-javaagent:C:\Program Files\JetBrains\IntelliJ IDEA Community Edition 2021.3.2\lib\idea_rt.jar=50123:C:\Program Files\JetBrains\IntelliJ IDEA Community Edition 2021.3.2\bin" -Dfile.encoding=UTF-8 -classpath C:\Java\jdk1.8.0_311\jre\lib\charsets.jar;C:\Java\jdk1.8.0_311\jre\lib\deploy.jar;C:\Java\jdk1.8.0_311\jre\lib\ext\access-bridge-64.jar;C:\Java\jdk1.8.0_311\jre\lib\ext\cldrdata.jar;C:\Java\jdk1.8.0_311\jre\lib\ext\dnsns.jar;C:\Java\jdk1.8.0_311\jre\lib\ext\jaccess.jar;C:\Java\jdk1.8.0_311\jre\lib\ext\jfxrt.jar;C:\Java\jdk1.8.0_311\jre\lib\ext\localedata.jar;C:\Java\jdk1.8.0_311\jre\lib\ext\nashorn.jar;C:\Java\jdk1.8.0_311\jre\lib\ext\sunec.jar;C:\Java\jdk1.8.0_311\jre\lib\ext\sunjce_provider.jar;C:\Java\jdk1.8.0_311\jre\lib\ext\sunmscapi.jar;C:\Java\jdk1.8.0_311\jre\lib\ext\sunpkcs11.jar;C:\Java\jdk1.8.0_311\jre\lib\ext\zipfs.jar;C:\Java\jdk1.8.0_311\jre\lib\javaws.jar;C:\Java\jdk1.8.0_311\jre\lib\jce.jar;C:\Java\jdk1.8.0_311\jre\lib\jfr.jar;C:\Java\jdk1.8.0_311\jre\lib\jfxswt.jar;C:\Java\jdk1.8.0_311\jre\lib\jsse.jar;C:\Java\jdk1.8.0_311\jre\lib\management-agent.jar;C:\Java\jdk1.8.0_311\jre\lib\plugin.jar;C:\Java\jdk1.8.0_311\jre\lib\resources.jar;C:\Java\jdk1.8.0_311\jre\lib\rt.jar;C:\Users\raajs\IdeaProjects\freelanceproject1\target\classes;C:\Users\raajs\.ivy2\cache\org.scala-lang\scala-library\jars\scala-library-2.12.4.jar;C:\Users\raajs\.ivy2\cache\org.scala-lang\scala-reflect\jars\scala-reflect-2.12.4.jar;C:\Users\raajs\.ivy2\cache\org.scala-lang.modules\scala-xml_2.12\bundles\scala-xml_2.12-1.0.6.jar;C:\Users\raajs\.m2\repository\org\apache\spark\spark-core_2.12\3.2.1\spark-core_2.12-3.2.1.jar;C:\Users\raajs\.m2\repository\org\apache\avro\avro\1.10.2\avro-1.10.2.jar;C:\Users\raajs\.m2\repository\com\fasterxml\jackson\core\jackson-core\2.12.2\jackson-core-2.12.2.jar;C:\Users\raajs\.m2\repository\org\apache\commons\commons-compress\1.20\commons-compress-1.20.jar;C:\Users\raajs\.m2\repository\org\apache\avro\avro-mapred\
1.10.2\avro-mapred-1.10.2.jar;C:\Users\raajs\.m2\repository\org\apache\avro\avro-ipc\1.10.2\avro-ipc-1.10.2.jar;C:\Users\raajs\.m2\repository\org\tukaani\xz\1.8\xz-1.8.jar;C:\Users\raajs\.m2\repository\com\twitter\chill_2.12\0.10.0\chill_2.12-0.10.0.jar;C:\Users\raajs\.m2\repository\com\esotericsoftware\kryo-shaded\4.0.2\kryo-shaded-4.0.2.jar;C:\Users\raajs\.m2\repository\com\esotericsoftware\minlog\1.3.0\minlog-1.3.0.jar;C:\Users\raajs\.m2\repository\org\objenesis\objenesis\2.5.1\objenesis-2.5.1.jar;C:\Users\raajs\.m2\repository\com\twitter\chill-java\0.10.0\chill-java-0.10.0.jar;C:\Users\raajs\.m2\repository\org\apache\xbean\xbean-asm9-shaded\4.20\xbean-asm9-shaded-4.20.jar;C:\Users\raajs\.m2\repository\org\apache\hadoop\hadoop-client-api\3.3.1\hadoop-client-api-3.3.1.jar;C:\Users\raajs\.m2\repository\org\apache\hadoop\hadoop-client-runtime\3.3.1\hadoop-client-runtime-3.3.1.jar;C:\Users\raajs\.m2\repository\org\apache\htrace\htrace-core4\4.1.0-incubating\htrace-core4-4.1.0-incubating.jar;C:\Users\raajs\.m2\repository\commons-logging\commons-logging\1.1.3\commons-logging-1.1.3.jar;C:\Users\raajs\.m2\repository\org\apache\spark\spark-launcher_2.12\3.2.1\spark-launcher_2.12-3.2.1.jar;C:\Users\raajs\.m2\repository\org\apache\spark\spark-kvstore_2.12\3.2.1\spark-kvstore_2.12-3.2.1.jar;C:\Users\raajs\.m2\repository\org\fusesource\leveldbjni\leveldbjni-all\1.8\leveldbjni-all-1.8.jar;C:\Users\raajs\.m2\repository\com\fasterxml\jackson\core\jackson-annotations\2.12.3\jackson-annotations-2.12.3.jar;C:\Users\raajs\.m2\repository\org\apache\spark\spark-network-common_2.12\3.2.1\spark-network-common_2.12-3.2.1.jar;C:\Users\raajs\.m2\repository\com\google\crypto\tink\tink\1.6.0\tink-1.6.0.jar;C:\Users\raajs\.m2\repository\com\google\code\gson\gson\2.8.6\gson-2.8.6.jar;C:\Users\raajs\.m2\repository\org\apache\spark\spark-network-shuffle_2.12\3.2.1\spark-network-shuffle_2.12-3.2.1.jar;C:\Users\raajs\.m2\repository\org\apache\spark\spark-unsafe_2.12\3.2.1\spark-unsafe_2.12-3.2.1.j
ar;C:\Users\raajs\.m2\repository\javax\activation\activation\1.1.1\activation-1.1.1.jar;C:\Users\raajs\.m2\repository\org\apache\curator\curator-recipes\2.13.0\curator-recipes-2.13.0.jar;C:\Users\raajs\.m2\repository\org\apache\curator\curator-framework\2.13.0\curator-framework-2.13.0.jar;C:\Users\raajs\.m2\repository\org\apache\curator\curator-client\2.13.0\curator-client-2.13.0.jar;C:\Users\raajs\.m2\repository\com\google\guava\guava\16.0.1\guava-16.0.1.jar;C:\Users\raajs\.m2\repository\org\apache\zookeeper\zookeeper\3.6.2\zookeeper-3.6.2.jar;C:\Users\raajs\.m2\repository\commons-lang\commons-lang\2.6\commons-lang-2.6.jar;C:\Users\raajs\.m2\repository\org\apache\zookeeper\zookeeper-jute\3.6.2\zookeeper-jute-3.6.2.jar;C:\Users\raajs\.m2\repository\org\apache\yetus\audience-annotations\0.5.0\audience-annotations-0.5.0.jar;C:\Users\raajs\.m2\repository\jakarta\servlet\jakarta.servlet-api\4.0.3\jakarta.servlet-api-4.0.3.jar;C:\Users\raajs\.m2\repository\commons-codec\commons-codec\1.15\commons-codec-1.15.jar;C:\Users\raajs\.m2\repository\org\apache\commons\commons-lang3\3.12.0\commons-lang3-3.12.0.jar;C:\Users\raajs\.m2\repository\org\apache\commons\commons-math3\3.4.1\commons-math3-3.4.1.jar;C:\Users\raajs\.m2\repository\org\apache\commons\commons-text\1.6\commons-text-1.6.jar;C:\Users\raajs\.m2\repository\commons-io\commons-io\2.8.0\commons-io-2.8.0.jar;C:\Users\raajs\.m2\repository\commons-collections\commons-collections\3.2.2\commons-collections-3.2.2.jar;C:\Users\raajs\.m2\repository\com\google\code\findbugs\jsr305\3.0.0\jsr305-3.0.0.jar;C:\Users\raajs\.m2\repository\org\slf4j\slf4j-api\1.7.30\slf4j-api-1.7.30.jar;C:\Users\raajs\.m2\repository\org\slf4j\jul-to-slf4j\1.7.30\jul-to-slf4j-1.7.30.jar;C:\Users\raajs\.m2\repository\org\slf4j\jcl-over-slf4j\1.7.30\jcl-over-slf4j-1.7.30.jar;C:\Users\raajs\.m2\repository\log4j\log4j\1.2.17\log4j-1.2.17.jar;C:\Users\raajs\.m2\repository\org\slf4j\slf4j-log4j12\1.7.30\slf4j-log4j12-1.7.30.jar;C:\Users\raajs\.m2\repository\c
om\ning\compress-lzf\1.0.3\compress-lzf-1.0.3.jar;C:\Users\raajs\.m2\repository\org\xerial\snappy\snappy-java\1.1.8.4\snappy-java-1.1.8.4.jar;C:\Users\raajs\.m2\repository\org\lz4\lz4-java\1.7.1\lz4-java-1.7.1.jar;C:\Users\raajs\.m2\repository\com\github\luben\zstd-jni\1.5.0-4\zstd-jni-1.5.0-4.jar;C:\Users\raajs\.m2\repository\org\roaringbitmap\RoaringBitmap\0.9.0\RoaringBitmap-0.9.0.jar;C:\Users\raajs\.m2\repository\org\roaringbitmap\shims\0.9.0\shims-0.9.0.jar;C:\Users\raajs\.m2\repository\commons-net\commons-net\3.1\commons-net-3.1.jar;C:\Users\raajs\.m2\repository\org\scala-lang\modules\scala-xml_2.12\1.2.0\scala-xml_2.12-1.2.0.jar;C:\Users\raajs\.m2\repository\org\scala-lang\scala-library\2.12.15\scala-library-2.12.15.jar;C:\Users\raajs\.m2\repository\org\scala-lang\scala-reflect\2.12.15\scala-reflect-2.12.15.jar;C:\Users\raajs\.m2\repository\org\json4s\json4s-jackson_2.12\3.7.0-M11\json4s-jackson_2.12-3.7.0-M11.jar;C:\Users\raajs\.m2\repository\org\json4s\json4s-core_2.12\3.7.0-M11\json4s-core_2.12-3.7.0-M11.jar;C:\Users\raajs\.m2\repository\org\json4s\json4s-ast_2.12\3.7.0-M11\json4s-ast_2.12-3.7.0-M11.jar;C:\Users\raajs\.m2\repository\org\json4s\json4s-scalap_2.12\3.7.0-M11\json4s-scalap_2.12-3.7.0-M11.jar;C:\Users\raajs\.m2\repository\org\glassfish\jersey\core\jersey-client\2.34\jersey-client-2.34.jar;C:\Users\raajs\.m2\repository\jakarta\ws\rs\jakarta.ws.rs-api\2.1.6\jakarta.ws.rs-api-2.1.6.jar;C:\Users\raajs\.m2\repository\org\glassfish\hk2\external\jakarta.inject\2.6.1\jakarta.inject-2.6.1.jar;C:\Users\raajs\.m2\repository\org\glassfish\jersey\core\jersey-common\2.34\jersey-common-2.34.jar;C:\Users\raajs\.m2\repository\jakarta\annotation\jakarta.annotation-api\1.3.5\jakarta.annotation-api-1.3.5.jar;C:\Users\raajs\.m2\repository\org\glassfish\hk2\osgi-resource-locator\1.0.3\osgi-resource-locator-1.0.3.jar;C:\Users\raajs\.m2\repository\org\glassfish\jersey\core\jersey-server\2.34\jersey-server-2.34.jar;C:\Users\raajs\.m2\repository\jakarta\validation\jakar
ta.validation-api\2.0.2\jakarta.validation-api-2.0.2.jar;C:\Users\raajs\.m2\repository\org\glassfish\jersey\containers\jersey-container-servlet\2.34\jersey-container-servlet-2.34.jar;C:\Users\raajs\.m2\repository\org\glassfish\jersey\containers\jersey-container-servlet-core\2.34\jersey-container-servlet-core-2.34.jar;C:\Users\raajs\.m2\repository\org\glassfish\jersey\inject\jersey-hk2\2.34\jersey-hk2-2.34.jar;C:\Users\raajs\.m2\repository\org\glassfish\hk2\hk2-locator\2.6.1\hk2-locator-2.6.1.jar;C:\Users\raajs\.m2\repository\org\glassfish\hk2\external\aopalliance-repackaged\2.6.1\aopalliance-repackaged-2.6.1.jar;C:\Users\raajs\.m2\repository\org\glassfish\hk2\hk2-api\2.6.1\hk2-api-2.6.1.jar;C:\Users\raajs\.m2\repository\org\glassfish\hk2\hk2-utils\2.6.1\hk2-utils-2.6.1.jar;C:\Users\raajs\.m2\repository\org\javassist\javassist\3.25.0-GA\javassist-3.25.0-GA.jar;C:\Users\raajs\.m2\repository\io\netty\netty-all\4.1.68.Final\netty-all-4.1.68.Final.jar;C:\Users\raajs\.m2\repository\com\clearspring\analytics\stream\2.9.6\stream-2.9.6.jar;C:\Users\raajs\.m2\repository\io\dropwizard\metrics\metrics-core\4.2.0\metrics-core-4.2.0.jar;C:\Users\raajs\.m2\repository\io\dropwizard\metrics\metrics-jvm\4.2.0\metrics-jvm-4.2.0.jar;C:\Users\raajs\.m2\repository\io\dropwizard\metrics\metrics-json\4.2.0\metrics-json-4.2.0.jar;C:\Users\raajs\.m2\repository\io\dropwizard\metrics\metrics-graphite\4.2.0\metrics-graphite-4.2.0.jar;C:\Users\raajs\.m2\repository\io\dropwizard\metrics\metrics-jmx\4.2.0\metrics-jmx-4.2.0.jar;C:\Users\raajs\.m2\repository\com\fasterxml\jackson\core\jackson-databind\2.12.3\jackson-databind-2.12.3.jar;C:\Users\raajs\.m2\repository\com\fasterxml\jackson\module\jackson-module-scala_2.12\2.12.3\jackson-module-scala_2.12-2.12.3.jar;C:\Users\raajs\.m2\repository\com\thoughtworks\paranamer\paranamer\2.8\paranamer-2.8.jar;C:\Users\raajs\.m2\repository\org\apache\ivy\ivy\2.5.0\ivy-2.5.0.jar;C:\Users\raajs\.m2\repository\oro\oro\2.0.8\oro-2.0.8.jar;C:\Users\raajs\.m2\reposi
tory\net\razorvine\pyrolite\4.30\pyrolite-4.30.jar;C:\Users\raajs\.m2\repository\net\sf\py4j\py4j\0.10.9.3\py4j-0.10.9.3.jar;C:\Users\raajs\.m2\repository\org\apache\spark\spark-tags_2.12\3.2.1\spark-tags_2.12-3.2.1.jar;C:\Users\raajs\.m2\repository\org\apache\commons\commons-crypto\1.1.0\commons-crypto-1.1.0.jar;C:\Users\raajs\.m2\repository\org\spark-project\spark\unused\1.0.0\unused-1.0.0.jar;C:\Users\raajs\.m2\repository\org\apache\spark\spark-sql_2.12\3.2.1\spark-sql_2.12-3.2.1.jar;C:\Users\raajs\.m2\repository\org\rocksdb\rocksdbjni\6.20.3\rocksdbjni-6.20.3.jar;C:\Users\raajs\.m2\repository\com\univocity\univocity-parsers\2.9.1\univocity-parsers-2.9.1.jar;C:\Users\raajs\.m2\repository\org\apache\spark\spark-sketch_2.12\3.2.1\spark-sketch_2.12-3.2.1.jar;C:\Users\raajs\.m2\repository\org\apache\spark\spark-catalyst_2.12\3.2.1\spark-catalyst_2.12-3.2.1.jar;C:\Users\raajs\.m2\repository\org\scala-lang\modules\scala-parser-combinators_2.12\1.1.2\scala-parser-combinators_2.12-1.1.2.jar;C:\Users\raajs\.m2\repository\org\codehaus\janino\janino\3.0.16\janino-3.0.16.jar;C:\Users\raajs\.m2\repository\org\codehaus\janino\commons-compiler\3.0.16\commons-compiler-3.0.16.jar;C:\Users\raajs\.m2\repository\org\antlr\antlr4-runtime\4.8\antlr4-runtime-4.8.jar;C:\Users\raajs\.m2\repository\javax\xml\bind\jaxb-api\2.2.11\jaxb-api-2.2.11.jar;C:\Users\raajs\.m2\repository\org\apache\arrow\arrow-vector\2.0.0\arrow-vector-2.0.0.jar;C:\Users\raajs\.m2\repository\org\apache\arrow\arrow-format\2.0.0\arrow-format-2.0.0.jar;C:\Users\raajs\.m2\repository\org\apache\arrow\arrow-memory-core\2.0.0\arrow-memory-core-2.0.0.jar;C:\Users\raajs\.m2\repository\com\google\flatbuffers\flatbuffers-java\1.9.0\flatbuffers-java-1.9.0.jar;C:\Users\raajs\.m2\repository\org\apache\arrow\arrow-memory-netty\2.0.0\arrow-memory-netty-2.0.0.jar;C:\Users\raajs\.m2\repository\org\apache\orc\orc-core\1.6.12\orc-core-1.6.12.jar;C:\Users\raajs\.m2\repository\org\apache\orc\orc-shims\1.6.12\orc-shims-1.6.12.jar;C:\User
s\raajs\.m2\repository\com\google\protobuf\protobuf-java\2.5.0\protobuf-java-2.5.0.jar;C:\Users\raajs\.m2\repository\io\airlift\aircompressor\0.21\aircompressor-0.21.jar;C:\Users\raajs\.m2\repository\org\jetbrains\annotations\17.0.0\annotations-17.0.0.jar;C:\Users\raajs\.m2\repository\org\threeten\threeten-extra\1.5.0\threeten-extra-1.5.0.jar;C:\Users\raajs\.m2\repository\org\apache\orc\orc-mapreduce\1.6.12\orc-mapreduce-1.6.12.jar;C:\Users\raajs\.m2\repository\org\apache\hive\hive-storage-api\2.7.2\hive-storage-api-2.7.2.jar;C:\Users\raajs\.m2\repository\org\apache\parquet\parquet-column\1.12.2\parquet-column-1.12.2.jar;C:\Users\raajs\.m2\repository\org\apache\parquet\parquet-common\1.12.2\parquet-common-1.12.2.jar;C:\Users\raajs\.m2\repository\org\apache\parquet\parquet-encoding\1.12.2\parquet-encoding-1.12.2.jar;C:\Users\raajs\.m2\repository\org\apache\parquet\parquet-hadoop\1.12.2\parquet-hadoop-1.12.2.jar;C:\Users\raajs\.m2\repository\org\apache\parquet\parquet-format-structures\1.12.2\parquet-format-structures-1.12.2.jar;C:\Users\raajs\.m2\repository\org\apache\parquet\parquet-jackson\1.12.2\parquet-jackson-1.12.2.jar;C:\Users\raajs\.m2\repository\org\apache\spark\spark-yarn_2.12\3.2.1\spark-yarn_2.12-3.2.1.jar AOTremote1
*/
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
22/02/18 18:23:06 INFO SparkContext: Running Spark version 3.2.1
22/02/18 18:23:09 INFO ResourceUtils: ==============================================================
22/02/18 18:23:09 INFO ResourceUtils: No custom resources configured for spark.driver.
22/02/18 18:23:09 INFO ResourceUtils: ==============================================================
22/02/18 18:23:09 INFO SparkContext: Submitted application: hortonworks job
22/02/18 18:23:09 INFO ResourceProfile: Default ResourceProfile created, executor resources: Map(cores -> name: cores, amount: 1, script: , vendor: , memory -> name: memory, amount: 1024, script: , vendor: , offHeap -> name: offHeap, amount: 0, script: , vendor: ), task resources: Map(cpus -> name: cpus, amount: 1.0)
22/02/18 18:23:09 INFO ResourceProfile: Limiting resource is cpus at 1 tasks per executor
22/02/18 18:23:09 INFO ResourceProfileManager: Added ResourceProfile id: 0
22/02/18 18:23:10 INFO SecurityManager: Changing view acls to: raajs
22/02/18 18:23:10 INFO SecurityManager: Changing modify acls to: raajs
22/02/18 18:23:10 INFO SecurityManager: Changing view acls groups to:
22/02/18 18:23:10 INFO SecurityManager: Changing modify acls groups to:
22/02/18 18:23:10 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(raajs); groups with view permissions: Set(); users with modify permissions: Set(raajs); groups with modify permissions: Set()
22/02/18 18:23:13 INFO Utils: Successfully started service 'sparkDriver' on port 50165.
22/02/18 18:23:13 INFO SparkEnv: Registering MapOutputTracker
22/02/18 18:23:13 INFO SparkEnv: Registering BlockManagerMaster
22/02/18 18:23:13 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
22/02/18 18:23:13 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
22/02/18 18:23:13 INFO SparkEnv: Registering BlockManagerMasterHeartbeat
22/02/18 18:23:13 INFO DiskBlockManager: Created local directory at C:\Users\raajs\AppData\Local\Temp\blockmgr-a0a8148b-b14d-41ba-adee-6af25b154971
22/02/18 18:23:13 INFO MemoryStore: MemoryStore started with capacity 1994.1 MiB
22/02/18 18:23:13 INFO SparkEnv: Registering OutputCommitCoordinator
22/02/18 18:23:14 WARN Utils: Service 'SparkUI' could not bind on port 4040. Attempting port 4041.
22/02/18 18:23:14 INFO Utils: Successfully started service 'SparkUI' on port 4041.
22/02/18 18:23:14 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://localhost:4041
22/02/18 18:23:17 INFO AbstractService: Service org.apache.hadoop.yarn.client.api.impl.YarnClientImpl failed in state INITED
java.lang.IllegalArgumentException: java.net.UnknownHostException: sandbox-hdp.hortonworks.com
at org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:466)
at org.apache.hadoop.yarn.util.timeline.TimelineUtils.buildTimelineTokenService(TimelineUtils.java:165)
at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.serviceInit(YarnClientImpl.java:200)
at org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:175)
at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:62)
at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:220)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:581)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2690)
at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:949)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:943)
at AOTremote1$.main(AOTremote1.scala:19)
at AOTremote1.main(AOTremote1.scala)
Caused by: java.net.UnknownHostException: sandbox-hdp.hortonworks.com
... 14 more
22/02/18 18:23:17 ERROR SparkContext: Error initializing SparkContext.
java.lang.IllegalArgumentException: java.net.UnknownHostException: sandbox-hdp.hortonworks.com
at org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:466)
at org.apache.hadoop.yarn.util.timeline.TimelineUtils.buildTimelineTokenService(TimelineUtils.java:165)
at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.serviceInit(YarnClientImpl.java:200)
at org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:175)
at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:62)
at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:220)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:581)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2690)
at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:949)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:943)
at AOTremote1$.main(AOTremote1.scala:19)
at AOTremote1.main(AOTremote1.scala)
Caused by: java.net.UnknownHostException: sandbox-hdp.hortonworks.com
... 14 more
22/02/18 18:23:17 INFO SparkUI: Stopped Spark web UI at http://localhost:4041
22/02/18 18:23:17 WARN YarnSchedulerBackend$YarnSchedulerEndpoint: Attempted to request executors before the AM has registered!
22/02/18 18:23:17 INFO YarnClientSchedulerBackend: Shutting down all executors
22/02/18 18:23:17 INFO YarnSchedulerBackend$YarnDriverEndpoint: Asking each executor to shut down
22/02/18 18:23:17 INFO YarnClientSchedulerBackend: YARN client scheduler backend Stopped
22/02/18 18:23:17 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
22/02/18 18:23:17 INFO MemoryStore: MemoryStore cleared
22/02/18 18:23:17 INFO BlockManager: BlockManager stopped
22/02/18 18:23:17 INFO BlockManagerMaster: BlockManagerMaster stopped
22/02/18 18:23:17 WARN MetricsSystem: Stopping a MetricsSystem that is not running
22/02/18 18:23:17 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
22/02/18 18:23:17 INFO SparkContext: Successfully stopped SparkContext
Exception in thread "main" java.lang.IllegalArgumentException: java.net.UnknownHostException: sandbox-hdp.hortonworks.com
at org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:466)
at org.apache.hadoop.yarn.util.timeline.TimelineUtils.buildTimelineTokenService(TimelineUtils.java:165)
at org.apache.hadoop.yarn.client.api.impl.YarnClientImpl.serviceInit(YarnClientImpl.java:200)
at org.apache.hadoop.service.AbstractService.init(AbstractService.java:164)
at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:175)
at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:62)
at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:220)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:581)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2690)
at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$2(SparkSession.scala:949)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:943)
at AOTremote1$.main(AOTremote1.scala:19)
at AOTremote1.main(AOTremote1.scala)
Caused by: java.net.UnknownHostException: sandbox-hdp.hortonworks.com
... 14 more
22/02/18 18:23:17 INFO ShutdownHookManager: Shutdown hook called
22/02/18 18:23:17 INFO ShutdownHookManager: Deleting directory C:\Users\raajs\AppData\Local\Temp\spark-e91a7e82-f75f-41b8-a44d-e044effa2a2d
Process finished with exit code 1
如何纠正这一问题?
发布于 2022-03-02 17:35:28
您应该编辑(或创建)$SPARK_HOME/conf/core-site.xml,在其中配置 fs.defaultFS 来指定 namenode 地址,而不是去修改本机的 /etc/hosts。
请记住,Hortonworks 沙箱已被弃用(deprecated),而且使用 Spark 并不一定需要 HDFS。
https://stackoverflow.com/questions/71174140
复制相似问题