hadoop - Spark on YARN in client mode fails with errors


I am using Hadoop 2.7.1 and Spark 2.0.0.

When I launch:

./bin/spark-shell --master yarn --deploy-mode client 
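
For context, spark-shell in YARN mode has to find the cluster's client-side configuration: HADOOP_CONF_DIR (or YARN_CONF_DIR) must point at the directory containing core-site.xml and yarn-site.xml. A minimal sketch of a launch that makes this explicit, assuming a Hadoop install under /usr/local/hadoop (a hypothetical path, adjust to your setup):

# hypothetical path; must contain core-site.xml and yarn-site.xml
export HADOOP_CONF_DIR=/usr/local/hadoop/etc/hadoop
./bin/spark-shell --master yarn --deploy-mode client

If HADOOP_CONF_DIR is unset or points at stale configs, the driver cannot reach the ResourceManager and the ApplicationMaster dies shortly after submission, which matches the error below.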

I get the following error:

16/11/10 12:03:06 WARN Client: Neither spark.yarn.jars nor spark.yarn.archive is set, falling back to uploading libraries under SPARK_HOME.
16/11/10 12:03:13 ERROR SparkContext: Error initializing SparkContext.
org.apache.spark.SparkException: Yarn application has already ended! It might have been killed or unable to launch application master.
  at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.waitForApplication(YarnClientSchedulerBackend.scala:85)
  at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:62)
  at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:149)
  at org.apache.spark.SparkContext.<init>(SparkContext.scala:500)
  at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2256)
  at org.apache.spark.sql.SparkSession$Builder$$anonfun$8.apply(SparkSession.scala:831)
  at org.apache.spark.sql.SparkSession$Builder$$anonfun$8.apply(SparkSession.scala:823)
  at scala.Option.getOrElse(Option.scala:121)
  at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:823)
  at org.apache.spark.repl.Main$.createSparkSession(Main.scala:95)
  at $line3.$read$$iw$$iw.<init>(<console>:15)
  at $line3.$read$$iw.<init>(<console>:31)
  at $line3.$read.<init>(<console>:33)
  at $line3.$read$.<init>(<console>:37)
  at $line3.$read$.<clinit>(<console>)
  at $line3.$eval$.$print$lzycompute(<console>:7)
  at $line3.$eval$.$print(<console>:6)
  at $line3.$eval.$print(<console>)
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)
  at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
  at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
  at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
  at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
  at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
  at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
  at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
  at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
  at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
  at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)
  at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)
  at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)
  at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply$mcV$sp(SparkILoop.scala:38)
  at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:37)
  at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:37)
  at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:214)
  at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:37)
  at org.apache.spark.repl.SparkILoop.loadFiles(SparkILoop.scala:94)
  at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:920)
  at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
  at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
  at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
  at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)
  at org.apache.spark.repl.Main$.doMain(Main.scala:68)
  at org.apache.spark.repl.Main$.main(Main.scala:51)
  at org.apache.spark.repl.Main.main(Main.scala)
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)
  at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:729)
  at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:185)
  at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:210)
  at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:124)
  at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
16/11/10 12:03:13 WARN YarnSchedulerBackend$YarnSchedulerEndpoint: Attempted to request executors before the AM has registered!
16/11/10 12:03:14 WARN MetricsSystem: Stopping a MetricsSystem that is not running
org.apache.spark.SparkException: Yarn application has already ended! It might have been killed or unable to launch application master.
  at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.waitForApplication(YarnClientSchedulerBackend.scala:85)
  at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:62)
  at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:149)
  at org.apache.spark.SparkContext.<init>(SparkContext.scala:500)
  at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2256)
  at org.apache.spark.sql.SparkSession$Builder$$anonfun$8.apply(SparkSession.scala:831)
  at org.apache.spark.sql.SparkSession$Builder$$anonfun$8.apply(SparkSession.scala:823)
  at scala.Option.getOrElse(Option.scala:121)
  at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:823)
  at org.apache.spark.repl.Main$.createSparkSession(Main.scala:95)
  ... 47 elided
<console>:14: error: not found: value spark
       import spark.implicits._
              ^
<console>:14: error: not found: value spark
       import spark.sql
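
A note on reading this output: the first WARN line is a hint, not the failure itself. With neither spark.yarn.jars nor spark.yarn.archive set, Spark zips and uploads everything under SPARK_HOME/jars for each application, which is slow but harmless. One way to address the warning is to stage the jars on HDFS once and point spark.yarn.jars at them; the actual reason the ApplicationMaster died is recorded in the YARN application logs. A sketch of both steps, assuming an HDFS directory /spark/jars (a hypothetical path, pick your own):

hdfs dfs -mkdir -p /spark/jars
hdfs dfs -put $SPARK_HOME/jars/* /spark/jars/
echo "spark.yarn.jars hdfs:///spark/jars/*" >> $SPARK_HOME/conf/spark-defaults.conf

# the real cause of "Yarn application has already ended" is usually in the AM logs:
yarn logs -applicationId <application_id>

The application id is printed in the spark-shell output (and shown in the ResourceManager web UI). Frequent culprits include a missing or wrong HADOOP_CONF_DIR, containers killed for exceeding YARN memory limits, or an unreachable ResourceManager.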

Can someone help me?

