Spark standalone build: NoClassDefFoundError: com/fasterxml/jackson/databind/Module

Setting up a Spark standalone cluster (1 master, 1 slave) and hitting an error when starting spark-shell. Hoping someone with experience can help. Thanks!
Environment:
OS             Ubuntu 14.04.2
JDK            jdk1.7.0_67
SCALA          scala-2.11.8
HADOOP         hadoop-2.6.0-cdh5.8.0
SPARK          spark-1.6.0-cdh5.8.0  (http://archive.cloudera.com/cdh5/cdh/5/)


spark-env.sh file:
JAVA_HOME=/usr/local/jvm/jdk1.7.0_67
SPARK_HOME=/usr/local/cloudera/spark-1.6.0-cdh5.8.0
SPARK_MASTER_IP=master
SPARK_MASTER_PORT=7077
SPARK_WORKER_CORES=1
SPARK_WORKER_MEMORY=512m
SPARK_WORKER_PORT=8123
SPARK_WORKER_INSTANCES=1
SPARK_WORKER_DIR=$SPARK_HOME/work
SPARK_DIST_CLASSPATH=$(hadoop classpath)
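
Since SPARK_DIST_CLASSPATH is taken verbatim from $(hadoop classpath), a quick sanity check is to confirm that the com.fasterxml Jackson 2.x jars actually appear on that classpath; Hadoop 2.6 itself mostly ships the older org.codehaus Jackson 1.x, so they may be absent. A minimal check, assuming the hadoop command is on your PATH:

# Expand the classpath globs, one entry per line, and look for Jackson jars.
# If jackson-databind does not show up, spark-shell will fail as below.
hadoop classpath --glob | tr ':' '\n' | grep -i jackson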


slaves file:
slave1


------------------------------------------------------------------------
Error when starting spark-shell:
java.lang.NoClassDefFoundError: com/fasterxml/jackson/databind/Module
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:270)
at org.apache.spark.util.Utils$.classForName(Utils.scala:175)
at org.apache.spark.metrics.MetricsSystem$$anonfun$registerSinks$1.apply(MetricsSystem.scala:190)
at org.apache.spark.metrics.MetricsSystem$$anonfun$registerSinks$1.apply(MetricsSystem.scala:186)
at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:98)
at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:226)
at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:39)
at scala.collection.mutable.HashMap.foreach(HashMap.scala:98)
at org.apache.spark.metrics.MetricsSystem.registerSinks(MetricsSystem.scala:186)
at org.apache.spark.metrics.MetricsSystem.start(MetricsSystem.scala:100)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:551)
at org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:1022)
at $iwC$$iwC.<init>(<console>:15)
...
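
This NoClassDefFoundError typically means the Jackson 2.x jars Spark 1.6 links against (jackson-core, jackson-databind, jackson-annotations, version 2.4.x) were bundled into neither the CDH Spark build nor the Hadoop classpath. A hedged workaround sketch, not a confirmed fix: download the matching jars, drop them into $SPARK_HOME/lib (paths and version below are illustrative, adjust to your install), and append them in spark-env.sh:

# Illustrative locations; Spark 1.6 was built against Jackson 2.4.4.
SPARK_DIST_CLASSPATH="$SPARK_DIST_CLASSPATH:$SPARK_HOME/lib/jackson-core-2.4.4.jar:$SPARK_HOME/lib/jackson-databind-2.4.4.jar:$SPARK_HOME/lib/jackson-annotations-2.4.4.jar"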


java.lang.NullPointerException
at org.apache.spark.sql.SQLContext$.createListenerAndUI(SQLContext.scala:1375)
at org.apache.spark.sql.SQLContext.<init>(SQLContext.scala:77)
at org.apache.spark.repl.SparkILoop.createSQLContext(SparkILoop.scala:1038)
at $iwC$$iwC.<init>(<console>:15)
at $iwC.<init>(<console>:24)
at <init>(<console>:26)
at .<init>(<console>:30)
at .<clinit>(<console>)
at .<init>(<console>:7)
at .<clinit>(<console>)
at $print(<console>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(...)
...


  at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)


<console>:16: error: not found: value sqlContext
         import sqlContext.implicits._
                ^
<console>:16: error: not found: value sqlContext
         import sqlContext.sql
                ^
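
The NullPointerException and the "not found: value sqlContext" errors look like downstream symptoms: once the SparkContext fails to initialize, the REPL never creates sqlContext, so the imports cannot resolve. It may help to first locate whatever jackson-databind jar the install already has, so a classpath entry can point at a real file. A hedged one-liner, assuming the CDH tree lives under /usr/local/cloudera as in spark-env.sh above:

# Search the install tree for any Jackson databind jar.
find /usr/local/cloudera -name 'jackson-databind-*.jar' 2>/dev/null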


scala>
