Starting Spark

The goal of this session: bring up a standalone Spark 1.4.0 Master and Worker on one node, then verify the installation with spark-shell. It starts with a wrong turn that is worth keeping, because the mistake is a common one.

From Spark's conf directory I first tried to run start-dfs.sh. That script belongs to Hadoop, not Spark, so it does not exist under Spark's sbin:

[root@name01 conf]# pwd
/usr/local/spark/spark-1.4.0/conf
[root@name01 conf]# ../sbin/start-dfs.sh
bash: ../sbin/start-dfs.sh: No such file or directory
[root@name01 conf]# ls
docker.properties.template  log4j.properties.template    slaves           spark-defaults.conf           spark-env.sh
fairscheduler.xml.template  metrics.properties.template  slaves.template  spark-defaults.conf.template  spark-env.sh.template
[root@name01 conf]# ls -ltr
total 44
-rwxr-xr-x 1 hadoop hadoop 3318 Jun  2  2015 spark-env.sh.template
-rw-r--r-- 1 hadoop hadoop  507 Jun  2  2015 spark-defaults.conf.template
-rw-r--r-- 1 hadoop hadoop   80 Jun  2  2015 slaves.template
-rw-r--r-- 1 hadoop hadoop 5565 Jun  2  2015 metrics.properties.template
-rw-r--r-- 1 hadoop hadoop  632 Jun  2  2015 log4j.properties.template
-rw-r--r-- 1 hadoop hadoop  303 Jun  2  2015 fairscheduler.xml.template
-rw-r--r-- 1 hadoop hadoop  202 Jun  2  2015 docker.properties.template
-rw-r--r-- 1 root   root     80 Jun 10 20:17 slaves
-rwxr-xr-x 1 root   root   3927 Jun 10 20:42 spark-env.sh
-rw-r--r-- 1 root   root    507 Jun 10 21:38 spark-defaults.conf

The listing shows that slaves, spark-env.sh and spark-defaults.conf have already been created from their .template counterparts. Spark only reads the copies without the suffix; the templates are just samples.
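The session never shows what went into those files. For a single-node standalone cluster like this one, a minimal pair might look like the sketch below; the JAVA_HOME path and the memory figure are assumptions, not values recovered from this machine, while the "localhost" entry matches the "localhost: starting ... Worker" line that appears later in the session:

# conf/spark-env.sh -- minimal standalone settings (values assumed)
export JAVA_HOME=/usr/local/jdk1.8.0_20   # hypothetical JDK location
export SPARK_MASTER_IP=name01             # host the Master binds to (Spark 1.x variable name)
export SPARK_WORKER_MEMORY=512m           # per-Worker memory cap, an assumed figure

# conf/slaves -- one Worker host per line
localhost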
Back at the installation root, the same mistake again: start-dfs.sh is simply not there. jps shows why it was never needed, since HDFS (NameNode, DataNode, SecondaryNameNode) and YARN (ResourceManager, NodeManager) are already running:

[root@name01 conf]# cd ..
[root@name01 spark-1.4.0]# ls -ltr
total 668
drwxr-xr-x 2 hadoop hadoop   4096 Jun  2  2015 sbin
-rw-r--r-- 1 hadoop hadoop    134 Jun  2  2015 RELEASE
-rw-r--r-- 1 hadoop hadoop   3624 Jun  2  2015 README.md
drwxr-xr-x 3 hadoop hadoop   4096 Jun  2  2015 R
drwxr-xr-x 6 hadoop hadoop   4096 Jun  2  2015 python
-rw-r--r-- 1 hadoop hadoop  22559 Jun  2  2015 NOTICE
-rw-r--r-- 1 hadoop hadoop  50902 Jun  2  2015 LICENSE
drwxr-xr-x 2 hadoop hadoop   4096 Jun  2  2015 lib
drwxr-xr-x 3 hadoop hadoop   4096 Jun  2  2015 examples
drwxr-xr-x 3 hadoop hadoop   4096 Jun  2  2015 ec2
drwxr-xr-x 3 hadoop hadoop   4096 Jun  2  2015 data
-rw-r--r-- 1 hadoop hadoop 561149 Jun  2  2015 CHANGES.txt
drwxr-xr-x 2 hadoop hadoop   4096 Jun  2  2015 bin
drwxr-xr-x 2 hadoop hadoop   4096 Jun 10 21:38 conf
[root@name01 spark-1.4.0]# sbin/start-dfs.sh
bash: sbin/start-dfs.sh: No such file or directory
[root@name01 spark-1.4.0]# jps
6273 MainGenericRunner
3334 RunJar
3015 NodeManager
2600 DataNode
7016 Jps
2922 ResourceManager
2749 SecondaryNameNode
2510 NameNode

What Spark does ship is in sbin, and for a standalone cluster the scripts to use are start-master.sh and start-slaves.sh (or start-all.sh for both at once):

[root@name01 spark-1.4.0]# cd sbin/
[root@name01 sbin]# ls -ltr
total 84
-rwxr-xr-x 1 hadoop hadoop 1012 Jun  2  2015 stop-thriftserver.sh
-rwxr-xr-x 1 hadoop hadoop 1175 Jun  2  2015 stop-slaves.sh
-rwxr-xr-x 1 hadoop hadoop 1478 Jun  2  2015 stop-slave.sh
-rwxr-xr-x 1 hadoop hadoop 1013 Jun  2  2015 stop-shuffle-service.sh
-rwxr-xr-x 1 hadoop hadoop 1041 Jun  2  2015 stop-mesos-dispatcher.sh
-rwxr-xr-x 1 hadoop hadoop 1123 Jun  2  2015 stop-master.sh
-rwxr-xr-x 1 hadoop hadoop 1002 Jun  2  2015 stop-history-server.sh
-rwxr-xr-x 1 hadoop hadoop 1386 Jun  2  2015 stop-all.sh
-rwxr-xr-x 1 hadoop hadoop 1792 Jun  2  2015 start-thriftserver.sh
-rwxr-xr-x 1 hadoop hadoop 1920 Jun  2  2015 start-slaves.sh
-rwxr-xr-x 1 hadoop hadoop 2817 Jun  2  2015 start-slave.sh
-rwxr-xr-x 1 hadoop hadoop 1212 Jun  2  2015 start-shuffle-service.sh
-rwxr-xr-x 1 hadoop hadoop 1555 Jun  2  2015 start-mesos-dispatcher.sh
-rwxr-xr-x 1 hadoop hadoop 1881 Jun  2  2015 start-master.sh
-rwxr-xr-x 1 hadoop hadoop 1480 Jun  2  2015 start-history-server.sh
-rwxr-xr-x 1 hadoop hadoop 1267 Jun  2  2015 start-all.sh
-rwxr-xr-x 1 hadoop hadoop 1176 Jun  2  2015 spark-daemons.sh
-rwxr-xr-x 1 hadoop hadoop 5184 Jun  2  2015 spark-daemon.sh
-rwxr-xr-x 1 hadoop hadoop 1609 Jun  2  2015 spark-config.sh
-rwxr-xr-x 1 hadoop hadoop 2749 Jun  2  2015 slaves.sh

The scripts are right here, yet calling them by bare name fails: "." is not on PATH, and the PATH entries pointing at Spark's sbin live in /etc/profile, which this shell had not re-read. Sourcing ~/.bash_profile changes nothing; sourcing /etc/profile fixes it:

[root@name01 sbin]# start-master.sh
bash: start-master.sh: command not found
[root@name01 sbin]# start-slaves.sh
bash: start-slaves.sh: command not found
[root@name01 sbin]# source ~/.bash_profile
[root@name01 sbin]# start-slaves.sh
bash: start-slaves.sh: command not found
[root@name01 sbin]# source /etc/profile
[root@name01 sbin]# start-master.sh
starting org.apache.spark.deploy.master.Master, logging to /usr/local/spark/spark-1.4.0/sbin/../logs/spark-root-org.apache.spark.deploy.master.Master-1-name01.out
[root@name01 sbin]# start-slaves.sh
localhost: starting org.apache.spark.deploy.worker.Worker, logging to /usr/local/spark/spark-1.4.0/sbin/../logs/spark-root-org.apache.spark.deploy.worker.Worker-1-name01.out
[root@name01 sbin]# jps
7408 Jps
6273 MainGenericRunner
3334 RunJar
7158 Master
3015 NodeManager
7335 Worker
2600 DataNode
2922 ResourceManager
2749 SecondaryNameNode
2510 NameNode

Master (7158) and Worker (7335) are now running alongside the Hadoop daemons.
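Why did sourcing /etc/profile help? Bare names like start-master.sh can only resolve through PATH, so /etc/profile on this box must contain something along these lines; the exact wording is an assumption, only the installation path is known from the session:

# Spark entries assumed to be present in /etc/profile
export SPARK_HOME=/usr/local/spark/spark-1.4.0
export PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin

Evidently ~/.bash_profile lacks equivalent lines, which is why sourcing it changed nothing. Running the scripts by explicit path, for example ./start-master.sh, would have sidestepped PATH entirely.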
With the cluster up, the real test is spark-shell. The startup log is long but worth reading once: the interesting lines are the two WARNs near the top and the "available as sc" / "available as sqlContext" lines at the bottom:

[root@name01 sbin]# spark-shell
16/06/10 21:52:27 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/06/10 21:52:27 INFO spark.SecurityManager: Changing view acls to: root
16/06/10 21:52:27 INFO spark.SecurityManager: Changing modify acls to: root
16/06/10 21:52:27 INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(root); users with modify permissions: Set(root)
16/06/10 21:52:28 INFO spark.HttpServer: Starting HTTP Server
16/06/10 21:52:28 INFO server.Server: jetty-8.y.z-SNAPSHOT
16/06/10 21:52:28 INFO server.AbstractConnector: Started SocketConnector@0.0.0.0:47704
16/06/10 21:52:28 INFO util.Utils: Successfully started service 'HTTP class server' on port 47704.
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/  '_/
   /___/ .__/\_,_/_/ /_/\_\   version 1.4.0
      /_/

Using Scala version 2.10.4 (Java HotSpot(TM) Client VM, Java 1.8.0_20)
Type in expressions to have them evaluated.
Type :help for more information.
16/06/10 21:53:07 WARN util.Utils: Your hostname, name01 resolves to a loopback address: 127.0.0.1; using 192.168.0.105 instead (on interface eth4)
16/06/10 21:53:07 WARN util.Utils: Set SPARK_LOCAL_IP if you need to bind to another address
16/06/10 21:53:07 INFO spark.SparkContext: Running Spark version 1.4.0
16/06/10 21:53:07 INFO spark.SecurityManager: Changing view acls to: root
16/06/10 21:53:07 INFO spark.SecurityManager: Changing modify acls to: root
16/06/10 21:53:07 INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(root); users with modify permissions: Set(root)
16/06/10 21:53:09 INFO slf4j.Slf4jLogger: Slf4jLogger started
16/06/10 21:53:09 INFO Remoting: Starting remoting
16/06/10 21:53:11 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriver@192.168.0.105:56051]
16/06/10 21:53:11 INFO util.Utils: Successfully started service 'sparkDriver' on port 56051.
16/06/10 21:53:11 INFO spark.SparkEnv: Registering MapOutputTracker
16/06/10 21:53:11 INFO spark.SparkEnv: Registering BlockManagerMaster
16/06/10 21:53:12 INFO storage.DiskBlockManager: Created local directory at /tmp/spark-f8c38598-453f-44cd-ac29-19d3749ac919/blockmgr-88e4f13f-37fe-49e2-85d0-5beba92e9907
16/06/10 21:53:12 INFO storage.MemoryStore: MemoryStore started with capacity 267.3 MB
16/06/10 21:53:13 INFO spark.HttpFileServer: HTTP File server directory is /tmp/spark-f8c38598-453f-44cd-ac29-19d3749ac919/httpd-a2e61e27-a091-433e-ae24-850e63e4fc63
16/06/10 21:53:13 INFO spark.HttpServer: Starting HTTP Server
16/06/10 21:53:13 INFO server.Server: jetty-8.y.z-SNAPSHOT
16/06/10 21:53:13 INFO server.AbstractConnector: Started SocketConnector@0.0.0.0:44811
16/06/10 21:53:13 INFO util.Utils: Successfully started service 'HTTP file server' on port 44811.
16/06/10 21:53:13 INFO spark.SparkEnv: Registering OutputCommitCoordinator
16/06/10 21:53:13 INFO server.Server: jetty-8.y.z-SNAPSHOT
16/06/10 21:53:14 INFO server.AbstractConnector: Started SelectChannelConnector@0.0.0.0:4040
16/06/10 21:53:14 INFO util.Utils: Successfully started service 'SparkUI' on port 4040.
16/06/10 21:53:14 INFO ui.SparkUI: Started SparkUI at http://192.168.0.105:4040
16/06/10 21:53:14 INFO executor.Executor: Starting executor ID driver on host localhost
16/06/10 21:53:14 INFO executor.Executor: Using REPL class URI: http://192.168.0.105:47704
16/06/10 21:53:16 INFO util.Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 54439.
16/06/10 21:53:16 INFO netty.NettyBlockTransferService: Server created on 54439
16/06/10 21:53:16 INFO storage.BlockManagerMaster: Trying to register BlockManager
16/06/10 21:53:16 INFO storage.BlockManagerMasterEndpoint: Registering block manager localhost:54439 with 267.3 MB RAM, BlockManagerId(driver, localhost, 54439)
16/06/10 21:53:16 INFO storage.BlockManagerMaster: Registered BlockManager
16/06/10 21:53:17 INFO repl.SparkILoop: Created spark context..
Spark context available as sc.
16/06/10 21:53:23 INFO hive.HiveContext: Initializing execution hive, version 0.13.1
16/06/10 21:53:28 INFO metastore.HiveMetaStore: 0: Opening raw store with implemenation class:org.apache.hadoop.hive.metastore.ObjectStore
16/06/10 21:53:29 INFO metastore.ObjectStore: ObjectStore, initialize called
16/06/10 21:53:30 INFO DataNucleus.Persistence: Property hive.metastore.integral.jdo.pushdown unknown - will be ignored
16/06/10 21:53:30 INFO DataNucleus.Persistence: Property datanucleus.cache.level2 unknown - will be ignored
16/06/10 21:53:31 WARN DataNucleus.Connection: BoneCP specified but not present in CLASSPATH (or one of dependencies)
16/06/10 21:53:39 WARN DataNucleus.Connection: BoneCP specified but not present in CLASSPATH (or one of dependencies)
16/06/10 21:53:42 INFO metastore.ObjectStore: Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes="Table,StorageDescriptor,SerDeInfo,Partition,Database,Type,FieldSchema,Order"
16/06/10 21:53:42 INFO metastore.MetaStoreDirectSql: MySQL check failed, assuming we are not on mysql: Lexical error at line 1, column 5.  Encountered: "@" (64), after : "".
16/06/10 21:53:44 INFO DataNucleus.Datastore: The class "org.apache.hadoop.hive.metastore.model.MFieldSchema" is tagged as "embedded-only" so does not have its own datastore table.
16/06/10 21:53:44 INFO DataNucleus.Datastore: The class "org.apache.hadoop.hive.metastore.model.MOrder" is tagged as "embedded-only" so does not have its own datastore table.
16/06/10 21:53:47 INFO DataNucleus.Datastore: The class "org.apache.hadoop.hive.metastore.model.MFieldSchema" is tagged as "embedded-only" so does not have its own datastore table.
16/06/10 21:53:47 INFO DataNucleus.Datastore: The class "org.apache.hadoop.hive.metastore.model.MOrder" is tagged as "embedded-only" so does not have its own datastore table.
16/06/10 21:53:47 INFO metastore.ObjectStore: Initialized ObjectStore
16/06/10 21:53:48 WARN metastore.ObjectStore: Version information not found in metastore. hive.metastore.schema.verification is not enabled so recording the schema version 0.13.1aa
16/06/10 21:53:50 INFO metastore.HiveMetaStore: Added admin role in metastore
16/06/10 21:53:50 INFO metastore.HiveMetaStore: Added public role in metastore
16/06/10 21:53:50 INFO metastore.HiveMetaStore: No user is added in admin role, since config is empty
16/06/10 21:53:51 INFO session.SessionState: No Tez session required at this point. hive.execution.engine=mr.
16/06/10 21:53:51 INFO repl.SparkILoop: Created sql context (with Hive support)..
SQL context available as sqlContext.
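Two of those warnings deserve a comment before going on. The NativeCodeLoader warning is harmless: Spark simply falls back to pure-Java implementations when the native libhadoop library is absent. The loopback warning means the hostname name01 resolves to 127.0.0.1, most likely via an /etc/hosts entry; Spark worked around it by picking 192.168.0.105 on its own, and the log itself names the explicit fix, which would look roughly like this (the address below is taken from the log above, not from the actual config):

# in conf/spark-env.sh: pin the bind address instead of letting Spark guess
export SPARK_LOCAL_IP=192.168.0.105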
Back at the prompt, the classic sanity check, evaluating a couple of expressions:

scala> 5+8
res0: Int = 13

scala> 2+9
res1: Int = 11

scala>

Spark is up: the Master and Worker daemons are running, and the shell has a working SparkContext (sc) and a Hive-backed SQLContext (sqlContext). The web UI is at http://192.168.0.105:4040, as reported in the log.
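Plain arithmetic only proves the REPL is alive. A slightly stronger smoke test is to push a small job through sc; the snippet below is an illustration written for this article, not output captured from the original session, so the echoed lines are reconstructions of what Spark 1.4 would print:

scala> val nums = sc.parallelize(1 to 1000)
nums: org.apache.spark.rdd.RDD[Int] = ParallelCollectionRDD[0] at parallelize at <console>:21

scala> nums.filter(_ % 2 == 0).count()
res2: Long = 500

scala> nums.map(_ * 2).reduce(_ + _)
res3: Int = 1001000

Each of these actions runs as a real Spark job, so it should also show up on the SparkUI at port 4040. Note that, judging from the "Starting executor ID driver on host localhost" line, this shell was launched in local mode; starting it as spark-shell --master spark://name01:7077 would attach it to the standalone Master and Worker started above instead.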

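To shut the cluster down later, the matching stop scripts seen in the sbin listing apply; these commands were not run in the original session, but the scripts exist there:

[root@name01 sbin]# stop-slaves.sh
[root@name01 sbin]# stop-master.sh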