Phoenix: Console Log of Bulk-Loading CSV Data into HBase

Below is the full console output from running Phoenix's CsvBulkLoadTool against a Hadoop/HBase cluster to bulk-load CSV files from HDFS into the table sp_address.

[root@hadoop1 phoenix-4.8.2-HBase-1.2]# HADOOP_CLASSPATH=/opt/hbase-1.2.1/lib/hbase-protocol-1.2.1.jar:/etc/hbase/conf/  hadoop jar /opt/phoenix-4.8.2-HBase-1.2/phoenix-4.8.2-HBase-1.2-client.jar org.apache.phoenix.mapreduce.CsvBulkLoadTool --table "sp_address" --input /tmp/sp_address/*
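The HADOOP_CLASSPATH prefix puts the hbase-protocol jar and the HBase configuration directory on the job classpath; --table names the target Phoenix table and --input points at the CSV files in HDFS. Both must exist before the tool runs. A minimal sketch of those prerequisites (the column schema and CSV file name are illustrative assumptions, not taken from the log):

# Create the target table in Phoenix via sqlline (schema here is hypothetical)
echo "CREATE TABLE IF NOT EXISTS sp_address (id BIGINT NOT NULL PRIMARY KEY, province VARCHAR, city VARCHAR, district VARCHAR);" > /tmp/create_sp_address.sql
/opt/phoenix-4.8.2-HBase-1.2/bin/sqlline.py localhost:2181 /tmp/create_sp_address.sql

# Stage the CSV input where --input expects it
hdfs dfs -mkdir -p /tmp/sp_address
hdfs dfs -put sp_address.csv /tmp/sp_address/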


17/04/08 00:05:55 INFO util.QueryUtil: Creating connection with the jdbc url: jdbc:phoenix:localhost:2181:/hbase;
17/04/08 00:05:58 INFO zookeeper.RecoverableZooKeeper: Process identifier=hconnection-0x2cd0a532 connecting to ZooKeeper ensemble=localhost:2181
17/04/08 00:05:58 INFO zookeeper.ZooKeeper: Client environment:zookeeper.version=3.4.6-1569965, built on 02/20/2014 09:09 GMT
17/04/08 00:05:58 INFO zookeeper.ZooKeeper: Client environment:host.name=hadoop1
17/04/08 00:05:58 INFO zookeeper.ZooKeeper: Client environment:java.version=1.7.0_76
17/04/08 00:05:58 INFO zookeeper.ZooKeeper: Client environment:java.vendor=Oracle Corporation
17/04/08 00:05:58 INFO zookeeper.ZooKeeper: Client environment:java.home=/opt/jdk1.7/jre
17/04/08 00:05:58 INFO zookeeper.ZooKeeper: Client environment:java.class.path=/opt/hadoop-2.6.4/etc/hadoop:/opt/hadoop-2.6.4/share/hadoop/common/lib/jsch-0.1.42.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jersey-json-1.9.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/junit-4.11.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-digester-1.8.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jsp-api-2.1.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/avro-1.7.4.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/netty-3.6.2.Final.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/activation-1.1.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jaxb-impl-2.2.3-1.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/xmlenc-0.52.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-el-1.0.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/java-xmlbuilder-0.4.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/curator-client-2.6.0.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-net-3.1.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/hadoop-annotations-2.6.4.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-httpclient-3.1.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/hadoop-auth-2.6.4.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-lang-2.6.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/slf4j-log4j12-1.7.5.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-codec-1.4.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-logging-1.1.3.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jaxb-api-2.2.2.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/slf4j-api-1.7.5.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/api-asn1-api-1.0.0-M20.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jetty-util-6.1.26.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/curator-recipes-2.6.0.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/protobuf-java-2.5.0.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-collections-3.2.2.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/curator-framework-2.6.0.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/zookeeper-3.4.6.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/servlet-api-2.5.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jasper-compiler-5.5.23.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/gson-2.2.4.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/api-util-1.0.0-M20.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/httpcore-4.2.5.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/guava-11.0.2.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jackson-core-asl-1.9.13.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jettison-1.1.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/httpclient-4.2.5.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/asm-3.2.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jersey-server-1.9.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jetty-6.1.26.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/hamcrest-core-1.3.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jsr305-1.3.9.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-io-2.4.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-compress-1.4.1.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/mockito-all-1.8.5.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jackson-mapper-asl-1.9.13.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-beanutils-core-1.8.0.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-cli-1.2.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/htrace-core-3.0.4.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/xz-1.0.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-math3-3.1.1.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/apacheds-kerberos-codec-2.0.0-M15.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-beanutils-1.7.0.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jersey-core-1.9.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/apacheds-i18n-2.0.0-M15.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/paranamer-2.3.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/log4j-1.2.17.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jasper-runtime-5.5.23.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/snappy-java-1.0.4.1.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jackson-xc-1.9.13.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/commons-configuration-1.6.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jets3t-0.9.0.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/jackson-jaxrs-1.9.13.jar:/opt/hadoop-2.6.4/share/hadoop/common/lib/stax-api-1.0-2.jar:/opt/hadoop-2.6.4/share/hadoop/common/hadoop-common-2.6.4-tests.jar:/opt/hadoop-2.6.4/share/hadoop/common/hadoop-nfs-2.6.4.jar:/opt/hadoop-2.6.4/share/hadoop/common/hadoop-common-2.6.4.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/jsp-api-2.1.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/netty-3.6.2.Final.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/xmlenc-0.52.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/commons-daemon-1.0.13.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/commons-el-1.0.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/commons-lang-2.6.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/commons-codec-1.4.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/commons-logging-1.1.3.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/jetty-util-6.1.26.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/protobuf-java-2.5.0.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/servlet-api-2.5.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/guava-11.0.2.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/jackson-core-asl-1.9.13.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/xml-apis-1.3.04.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/xercesImpl-2.9.1.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/asm-3.2.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/jersey-server-1.9.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/jetty-6.1.26.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/jsr305-1.3.9.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/commons-io-2.4.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/jackson-mapper-asl-1.9.13.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/commons-cli-1.2.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/htrace-core-3.0.4.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/jersey-core-1.9.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/log4j-1.2.17.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/lib/jasper-runtime-5.5.23.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/hadoop-hdfs-2.6.4-tests.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/hadoop-hdfs-nfs-2.6.4.jar:/opt/hadoop-2.6.4/share/hadoop/hdfs/hadoop-hdfs-2.6.4.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jersey-json-1.9.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/netty-3.6.2.Final.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/activation-1.1.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/javax.inject-1.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jaxb-impl-2.2.3-1.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/commons-httpclient-3.1.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/aopalliance-1.0.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/commons-lang-2.6.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/commons-codec-1.4.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/commons-logging-1.1.3.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jaxb-api-2.2.2.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jetty-util-6.1.26.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/protobuf-java-2.5.0.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/commons-collections-3.2.2.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jersey-client-1.9.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/zookeeper-3.4.6.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/leveldbjni-all-1.8.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/servlet-api-2.5.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/guava-11.0.2.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jackson-core-asl-1.9.13.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jettison-1.1.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/asm-3.2.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jersey-server-1.9.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/guice-3.0.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jetty-6.1.26.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jline-2.12.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jsr305-1.3.9.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/commons-io-2.4.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/commons-compress-1.4.1.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jackson-mapper-asl-1.9.13.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/commons-cli-1.2.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/xz-1.0.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/guice-servlet-3.0.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jersey-guice-1.9.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jersey-core-1.9.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/log4j-1.2.17.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jackson-xc-1.9.13.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/jackson-jaxrs-1.9.13.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/lib/stax-api-1.0-2.jar:/opt/hadoop-2.6.4/share/hadoop/yarn/hadoop-yarn-registry-2.6.4.jar:/opt/hadoop-2.6.4/sh
17/04/08 00:05:58 INFO zookeeper.ClientCnxn: Socket connection established to localhost/127.0.0.1:2181, initiating session
17/04/08 00:05:58 INFO zookeeper.ClientCnxn: Session establishment complete on server localhost/127.0.0.1:2181, sessionid = 0x15b4c5a10fc0007, negotiated timeout = 40000
17/04/08 00:06:00 INFO metrics.Metrics: Initializing metrics system: phoenix
17/04/08 00:06:00 INFO impl.MetricsConfig: loaded properties from hadoop-metrics2.properties
17/04/08 00:06:01 INFO impl.MetricsSystemImpl: Scheduled snapshot period at 10 second(s).
17/04/08 00:06:01 INFO impl.MetricsSystemImpl: phoenix metrics system started
17/04/08 00:06:07 INFO zookeeper.RecoverableZooKeeper: Process identifier=hconnection-0x65286662 connecting to ZooKeeper ensemble=localhost:2181
17/04/08 00:06:07 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:2181 sessionTimeout=90000 watcher=hconnection-0x652866620x0, quorum=localhost:2181, baseZNode=/hbase
17/04/08 00:06:07 INFO zookeeper.ClientCnxn: Opening socket connection to server localhost/127.0.0.1:2181. Will not attempt to authenticate using SASL (unknown error)
17/04/08 00:06:07 INFO zookeeper.ClientCnxn: Socket connection established to localhost/127.0.0.1:2181, initiating session
17/04/08 00:06:07 INFO zookeeper.ClientCnxn: Session establishment complete on server localhost/127.0.0.1:2181, sessionid = 0x15b4c5a10fc0008, negotiated timeout = 40000
17/04/08 00:06:07 INFO mapreduce.MultiHfileOutputFormat:  the table logical name is SP_ADDRESS
17/04/08 00:06:07 INFO client.ConnectionManager$HConnectionImplementation: Closing master protocol: MasterService
17/04/08 00:06:07 INFO client.ConnectionManager$HConnectionImplementation: Closing zookeeper sessionid=0x15b4c5a10fc0008
17/04/08 00:06:07 INFO zookeeper.ClientCnxn: EventThread shut down
17/04/08 00:06:07 INFO zookeeper.ZooKeeper: Session: 0x15b4c5a10fc0008 closed
17/04/08 00:06:07 INFO mapreduce.MultiHfileOutputFormat: Configuring 1 reduce partitions to match current region count
17/04/08 00:06:07 INFO mapreduce.MultiHfileOutputFormat: Writing partition information to /usr/lib/hadoop/tmp/partitions_826981cc-bab0-4873-8d88-b55b7030600c
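MultiHfileOutputFormat sizes the reduce phase to the target table: one reduce partition per region, so this single-region table gets exactly one reducer writing all of its HFiles. For large imports, more parallelism can be had by creating the table pre-split into several regions, for example with Phoenix salting (SALT_BUCKETS is standard Phoenix DDL; the table name and bucket count below are illustrative):

echo "CREATE TABLE sp_address_salted (id BIGINT NOT NULL PRIMARY KEY, province VARCHAR) SALT_BUCKETS = 8;" > /tmp/create_salted.sql
/opt/phoenix-4.8.2-HBase-1.2/bin/sqlline.py localhost:2181 /tmp/create_salted.sql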
17/04/08 00:06:08 INFO zlib.ZlibFactory: Successfully loaded & initialized native-zlib library
17/04/08 00:06:08 INFO compress.CodecPool: Got brand-new compressor [.deflate]
17/04/08 00:06:08 WARN mapreduce.TableMapReduceUtil: The hbase-prefix-tree module jar containing PrefixTreeCodec is not present.  Continuing without it.
17/04/08 00:06:08 INFO mapreduce.AbstractBulkLoadTool: Running MapReduce import job from /tmp/sp_address/* to /tmp/ade23bef-39e2-483b-81ab-b7a63e4483fd
17/04/08 00:06:13 INFO input.FileInputFormat: Total input paths to process : 1
17/04/08 00:06:13 INFO mapreduce.JobSubmitter: number of splits:1
17/04/08 00:06:13 INFO Configuration.deprecation: io.bytes.per.checksum is deprecated. Instead, use dfs.bytes-per-checksum
17/04/08 00:06:14 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_1491634685063_0001
17/04/08 00:06:15 INFO impl.YarnClientImpl: Submitted application application_1491634685063_0001
17/04/08 00:06:15 INFO mapreduce.Job: The url to track the job: http://hadoop1:8088/proxy/application_1491634685063_0001/
17/04/08 00:06:15 INFO mapreduce.Job: Running job: job_1491634685063_0001
17/04/08 00:06:36 INFO mapreduce.Job: Job job_1491634685063_0001 running in uber mode : false
17/04/08 00:06:36 INFO mapreduce.Job:  map 0% reduce 0%
17/04/08 00:06:49 INFO mapreduce.Job:  map 25% reduce 0%
17/04/08 00:06:52 INFO mapreduce.Job:  map 62% reduce 0%
17/04/08 00:06:54 INFO mapreduce.Job:  map 100% reduce 0%
17/04/08 00:07:07 INFO mapreduce.Job:  map 100% reduce 100%
17/04/08 00:07:07 INFO mapreduce.Job: Job job_1491634685063_0001 completed successfully
17/04/08 00:07:07 INFO mapreduce.Job: Counters: 50
        File System Counters
                FILE: Number of bytes read=498980
                FILE: Number of bytes written=1276181
                FILE: Number of read operations=0
                FILE: Number of large read operations=0
                FILE: Number of write operations=0
                HDFS: Number of bytes read=239685
                HDFS: Number of bytes written=333530
                HDFS: Number of read operations=8
                HDFS: Number of large read operations=0
                HDFS: Number of write operations=3
        Job Counters 
                Launched map tasks=1
                Launched reduce tasks=1
                Data-local map tasks=1
                Total time spent by all maps in occupied slots (ms)=14960
                Total time spent by all reduces in occupied slots (ms)=9685
                Total time spent by all map tasks (ms)=14960
                Total time spent by all reduce tasks (ms)=9685
                Total vcore-milliseconds taken by all map tasks=14960
                Total vcore-milliseconds taken by all reduce tasks=9685
                Total megabyte-milliseconds taken by all map tasks=15319040
                Total megabyte-milliseconds taken by all reduce tasks=9917440
        Map-Reduce Framework
                Map input records=7348
                Map output records=7348
                Map output bytes=484278
                Map output materialized bytes=498980
                Input split bytes=105
                Combine input records=0
                Combine output records=0
                Reduce input groups=3674
                Reduce shuffle bytes=498980
                Reduce input records=7348
                Reduce output records=18370
                Spilled Records=14696
                Shuffled Maps =1
                Failed Shuffles=0
                Merged Map outputs=1
                GC time elapsed (ms)=681
                CPU time spent (ms)=14790
                Physical memory (bytes) snapshot=522436608
                Virtual memory (bytes) snapshot=1779150848
                Total committed heap usage (bytes)=258338816
        Phoenix MapReduce Import
                Upserts Done=7348
        Shuffle Errors
                BAD_ID=0
                CONNECTION=0
                IO_ERROR=0
                WRONG_LENGTH=0
                WRONG_MAP=0
                WRONG_REDUCE=0
        File Input Format Counters 
                Bytes Read=239580
        File Output Format Counters 
                Bytes Written=333530
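The counters show the import phase succeeded: all 7,348 CSV rows were parsed and upserted (Upserts Done=7348), and the reducer expanded them into 18,370 KeyValues, since each row becomes several HBase cells. The generated HFiles sit in the temporary output directory named on the next line and can be listed before the load (path taken from the log):

hdfs dfs -ls -R /tmp/ade23bef-39e2-483b-81ab-b7a63e4483fd/SP_ADDRESS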
17/04/08 00:07:07 INFO mapreduce.AbstractBulkLoadTool: Loading HFiles from /tmp/ade23bef-39e2-483b-81ab-b7a63e4483fd
17/04/08 00:07:07 INFO Configuration.deprecation: io.bytes.per.checksum is deprecated. Instead, use dfs.bytes-per-checksum
17/04/08 00:07:07 INFO zookeeper.RecoverableZooKeeper: Process identifier=hconnection-0x2e51bd06 connecting to ZooKeeper ensemble=localhost:2181
17/04/08 00:07:07 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:2181 sessionTimeout=90000 watcher=hconnection-0x2e51bd060x0, quorum=localhost:2181, baseZNode=/hbase
17/04/08 00:07:07 INFO zookeeper.ClientCnxn: Opening socket connection to server localhost/127.0.0.1:2181. Will not attempt to authenticate using SASL (unknown error)
17/04/08 00:07:07 INFO zookeeper.ClientCnxn: Socket connection established to localhost/127.0.0.1:2181, initiating session
17/04/08 00:07:07 INFO zookeeper.ClientCnxn: Session establishment complete on server localhost/127.0.0.1:2181, sessionid = 0x15b4c5a10fc0009, negotiated timeout = 40000
17/04/08 00:07:07 INFO mapreduce.AbstractBulkLoadTool: Loading HFiles for SP_ADDRESS from /tmp/ade23bef-39e2-483b-81ab-b7a63e4483fd/SP_ADDRESS
17/04/08 00:07:07 WARN mapreduce.LoadIncrementalHFiles: managed connection cannot be used for bulkload. Creating unmanaged connection.
17/04/08 00:07:07 INFO zookeeper.RecoverableZooKeeper: Process identifier=hconnection-0x46f4a1df connecting to ZooKeeper ensemble=localhost:2181
17/04/08 00:07:07 INFO zookeeper.ZooKeeper: Initiating client connection, connectString=localhost:2181 sessionTimeout=90000 watcher=hconnection-0x46f4a1df0x0, quorum=localhost:2181, baseZNode=/hbase
17/04/08 00:07:07 INFO zookeeper.ClientCnxn: Opening socket connection to server localhost/127.0.0.1:2181. Will not attempt to authenticate using SASL (unknown error)
17/04/08 00:07:07 INFO zookeeper.ClientCnxn: Socket connection established to localhost/127.0.0.1:2181, initiating session
17/04/08 00:07:07 INFO zookeeper.ClientCnxn: Session establishment complete on server localhost/127.0.0.1:2181, sessionid = 0x15b4c5a10fc000a, negotiated timeout = 40000
17/04/08 00:07:07 INFO hfile.CacheConfig: CacheConfig:disabled
17/04/08 00:07:08 INFO mapreduce.LoadIncrementalHFiles: Trying to load hfile=hdfs://mycluster/tmp/ade23bef-39e2-483b-81ab-b7a63e4483fd/SP_ADDRESS/0/13b53620e9694a67af786743997a47e8 first=\x80\x00\x00\x01 last=\x80\x00\x0EZ
17/04/08 00:08:17 INFO client.RpcRetryingCaller: Call exception, tries=10, retries=35, started=69177 ms ago, cancelled=false, msg=row '' on table 'SP_ADDRESS' at region=SP_ADDRESS,,1491623233533.f481590417585a50a942090536335544., hostname=hadoop3,16020,1491634956292, seqNum=6
17/04/08 00:08:37 INFO client.RpcRetryingCaller: Call exception, tries=11, retries=35, started=89337 ms ago, cancelled=false, msg=row '' on table 'SP_ADDRESS' at region=SP_ADDRESS,,1491623233533.f481590417585a50a942090536335544., hostname=hadoop3,16020,1491634956292, seqNum=6
17/04/08 00:08:57 INFO client.RpcRetryingCaller: Call exception, tries=12, retries=35, started=109484 ms ago, cancelled=false, msg=row '' on table 'SP_ADDRESS' at region=SP_ADDRESS,,1491623233533.f481590417585a50a942090536335544., hostname=hadoop3,16020,1491634956292, seqNum=6
17/04/08 00:09:17 INFO client.RpcRetryingCaller: Call exception, tries=13, retries=35, started=129574 ms ago, cancelled=false, msg=row '' on table 'SP_ADDRESS' at region=SP_ADDRESS,,1491623233533.f481590417585a50a942090536335544., hostname=hadoop3,16020,1491634956292, seqNum=6
17/04/08 00:09:37 INFO client.RpcRetryingCaller: Call exception, tries=14, retries=35, started=149665 ms ago, cancelled=false, msg=row '' on table 'SP_ADDRESS' at region=SP_ADDRESS,,1491623233533.f481590417585a50a942090536335544., hostname=hadoop3,16020,1491634956292, seqNum=6
17/04/08 00:09:58 INFO client.RpcRetryingCaller: Call exception, tries=15, retries=35, started=169820 ms ago, cancelled=false, msg=row '' on table 'SP_ADDRESS' at region=SP_ADDRESS,,1491623233533.f481590417585a50a942090536335544., hostname=hadoop3,16020,1491634956292, seqNum=6
17/04/08 00:10:18 INFO client.RpcRetryingCaller: Call exception, tries=16, retries=35, started=189933 ms ago, cancelled=false, msg=row '' on table 'SP_ADDRESS' at region=SP_ADDRESS,,1491623233533.f481590417585a50a942090536335544., hostname=hadoop3,16020,1491634956292, seqNum=6
17/04/08 00:10:38 INFO client.RpcRetryingCaller: Call exception, tries=17, retries=35, started=210008 ms ago, cancelled=false, msg=row '' on table 'SP_ADDRESS' at region=SP_ADDRESS,,1491623233533.f481590417585a50a942090536335544., hostname=hadoop3,16020,1491634956292, seqNum=6
17/04/08 00:10:58 INFO client.RpcRetryingCaller: Call exception, tries=18, retries=35, started=230112 ms ago, cancelled=false, msg=row '' on table 'SP_ADDRESS' at region=SP_ADDRESS,,1491623233533.f481590417585a50a942090536335544., hostname=hadoop3,16020,1491634956292, seqNum=6
17/04/08 00:11:18 INFO client.RpcRetryingCaller: Call exception, tries=19, retries=35, started=250247 ms ago, cancelled=false, msg=row '' on table 'SP_ADDRESS' at region=SP_ADDRESS,,1491623233533.f481590417585a50a942090536335544., hostname=hadoop3,16020,1491634956292, seqNum=6
17/04/08 00:11:38 INFO client.RpcRetryingCaller: Call exception, tries=20, retries=35, started=270339 ms ago, cancelled=false, msg=row '' on table 'SP_ADDRESS' at region=SP_ADDRESS,,1491623233533.f481590417585a50a942090536335544., hostname=hadoop3,16020,1491634956292, seqNum=6
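The MapReduce phase finished cleanly, so what is stuck is the final hand-off: LoadIncrementalHFiles repeatedly asks the region server hosting SP_ADDRESS (hadoop3,16020 per the log) to adopt the staged HFile, and every attempt fails; the client will retry up to 35 times before giving up. Typical causes are an unreachable or restarting region server, or HDFS permissions that prevent the region server from reading and moving the staged file. A first-pass check might look like this (standard HBase and HDFS commands; the host and path come from the log):

# Is the region server alive and is the SP_ADDRESS region assigned?
echo "status 'detailed'" | /opt/hbase-1.2.1/bin/hbase shell

# Is the staged HFile present and readable, and who owns it?
hdfs dfs -ls /tmp/ade23bef-39e2-483b-81ab-b7a63e4483fd/SP_ADDRESS/0/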
