hadoop、hbase解决需求jar包问题
来源:互联网 发布:手机必备软件大全 编辑:程序博客网 时间:2024/05/16 09:58
1、出现的问题
编写完 Java 代码,实现一个在 HBase 中创建表的实验需求。
import java.io.IOException;import java.util.ArrayList;import java.util.List;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.hbase.HBaseConfiguration;import org.apache.hadoop.hbase.HColumnDescriptor;import org.apache.hadoop.hbase.HTableDescriptor;import org.apache.hadoop.hbase.KeyValue;import org.apache.hadoop.hbase.MasterNotRunningException;import org.apache.hadoop.hbase.ZooKeeperConnectionException;import org.apache.hadoop.hbase.client.Delete;import org.apache.hadoop.hbase.client.Get;import org.apache.hadoop.hbase.client.HBaseAdmin;import org.apache.hadoop.hbase.client.HTable;import org.apache.hadoop.hbase.client.HTablePool;import org.apache.hadoop.hbase.client.Put;import org.apache.hadoop.hbase.client.Result;import org.apache.hadoop.hbase.client.ResultScanner;import org.apache.hadoop.hbase.client.Scan;import org.apache.hadoop.hbase.filter.Filter;import org.apache.hadoop.hbase.filter.FilterList;import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;import org.apache.hadoop.hbase.util.Bytes;public class SqTest { public static Configuration configuration; static { configuration = HBaseConfiguration.create(); configuration.set("hbase.zookeeper.property.clientPort", "2181"); configuration.set("hbase.zookeeper.quorum", "192.168.18.101"); configuration.set("hbase.master", "192.168.8.101:60000"); } public static void main(String[] args) { createTable("sq");} public static void createTable(String tableName) { System.out.println("start create table ......"); try { HBaseAdmin hBaseAdmin = new HBaseAdmin(configuration); if (hBaseAdmin.tableExists(tableName)) { hBaseAdmin.disableTable(tableName); hBaseAdmin.deleteTable(tableName); System.out.println(tableName + " is exist,detele...."); } HTableDescriptor tableDescriptor = new HTableDescriptor(tableName); tableDescriptor.addFamily(new HColumnDescriptor("column1")); tableDescriptor.addFamily(new HColumnDescriptor("column2")); 
tableDescriptor.addFamily(new HColumnDescriptor("column3")); hBaseAdmin.createTable(tableDescriptor); } catch (MasterNotRunningException e) { e.printStackTrace(); } catch (ZooKeeperConnectionException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } System.out.println("end create table ......"); }}
在Hadoop环境下运行,用jdk进行javac编译,出现了下列提示问题:
SqTest.java:6: error: package org.apache.hadoop.hbase does not existimport org.apache.hadoop.hbase.HBaseConfiguration; SqTest.java:7: error: package org.apache.hadoop.hbase does not existimport org.apache.hadoop.hbase.HColumnDescriptor; SqTest.java:8: error: package org.apache.hadoop.hbase does not existimport org.apache.hadoop.hbase.HTableDescriptor; SqTest.java:9: error: package org.apache.hadoop.hbase does not existimport org.apache.hadoop.hbase.KeyValue; ^SqTest.java:10: error: package org.apache.hadoop.hbase does not existimport org.apache.hadoop.hbase.MasterNotRunningException; SqTest.java:11: error: package org.apache.hadoop.hbase does not existimport org.apache.hadoop.hbase.ZooKeeperConnectionException; SqTest.java:12: error: package org.apache.hadoop.hbase.client does not existimport org.apache.hadoop.hbase.client.Delete; SqTest.java:13: error: package org.apache.hadoop.hbase.client does not existimport org.apache.hadoop.hbase.client.Get; SqTest.java:14: error: package org.apache.hadoop.hbase.client does not existimport org.apache.hadoop.hbase.client.HBaseAdmin; SqTest.java:15: error: package org.apache.hadoop.hbase.client does not existimport org.apache.hadoop.hbase.client.HTable; SqTest.java:16: error: package org.apache.hadoop.hbase.client does not existimport org.apache.hadoop.hbase.client.HTablePool; 。。。。。。
提示的原因就是编译时的类路径(classpath)中缺少所需的 jar 包。
2、解决办法
想到3个解决办法:
① 将依赖的jar包放在每个节点的jdk/jre/lib/ext/目录下面。
这种办法最好少使用:jre/lib/ext 是扩展类加载器的目录,放进去的 jar 会影响该 JDK 上运行的所有 Java 程序,而且需要在每个节点上手动同步维护,容易造成版本混乱。
② 将依赖的 jar 包的地址全部写进系统环境变量 CLASSPATH 中。先通过命令找出所需 jar 包的地址:
[hadoop@bd2 ~]$ hadoop classpath/home/hadoop/hadoop-2.6.0/etc/hadoop:/home/hadoop/hadoop-2.6.0/share/hadoop/common/lib/*:/home/hadoop/hadoop-2.6.0/share/hadoop/common/*:/home/hadoop/hadoop-2.6.0/share/hadoop/hdfs:/home/hadoop/hadoop-2.6.0/share/hadoop/hdfs/lib/*:/home/hadoop/hadoop-2.6.0/share/hadoop/hdfs/*:/home/hadoop/hadoop-2.6.0/share/hadoop/yarn/lib/*:/home/hadoop/hadoop-2.6.0/share/hadoop/yarn/*:/home/hadoop/hadoop-2.6.0/share/hadoop/mapreduce/lib/*:/home/hadoop/hadoop-2.6.0/share/hadoop/mapreduce/*:/home/hadoop/hadoop-2.6.0/contrib/capacity-scheduler/*.jar
类似的也有
[hadoop@bd2 ~]$ hbase classpath/usr/local/hbase-1.0.3/conf:/usr/local/jdk1.7.0_79/lib/tools.jar:/usr/local/hbase-1.0.3:/usr/local/hbase-1.0.3/lib/activation-1.1.jar:/usr/local/hbase-1.0.3/lib/aopalliance-1.0.jar:/usr/local/hbase-1.0.3/lib/apacheds-i18n-2.0.0-M15.jar:/usr/local/hbase-1.0.3/lib/apacheds-kerberos-codec-2.0.0-M15.jar:/usr/local/hbase-1.0.3/lib/api-asn1-api-1.0.0-M20.jar:/usr/local/hbase-1.0.3/lib/api-util-1.0.0-M20.jar:/usr/local/hbase-1.0.3/lib/asm-3.1.jar:/usr/local/hbase-1.0.3/lib/avro-1.7.4.jar:/usr/local/hbase-1.0.3/lib/commons-beanutils-1.7.0.jar:/usr/local/hbase-1.0.3/lib/commons-beanutils-core-1.8.0.jar:/usr/local/hbase-1.0.3/lib/commons-cli-1.2.jar:/usr/local/hbase-1.0.3/lib/commons-codec-1.9.jar:/usr/local/hbase-1.0.3/lib/commons-collections-3.2.2.jar:/usr/local/hbase-1.0.3/lib/commons-compress-1.4.1.jar:/usr/local/hbase-1.0.3/lib/commons-configuration-1.6.jar:/usr/local/hbase-1.0.3/lib/commons-daemon-1.0.13.jar:/usr/local/hbase-1.0.3/lib/commons-digester-1.8.jar:/usr/local/hbase-1.0.3/lib/commons-el-1.0.jar:/usr/local/hbase-1.0.3/lib/commons-httpclient-3.1.jar
vi .bash_profile
把上面命令输出的 jar 包地址按照下面的格式追加到该文件末尾,然后执行 source .bash_profile 使其立即生效
export CLASSPATH=.:/home/hadoop/hadoop-2.6.0/etc/hadoop:/home/hadoop/hadoop-2.6.0/share/hadoop/common/lib/*:/home/hadoop/hadoop-2.6.0/share/hadoop/common/*:/home/hadoop/hadoop-2.6.0/share/hadoop/hdfs:/home/hadoop/hadoop-2.6.0/share/hadoop/hdfs/lib/*:/home/hadoop/hadoop-2.6.0/share/hadoop/hdfs/*:/home/hadoop/hadoop-2.6.0/share/hadoop/yarn/lib/*:/home/hadoop/hadoop-2.6.0/share/hadoop/yarn/*:/home/hadoop/hadoop-2.6.0/share/hadoop/mapreduce/lib/*:/home/hadoop/hadoop-2.6.0/share/hadoop/mapreduce/*:/home/hadoop/hadoop-2.6.0/contrib/capacity-scheduler/*.jar
③ 在创建项目的时候,将需求的jar放在lib目录下面,编译好源代码后,将jar包和class类文件一起打包即可。
0 0
- hadoop、hbase解决需求jar包问题
- hadoop jar运行hbase相关jar包的classpath问题
- Hbase与Hadoop结合出现的jar包冲突问题
- hadoop MapReduce程序中解决第三方jar包问题
- 通过maven-shade-plugin 解决Elasticsearch与hbase的jar包冲突问题
- 通过maven-shade-plugin 解决Elasticsearch与hbase的jar包冲突问题
- 通过maven-shade-plugin 解决Elasticsearch与hbase的jar包冲突问题
- 解决jar包冲突问题
- Hadoop MapReduce程序中解决第三方jar包问题--终极解决方案
- Hadoop MapReduce程序中解决第三方jar包问题--终极解决方案
- Hadoop MapReduce程序中解决第三方jar包问题--终极解决方案
- 解决Hadoop运行jar包时MapReduce任务启动前OutOfMemoryError:Java heap space问题
- 解决 Hadoop 集群执行包含 third-part lib 的 jar包的 ClassNotFoundException 问题
- Hbase连接异常,JAR包冲突解决攻略
- ClassLoader解决jar包冲突问题
- 解决eclipse中找不到jar包问题
- 解决jar包冲突的问题
- eclipse解决jar包重复问题
- MySQL服务器的连接数设置
- WebView加载网页基本配置
- Android studio 安装 opencv-Android环境搭建
- 利用模板实现Stack
- hcm20170426
- hadoop、hbase解决需求jar包问题
- Linux proc/%d/stat文件详解
- 苹果应用上架,一些信息的勾选(2017年4月27日)
- Mybatis 多表关联查询(2) one-to-many关系
- JS中的call()方法和apply()方法用法总结
- Can we pass nontype parameters to templates?
- Quartz 2.2.3持久化
- 笔记2-JAVA的初学浅识(开发环境安装)
- 数据统计分析——常用统计检验方法