Generating HFiles and Bulk Loading Them into HBase

1. Generating HFiles with MapReduce
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat;
import org.apache.hadoop.hbase.mapreduce.KeyValueSortReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class TestHFileToHBase {

    public static class TestHFileToHBaseMapper extends
            Mapper<LongWritable, Text, ImmutableBytesWritable, KeyValue> {

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Input lines are tab-separated: <row key> TAB <value>.
            String[] values = value.toString().split("\t", 2);
            byte[] row = Bytes.toBytes(values[0]);
            ImmutableBytesWritable k = new ImmutableBytesWritable(row);
            // Write one KeyValue per line into family/qualifier PROTOCOLID.
            KeyValue kvProtocol = new KeyValue(row, "PROTOCOLID".getBytes(),
                    "PROTOCOLID".getBytes(), values[1].getBytes());
            context.write(k, kvProtocol);

            // KeyValue kvSrcip = new KeyValue(row, "SRCIP".getBytes(),
            //         "SRCIP".getBytes(), values[1].getBytes());
            // context.write(k, kvSrcip);
        }
    }

    public static void main(String[] args)
            throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = HBaseConfiguration.create();
        Job job = new Job(conf, "TestHFileToHBase");
        job.setJarByClass(TestHFileToHBase.class);

        job.setOutputKeyClass(ImmutableBytesWritable.class);
        job.setOutputValueClass(KeyValue.class);

        job.setMapperClass(TestHFileToHBaseMapper.class);
        job.setReducerClass(KeyValueSortReducer.class);
        job.setOutputFormatClass(HFileOutputFormat.class);
        // job.setNumReduceTasks(4);
        // job.setPartitionerClass(org.apache.hadoop.hbase.mapreduce.SimpleTotalOrderPartitioner.class);

        // configureIncrementalLoad() sets up the total-order partitioner and
        // reducer to match the target table's region boundaries, so the table
        // must already exist.
        HTable table = new HTable(conf, "hua");
        HFileOutputFormat.configureIncrementalLoad(job, table);

        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
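
Because HFileOutputFormat.configureIncrementalLoad(job, table) reads the target table's current region boundaries to configure the partitioner, the table has to exist before the job is submitted. Below is a minimal sketch of creating it in code, assuming the old-style HBase client API used above and the table name "hua" with the single column family "PROTOCOLID" that the mapper writes to.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreateTargetTable {

    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HBaseAdmin admin = new HBaseAdmin(conf);

        // Table and family names must match what the mapper writes.
        HTableDescriptor desc = new HTableDescriptor("hua");
        desc.addFamily(new HColumnDescriptor("PROTOCOLID"));

        if (!admin.tableExists("hua")) {
            admin.createTable(desc);
        }
        admin.close();
    }
}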

2. Bulk Loading the HFiles into HBase

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.util.Bytes;

public class TestLoadIncrementalHFileToHBase {

    // private static final byte[] TABLE = Bytes.toBytes("hua");
    // private static final byte[] QUALIFIER = Bytes.toBytes("PROTOCOLID");
    // private static final byte[] FAMILY = Bytes.toBytes("PROTOCOLID");

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // args[0]: table name; args[1]: HDFS directory holding the generated HFiles.
        // byte[] TABLE = Bytes.toBytes("hua");
        byte[] TABLE = Bytes.toBytes(args[0]);
        HTable table = new HTable(conf, TABLE);
        LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
        // Moves the HFiles into the table's regions; no write path is involved.
        loader.doBulkLoad(new Path(args[1]), table);
        // loader.doBulkLoad(new Path("/hua/testHFileResult/"), table);
        table.close();
    }
}
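
Once doBulkLoad() returns, the loaded rows are immediately visible to normal reads. The sketch below, using the same old-style HTable API and a hypothetical row key passed on the command line, shows one way to spot-check the import with a Get.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class VerifyBulkLoad {

    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        HTable table = new HTable(conf, args[0]);   // table name, e.g. "hua"

        // Fetch one row that the input file is known to contain.
        Get get = new Get(Bytes.toBytes(args[1]));  // row key to check
        Result result = table.get(get);
        byte[] value = result.getValue(Bytes.toBytes("PROTOCOLID"),
                                       Bytes.toBytes("PROTOCOLID"));

        System.out.println(value == null ? "row not found"
                                         : Bytes.toString(value));
        table.close();
    }
}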

This article is reposted from: http://blog.csdn.net/zhuyu_deng/article/details/38868521#comments
