Inserting Data into an HBase Table with a MapReduce Program

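The program below is a map-only MapReduce job that loads data into the HBase table "bbb". Each line of the HDFS source file BANK_DATA.txt is expected to carry five tab-separated fields: a row key followed by bank_id, bank_name, area_id, and bank_type. The mapper turns each line into a Put against column family "info" and writes it straight to the table; there is no reduce phase.

The target table has to exist before the job runs, because the job only writes rows and HBase will not create the table on the fly. A minimal creation sketch, assuming the same cluster settings as the loader and an HBase 0.9x/1.x-era client API to match the loader code (the CreateTable class name is illustrative):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreateTable {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.zookeeper.quorum", "172.17.199.4,172.17.199.14,172.17.199.16");
    conf.set("hbase.zookeeper.property.clientPort", "2181");
    HBaseAdmin admin = new HBaseAdmin(conf);
    try {
      if (!admin.tableExists("bbb")) {
        HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("bbb"));
        desc.addFamily(new HColumnDescriptor("info")); // column family used by the loader
        admin.createTable(desc);
      }
    } finally {
      admin.close();
    }
  }
}

With the table in place, the loader itself looks like this: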

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

public class LoadData {

  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.zookeeper.quorum", "172.17.199.4,172.17.199.14,172.17.199.16"); // ZooKeeper quorum of the HBase cluster
    conf.set("hbase.zookeeper.property.clientPort", "2181"); // ZooKeeper client port
    Job job = null;
    try{
        job = Job.getInstance(conf, "load");
        job.setJarByClass(LoadData.class);
        job.setMapperClass(LoadDataMapper.class);
        job.setInputFormatClass(TextInputFormat.class);
        FileInputFormat.addInputPath(job, new Path("hdfs://172.17.199.14:8020/BANK_DATA.txt")); // tab-separated source file on HDFS
        // Map-only load: a null reducer sends the mapper's Puts straight to table "bbb"
        TableMapReduceUtil.initTableReducerJob("bbb", null, job);
        job.setNumReduceTasks(0);
        TableMapReduceUtil.addDependencyJars(job); // ship the HBase client jars with the job
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }catch(IOException e){
        e.printStackTrace();
    }catch(ClassNotFoundException e){
        e.printStackTrace();
    }catch(InterruptedException e){
        e.printStackTrace();
    }
  }
 
  public static class LoadDataMapper extends Mapper<Object,Text,ImmutableBytesWritable,Put>{
    @Override
    public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
        String[] st = value.toString().split("\t"); // input lines are tab-separated
        // The first field is the row key; the remaining fields become columns in family "info"
        ImmutableBytesWritable rowkey = new ImmutableBytesWritable(Bytes.toBytes(st[0]));
        Put p = new Put(Bytes.toBytes(st[0]));
        p.add(Bytes.toBytes("info"), Bytes.toBytes("bank_id"), Bytes.toBytes(st[1]));
        p.add(Bytes.toBytes("info"), Bytes.toBytes("bank_name"), Bytes.toBytes(st[2]));
        p.add(Bytes.toBytes("info"), Bytes.toBytes("area_id"), Bytes.toBytes(st[3]));
        p.add(Bytes.toBytes("info"), Bytes.toBytes("bank_type"), Bytes.toBytes(st[4]));
        context.write(rowkey,p);
    }
  }

}
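Once the job completes (package the classes into a jar and launch it with the standard hadoop jar command; addDependencyJars already ships the HBase client jars along with the job), individual rows can be spot-checked with the HBase client API. A minimal read-back sketch under the same assumptions as above; the CheckRow class name is illustrative, and the row key to look up is passed as the first command-line argument:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class CheckRow {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.zookeeper.quorum", "172.17.199.4,172.17.199.14,172.17.199.16");
    conf.set("hbase.zookeeper.property.clientPort", "2181");
    HTable table = new HTable(conf, "bbb");
    try {
      // args[0] is a row key taken from the first column of BANK_DATA.txt
      Result result = table.get(new Get(Bytes.toBytes(args[0])));
      byte[] name = result.getValue(Bytes.toBytes("info"), Bytes.toBytes("bank_name"));
      System.out.println(name == null ? "row not found" : Bytes.toString(name));
    } finally {
      table.close();
    }
  }
}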

