How MapReduce Reads and Writes HBase

This post shares a complete example of how a MapReduce job reads from and writes to HBase. Since many readers are still unfamiliar with the topic, the full code is given below for reference: the job scans the source table t_ui_all with a TableMapper and writes every row back into the table test2 through a TableReducer. Hopefully you will find it useful.





import java.io.IOException;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;


public class MapHbase {

    // Mapper: reads every cell of a row from the source HBase table and emits
    // (row key, "qualifier#value") pairs.
    public static class TokenizerMapper_hbase extends TableMapper<Text, Text> {

        @Override
        public void map(ImmutableBytesWritable key, Result value, Context context)
                throws IOException, InterruptedException {
            List<KeyValue> kvs = value.list();
            for (KeyValue val : kvs) {
                System.out.println("column " + new String(val.getQualifier()) + " value "
                        + new String(val.getValue()) + " key " + new String(val.getRow()));
                String colname = new String(val.getQualifier());
                String colvalue = new String(val.getValue());
                context.write(new Text(val.getRow()), new Text(colname + "#" + colvalue));
            }
        }
    }

    // Reducer: rebuilds each row as a Put on column family "cf" and writes it to
    // the target table through TableOutputFormat.
    public static class IntSumReducer_hbase extends TableReducer<Text, Text, NullWritable> {

        @Override
        public void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            Put put = new Put(Bytes.toBytes(key.toString()));
            for (Text i : values) {
                // Each mapper output value is encoded as "qualifier#value".
                String[] val = i.toString().split("#");
                if (val.length == 2) {
                    put.add(Bytes.toBytes("cf"), Bytes.toBytes(val[0]), Bytes.toBytes(val[1]));
                } else {
                    // Cells with an empty value arrive as "qualifier#" and split to a single token.
                    put.add(Bytes.toBytes("cf"), Bytes.toBytes(val[0]), Bytes.toBytes(""));
                }
            }
            context.write(NullWritable.get(), put);
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // ZooKeeper quorum and client port of the HBase cluster.
        conf.set("hbase.zookeeper.quorum",
                "datanode01.isesol.com,datanode02.isesol.com,datanode03.isesol.com,datanode04.isesol.com,cmserver.isesol.com");
        conf.set("hbase.zookeeper.property.clientPort", "2181");

        // Scan only the "cf" column family of the source table.
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes("cf"));
        // scan.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("content"));
        // scan.setMaxVersions();

        Job job = new Job(conf, "t_ui_all");
        job.setJarByClass(MapHbase.class);

        // Read from table "t_ui_all" with the mapper and write to table "test2" with the
        // reducer; TableMapReduceUtil sets the input/output formats and map output types.
        TableMapReduceUtil.initTableMapperJob("t_ui_all", scan, TokenizerMapper_hbase.class,
                Text.class, Text.class, job);
        TableMapReduceUtil.initTableReducerJob("test2", IntSumReducer_hbase.class, job);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
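Note that initTableReducerJob does not create the output table: test2 must already exist with a column family named cf before the job is submitted. The snippet below is a minimal sketch of how that table could be created from Java, assuming the same older HBaseAdmin / HTableDescriptor client API generation (roughly HBase 0.96–1.x) that the KeyValue and Put.add calls above belong to; the class name CreateTargetTable is just an illustrative placeholder, not part of the original example.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreateTargetTable {

    public static void main(String[] args) throws Exception {
        // Same ZooKeeper settings as the MapReduce job above.
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum",
                "datanode01.isesol.com,datanode02.isesol.com,datanode03.isesol.com,datanode04.isesol.com,cmserver.isesol.com");
        conf.set("hbase.zookeeper.property.clientPort", "2181");

        HBaseAdmin admin = new HBaseAdmin(conf);
        try {
            if (!admin.tableExists("test2")) {
                // "cf" is the column family the reducer writes its Puts into.
                HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("test2"));
                desc.addFamily(new HColumnDescriptor("cf"));
                admin.createTable(desc);
            }
        } finally {
            admin.close();
        }
    }
}

The same thing can of course be done interactively from the HBase shell (create 'test2', 'cf') before running the job; the Java version is only useful when table setup needs to be scripted together with job submission.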

That is everything in "How MapReduce Reads and Writes HBase". Thanks for reading, and hopefully the example above gives you a working starting point for your own jobs.

