Operating on HBase Data with MapReduce on Hadoop 2
1. Read data from HBase and write the computed results back into HBase
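The first job counts rows per group: the mapper scans a source table named family3 and emits (GroupID, 1) for each cell whose qualifier is GroupID, and the reducer sums the ones and writes one Put per group into family_sum_by_groupid. As a minimal, hypothetical sketch of what seeding the source data could look like (only the table name family3, the column family cf, and the qualifier GroupID come from the code below; the row key and group value are invented):

Configuration conf = HBaseConfiguration.create();
HTable table = new HTable(conf, "family3");      // classic client API, same era as the code below
Put put = new Put(Bytes.toBytes("family-001"));  // hypothetical row key
put.add(Bytes.toBytes("cf"), Bytes.toBytes("GroupID"), Bytes.toBytes("G01")); // hypothetical group id
table.put(put);
table.close();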
package com.yeliang;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

/**
 * Reads data from HBase, analyzes it, and writes the results back into HBase.
 *
 * @author liang.ye
 */
public class FamilyHBase {

    public static class Map extends TableMapper<Text, IntWritable> {
        @Override
        protected void map(ImmutableBytesWritable key, Result value, Context context)
                throws IOException, InterruptedException {
            // Emit (GroupID value, 1) for every cell whose qualifier is "GroupID".
            for (Cell cell : value.rawCells()) {
                if (new String(CellUtil.cloneQualifier(cell)).equals("GroupID")) {
                    context.write(new Text(new String(CellUtil.cloneValue(cell))),
                            new IntWritable(1));
                }
            }
        }
    }

    public static class Reduce extends TableReducer<Text, IntWritable, NullWritable> {
        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable i : values) {
                sum += i.get();
            }
            byte[] keyBytes = Bytes.toBytes(key.toString());
            if (keyBytes.length > 0) {
                // One row per group: row key = GroupID, column family "content",
                // qualifier "count", value = the count stored as a string.
                Put put = new Put(keyBytes);
                put.add(Bytes.toBytes("content"), Bytes.toBytes("count"),
                        Bytes.toBytes(String.valueOf(sum)));
                context.write(NullWritable.get(), put);
            }
        }
    }

    public static void createHBaseTable(String tableName) throws IOException {
        HTableDescriptor htd = new HTableDescriptor(tableName);
        HColumnDescriptor col = new HColumnDescriptor("content");
        htd.addFamily(col);
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.56.101,192.168.56.102,192.168.56.103");
        HBaseAdmin admin = new HBaseAdmin(conf);
        if (admin.tableExists(tableName)) {
            System.out.println("table exists, trying to recreate table......");
            admin.disableTable(tableName);
            admin.deleteTable(tableName);
        }
        System.out.println("create new table: " + tableName);
        admin.createTable(htd);
        admin.close();
    }

    public static void main(String[] args) throws IOException,
            InterruptedException, ClassNotFoundException {
        String tableName = "family_sum_by_groupid";
        Configuration conf = HBaseConfiguration.create();
        // Set explicitly if hbase-site.xml is not on the classpath:
        // conf.set("hbase.zookeeper.quorum", "192.168.56.101,192.168.56.102,192.168.56.103");
        createHBaseTable(tableName);
        Job job = Job.getInstance(conf, "family_sum_by_groupid");
        job.setJarByClass(FamilyHBase.class);
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes("cf"));
        TableMapReduceUtil.initTableMapperJob("family3", scan, Map.class,
                Text.class, IntWritable.class, job);
        TableMapReduceUtil.initTableReducerJob(tableName, Reduce.class, job);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
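Once the job finishes, the result table can be checked with an ordinary client-side scan. A minimal sketch, assuming the names used above (table family_sum_by_groupid, family content, qualifier count) and the classic HTable/ResultScanner client API:

Configuration conf = HBaseConfiguration.create();
HTable table = new HTable(conf, "family_sum_by_groupid");
ResultScanner scanner = table.getScanner(new Scan());
for (Result r : scanner) {
    // Row key is the GroupID; the reducer stored the count as a string.
    System.out.println(Bytes.toString(r.getRow()) + " = "
            + Bytes.toString(r.getValue(Bytes.toBytes("content"), Bytes.toBytes("count"))));
}
scanner.close();
table.close();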
2. Read data from HDFS and write the computed results into HBase
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

/**
 * Reads data from HDFS, analyzes it, and inserts the results into HBase.
 *
 * @author liang.ye
 */
public class WordCountHBase {

    public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
        private IntWritable i = new IntWritable(1);

        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Split each input line on spaces and emit (word, 1).
            String[] s = value.toString().trim().split(" ");
            for (String m : s) {
                context.write(new Text(m), i);
            }
        }
    }

    public static class Reduce extends TableReducer<Text, IntWritable, NullWritable> {
        public void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable i : values) {
                sum += i.get();
            }
            byte[] keyBytes = Bytes.toBytes(key.toString());
            if (keyBytes.length > 0) {
                // One row per word: column family "content", qualifier "count",
                // value = the word count stored as a string.
                Put put = new Put(keyBytes);
                put.add(Bytes.toBytes("content"), Bytes.toBytes("count"),
                        Bytes.toBytes(String.valueOf(sum)));
                context.write(NullWritable.get(), put);
            }
        }
    }

    public static void createHBaseTable(String tableName) throws IOException {
        HTableDescriptor htd = new HTableDescriptor(tableName);
        HColumnDescriptor col = new HColumnDescriptor("content");
        htd.addFamily(col);
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.56.101,192.168.56.102,192.168.56.103");
        HBaseAdmin admin = new HBaseAdmin(conf);
        if (admin.tableExists(tableName)) {
            System.out.println("table exists, trying to recreate table......");
            admin.disableTable(tableName);
            admin.deleteTable(tableName);
        }
        System.out.println("create new table: " + tableName);
        admin.createTable(htd);
        admin.close();
    }

    public static void main(String[] args) throws IOException,
            InterruptedException, ClassNotFoundException {
        String tableName = "WordCount2";
        // Use HBaseConfiguration.create() (rather than a bare Configuration) so the
        // HBase/ZooKeeper settings from hbase-site.xml reach TableOutputFormat.
        Configuration conf = HBaseConfiguration.create();
        conf.set(TableOutputFormat.OUTPUT_TABLE, tableName);
        createHBaseTable(tableName);
        String input = args[0];
        Job job = Job.getInstance(conf, "WordCount table with " + input);
        job.setJarByClass(WordCountHBase.class);
        job.setNumReduceTasks(3);
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TableOutputFormat.class);
        FileInputFormat.addInputPath(job, new Path(input));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
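Example 2 wires TableOutputFormat by hand, whereas example 1 goes through TableMapReduceUtil. A sketch (not from the original post) of the same driver rewritten in the style of example 1: initTableReducerJob sets the output format, the output table, and the reducer in one call, and by default also ships the HBase jars with the job (addDependencyJars), which avoids NoClassDefFoundError on nodes that lack HBase on their classpath. It needs the org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil import from example 1.

public static void main(String[] args) throws IOException,
        InterruptedException, ClassNotFoundException {
    String tableName = "WordCount2";
    Configuration conf = HBaseConfiguration.create(); // picks up hbase-site.xml
    createHBaseTable(tableName);
    String input = args[0];
    Job job = Job.getInstance(conf, "WordCount table with " + input);
    job.setJarByClass(WordCountHBase.class);
    job.setMapperClass(Map.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);
    job.setInputFormatClass(TextInputFormat.class);
    FileInputFormat.addInputPath(job, new Path(input));
    // Configures TableOutputFormat + OUTPUT_TABLE and adds HBase dependency jars.
    TableMapReduceUtil.initTableReducerJob(tableName, Reduce.class, job);
    System.exit(job.waitForCompletion(true) ? 0 : 1);
}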