Hadoop 2.2.0 MapReduce Serialization
package com.my.hadoop.mapreduce.dataformat;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import com.my.hadoop.common.Configs;
/**
 * Hadoop serialization example: sums up/down traffic per phone number
 * using a custom Writable value type.
 * @author yao
 *
 */
public class DataCount {

    static class DTMap extends Mapper<LongWritable, Text, Text, DataBean> {

        DataBean dataBean = null;

        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Each input line is a tab-separated record: field 1 is the phone
            // number, fields 8 and 9 are the upstream and downstream payloads.
            String[] fields = value.toString().split("\t");
            String telNo = fields[1];
            long upPayLoad = Long.parseLong(fields[8]);
            long downPayLoad = Long.parseLong(fields[9]);
            dataBean = new DataBean(telNo, upPayLoad, downPayLoad);
            context.write(new Text(telNo), dataBean);
        }
    }
    static class DTReduce extends Reducer<Text, DataBean, Text, DataBean> {

        @Override
        public void reduce(Text key, Iterable<DataBean> dataBeans, Context context) throws IOException, InterruptedException {
            // Sum the up/down payloads of all records for the same phone number.
            long upPayLoad = 0;
            long downPayLoad = 0;
            for (DataBean dataBean : dataBeans) {
                upPayLoad += dataBean.getUpPayLoad();
                downPayLoad += dataBean.getDownPayLoad();
            }
            DataBean dataBean = new DataBean("", upPayLoad, downPayLoad);
            context.write(key, dataBean);
        }
    }
    public static void main(String[] args) throws Exception {
        Configuration conf = Configs.getConfigInstance();

        // Use the arguments left over after GenericOptionsParser strips the
        // generic Hadoop options (the original used args[] here by mistake).
        String[] paths = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (paths.length != 2) {
            System.err.println("Usage: " + DataCount.class.getName() + " <in> <out>");
            System.exit(2);
        }

        Job job = Job.getInstance(conf, DataCount.class.getSimpleName());
        job.setJarByClass(DataCount.class);    // the class containing the main method

        FileInputFormat.setInputPaths(job, new Path(paths[0]));
        job.setMapperClass(DTMap.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(DataBean.class);

        job.setReducerClass(DTReduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(DataBean.class);
        FileOutputFormat.setOutputPath(job, new Path(paths[1]));

        // Wait for the MapReduce job to finish, printing progress as it runs.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
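To run the job, package the classes into a jar and submit it with the hadoop command. A sketch of the invocation, assuming a hypothetical jar name datacount.jar and placeholder HDFS paths:

    hadoop jar datacount.jar com.my.hadoop.mapreduce.dataformat.DataCount /data/flow/in /data/flow/out

The input files must be tab-separated records in which field index 1 holds the phone number and indices 8 and 9 hold the up/down payload values, as the mapper above expects.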
/**
 * Implements the Writable interface: the write and readFields methods must
 * serialize and deserialize the fields in exactly the same order.
 * @author yao
 *
 */
class DataBean implements Writable {

    private String telNo;
    private long upPayLoad;
    private long downPayLoad;
    private long totalPayLoad;

    // The no-arg constructor is required: the framework instantiates the bean
    // reflectively and then calls readFields to populate it.
    public DataBean() {
    }

    public DataBean(String telNo, long upPayLoad, long downPayLoad) {
        super();
        this.telNo = telNo;
        this.upPayLoad = upPayLoad;
        this.downPayLoad = downPayLoad;
        this.totalPayLoad = upPayLoad + downPayLoad;
    }

    // Deserialization: read the fields in the same order write emits them.
    @Override
    public void readFields(DataInput in) throws IOException {
        this.telNo = in.readUTF();
        this.upPayLoad = in.readLong();
        this.downPayLoad = in.readLong();
        this.totalPayLoad = in.readLong();
    }

    // Serialization: defines the on-the-wire field order.
    @Override
    public void write(DataOutput out) throws IOException {
        out.writeUTF(telNo);
        out.writeLong(upPayLoad);
        out.writeLong(downPayLoad);
        out.writeLong(totalPayLoad);
    }

    @Override
    public String toString() {
        return this.telNo + "\t" + this.upPayLoad + "\t" + this.downPayLoad + "\t" + this.totalPayLoad;
    }

    public String getTelNo() {
        return telNo;
    }

    public void setTelNo(String telNo) {
        this.telNo = telNo;
    }

    public long getUpPayLoad() {
        return upPayLoad;
    }

    public void setUpPayLoad(long upPayLoad) {
        this.upPayLoad = upPayLoad;
    }

    public long getDownPayLoad() {
        return downPayLoad;
    }

    public void setDownPayLoad(long downPayLoad) {
        this.downPayLoad = downPayLoad;
    }

    public long getTotalPayLoad() {
        return totalPayLoad;
    }

    public void setTotalPayLoad(long totalPayLoad) {
        this.totalPayLoad = totalPayLoad;
    }
}
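Because write and readFields must stay symmetric, a quick local round-trip check is useful whenever fields are added or reordered. A minimal sketch (not part of the job; the class name DataBeanRoundTrip is hypothetical, and it assumes DataBean is compiled in the same package):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

public class DataBeanRoundTrip {
    public static void main(String[] args) throws Exception {
        DataBean original = new DataBean("13800000000", 1024L, 2048L);

        // Serialize through DataOutput, exactly as Hadoop does on the wire.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        original.write(new DataOutputStream(bytes));

        // Deserialize into a fresh instance; readFields consumes the fields
        // in the same order write produced them.
        DataBean copy = new DataBean();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(copy);    // expected: 13800000000	1024	2048	3072
    }
}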