Hadoop之Hive UDAF TopN函数实现
public class GenericUDAFTopNRow extends AbstractGenericUDAFResolver {
@Override
public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
throws SemanticException {
if (parameters.length < 2) {
throw new UDFArgumentTypeException(parameters.length - 1,"At least two argument is expected.");
}
if(!(TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(parameters[0]) instanceof WritableIntObjectInspector)){
throw new UDFArgumentTypeException(0,"The first argument must be integer,"+TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(parameters[0]).getClass());
}
if (!ObjectInspectorUtils.compareSupported(TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(parameters[1]))) {
throw new UDFArgumentTypeException(1,"Cannot support comparison of map<> type or complex type containing map<>.");
}
return new TopNEvaluator();
}
static class TopNBuffer implements AggregationBuffer {
List<Object[]> container;
}
public static class TopNEvaluator extends GenericUDAFEvaluator {
int size;
String[] fieldNM;
ObjectInspector[] fieldOI;
ObjectInspector[] originalOI;
StandardListObjectInspector partialOI;
StandardStructObjectInspector partialElemOI;
@Override
public ObjectInspector init(Mode m, ObjectInspector[] parameters)
throws HiveException {
super.init(m, parameters);
if (m == Mode.PARTIAL1 || m == Mode.COMPLETE) {
this.originalOI = new ObjectInspector[parameters.length];
System.arraycopy(parameters, 0, this.originalOI, 0, parameters.length);
this.size = parameters.length-1;
this.fieldNM = new String[this.size];
this.fieldOI = new ObjectInspector[this.size];
for (int i = 0; i < this.size; i++) {
this.fieldNM[i] = "f" + i;
this.fieldOI[i] = ObjectInspectorUtils.getStandardObjectInspector(parameters[i+1]);
}
return ObjectInspectorFactory.getStandardListObjectInspector(ObjectInspectorFactory.getStandardStructObjectInspector(Arrays.asList(this.fieldNM),Arrays.asList(this.fieldOI)));
} else if (m == Mode.PARTIAL2 || m == Mode.FINAL) {
this.partialOI = (StandardListObjectInspector) parameters[0];
this.partialElemOI=(StandardStructObjectInspector) this.partialOI.getListElementObjectInspector();
List<? extends StructField> structFieldRefs = this.partialElemOI.getAllStructFieldRefs();
this.size = structFieldRefs.size();
this.fieldNM = new String[this.size];
this.fieldOI = new ObjectInspector[this.size];
for (int i = 0; i < this.size; i++) {
StructField sf = structFieldRefs.get(i);
this.fieldNM[i] = sf.getFieldName();
this.fieldOI[i] = sf.getFieldObjectInspector();
}
return ObjectInspectorUtils.getStandardObjectInspector(this.partialOI);
}
return null;
}
@Override
public AggregationBuffer getNewAggregationBuffer() throws HiveException {
TopNBuffer buffer = new TopNBuffer();
reset(buffer);
return buffer;
}
@Override
public void reset(AggregationBuffer agg) throws HiveException {
TopNBuffer buffer = (TopNBuffer) agg;
buffer.container = new LinkedList<Object[]>();
}
@Override
public void iterate(AggregationBuffer agg, Object[] parameters)
throws HiveException {
/*如果查询结果为空,不作处理*/
if(isEmptySet(agg,parameters)){
return;
}
TopNBuffer buffer = (TopNBuffer) agg;
int n = ((WritableIntObjectInspector)this.originalOI[0]).get(parameters[0]);
int s = buffer.container.size();
if(s < n){
Object[] elemVal = new Object[this.size];
for (int j = 0; j < this.size; j++) {
elemVal[j] = ObjectInspectorUtils.copyToStandardObject(parameters[j+1],this.originalOI[j+1]);
}
buffer.container.add(elemVal);
/*make sure the size should be n*/
while(buffer.container.size() < n){
buffer.container.add(new Object[this.size]);
}
}else{
for(int i = 0;i < s;i++){
if (ObjectInspectorUtils.compare(buffer.container.get(i)[0],this.fieldOI[0], parameters[1], this.originalOI[1]) < 0) {
Object[] elemVal = new Object[this.size];
for(int j=0;j<this.size;j++){
elemVal[j] = ObjectInspectorUtils.copyToStandardObject(parameters[j+1],this.originalOI[j+1]);
}
buffer.container.add(i, elemVal);
break;
}
}
/*make sure the size should be n*/
while(buffer.container.size() > n){
buffer.container.remove(n);
}
}
}
@Override
public Object terminatePartial(AggregationBuffer agg)
throws HiveException {
TopNBuffer buffer = (TopNBuffer) agg;
return buffer.container.isEmpty()?null:buffer.container;
}
@Override
public void merge(AggregationBuffer agg, Object partial) {
/*如果查询结果为空,不作处理*/
if(isEmptySet(agg,partial)){
return;
}
TopNBuffer buffer = (TopNBuffer) agg;
List<?> listVal = this.partialOI.getList(partial);
final int cn = Math.max(buffer.container.size(), listVal.size());
List<Object[]> values = new LinkedList<Object[]>();
for(Object elemObj:listVal){
List<Object> elemVal=this.partialElemOI.getStructFieldsDataAsList(elemObj);
Object[] value=new Object[this.size];
for(int i=0,n=elemVal.size();i<n;i++){
value[i]=ObjectInspectorUtils.copyToStandardObject(elemVal.get(i), this.fieldOI[i]);
}
values.add(value);
}
buffer.container=mergeSortNotNull(buffer.container, values);
while(buffer.container.size()<cn){
buffer.container.add(new Object[this.size]);
}
while(buffer.container.size() > cn){
buffer.container.remove(cn);
}
}
@Override
public Object terminate(AggregationBuffer agg) throws HiveException {
TopNBuffer buffer = (TopNBuffer) agg;
return buffer.container.isEmpty()?null:buffer.container;
}
private List<Object[]> mergeSortNotNull(List<Object[]> list1, List<Object[]> list2){
List<Object[]> result=new LinkedList<Object[]>();
int i1=0, i2=0, n1=list1.size(), n2=list2.size();
while(i1<n1 && i2<n2){
if(list1.get(i1)[0]==null){
i1++;
continue;
}
if(list2.get(i2)[0]==null){
i2++;
continue;
}
int cp = ObjectInspectorUtils.compare(list1.get(i1)[0],this.fieldOI[0], list2.get(i2)[0], this.fieldOI[0]);
if(cp > 0){
result.add(list1.get(i1));
i1++;
}else if(cp<0){
result.add(list2.get(i2));
i2++;
}else{
result.add(list1.get(i1));
i1++;
i2++;
}
}
while(i1<n1){
if(list1.get(i1)[0]==null){
i1++;
continue;
}
result.add(list1.get(i1));
i1++;
}
while(i2<n2){
if(list2.get(i2)[0]==null){
i2++;
continue;
}
result.add(list2.get(i2));
i2++;
}
return result;
}
private boolean isEmptySet(AggregationBuffer agg, Object[] parameters){
if(agg==null || parameters==null){
return true;
}else{
for(int i=0; i<parameters.length; i++){
if(parameters[i]!=null){
return false;
}
}
return true;
}
}
private boolean isEmptySet(AggregationBuffer agg, Object parameter){
return (agg==null) || (parameter==null);
}
}
}
Hadoop之Hive UDAF TopN函数实现的更多相关文章
- Hadoop生态圈-Hive的自定义函数之UDAF(User-Defined Aggregation Function)
Hadoop生态圈-Hive的自定义函数之UDAF(User-Defined Aggregation Function) 作者:尹正杰 版权声明:原创作品,谢绝转载!否则将追究法律责任.
- Hadoop生态圈-hive编写自定义函数
Hadoop生态圈-hive编写自定义函数 作者:尹正杰 版权声明:原创作品,谢绝转载!否则将追究法律责任.
- Hadoop生态圈-Hive的自定义函数之UDTF(User-Defined Table-Generating Functions)
Hadoop生态圈-Hive的自定义函数之UDTF(User-Defined Table-Generating Functions) 作者:尹正杰 版权声明:原创作品,谢绝转载!否则将追究法律责任.
- Hadoop生态圈-Hive的自定义函数之UDF(User-Defined-Function)
Hadoop生态圈-Hive的自定义函数之UDF(User-Defined-Function) 作者:尹正杰 版权声明:原创作品,谢绝转载!否则将追究法律责任.
- Hadoop生态圈-Hive函数
Hadoop生态圈-Hive函数 作者:尹正杰 版权声明:原创作品,谢绝转载!否则将追究法律责任.
- Hive执行count函数失败,Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException)
Hive执行count函数失败 1.现象: 0: jdbc:hive2://192.168.137.12:10000> select count(*) from emp; INFO : Numb ...
- Hive UDAF开发之同时计算最大值与最小值
卷首语 前一篇文章hive UDAF开发入门和运行过程详解(转)里面讲过UDAF的开发过程,其中说到如果要深入理解UDAF的执行,可以看看求平均值的UDF的源码 本人在看完源码后,也还是没能十分理解里 ...
- hive UDAF开发入门和运行过程详解(转)
介绍 hive的用户自定义聚合函数(UDAF)是一个很好的功能,集成了先进的数据处理.hive有两种UDAF:简单和通用.顾名思义,简单的UDAF,写的相当简单的,但因为使用Java反射导致性能损失, ...
- [Hive_12] Hive 的自定义函数
0. 说明 UDF //user define function //输入单行,输出单行,类似于 format_number(age,'000') UDTF //user define table-g ...
随机推荐
- uitextview 最后一行遮挡
这只 uiscrollerview 的 setContentOffset CGRect line = [textView caretRectForPosition: textView.selected ...
- win8下安装ubuntu双系统
终于成功在win8下安装成功ubuntu13.10, 安装方法来源于http://forum.ubuntu.org.cn/viewtopic.php?t=446557 下面的文件是该楼主的将安装ubu ...
- 深入了解Qt(三)之元signal和slot
深入了解Qt主要内容来源于Inside Qt系列,本文做了部分删改,以便于理解.在此向原作者表示感谢! 在Qt 信号和槽函数这篇文章中已经详细地介绍了信号和槽的使用及注意事项.在这里对其使用方面的知识 ...
- form 表单默认的提示
<form method="get" action="" class="form"> <input type ...
- RGB颜色空间与YCbCr颜色空间的互转
在人脸检测中会用到YCbCr颜色空间,因此就要进行RGB与YCbCr颜色空间的转换.在下面的公式中RGB和YCbCr各分量的值的范围均为0-255. RGB转到YCbCr: float y= (col ...
- Oracle笔记 七、PL/SQL 异常处理
--异常处理 declare sNum number := 0; begin sNum := 5 / sNum; dbms_output.put_line(sNum); exception when ...
- JS常用的设计模式(8)——访问者模式
GOF官方定义: 访问者模式是表示一个作用于某个对象结构中的各元素的操作.它使可以在不改变各元素的类的前提下定义作用于这些元素的新操作.我们在使用一些操作对不同的 对象进行处理时,往往会根据不同的对象 ...
- Git 从服务器取得最新代码覆盖本地
第一种方法 git fetch --all git reset --hard origin/master git fetch下载远程最新的, 然后,git reset master分支重置 第二种方法 ...
- C#正则表达式语法规则详解
正则表达式通常包含字母文本(Literal text)和元字符(metacharacter) 字母文本指的是普通文本如"abcde"可匹配字符串中任何包含"abcde" ...
- javaSE第二十六天
第二十六天 414 1:网络编程(理解) 414 (1)网络编程:用Java语言实现计算机间数据的信息传递和资源共享 414 (2)网络编程模型 414 (3)网络编程的三要素 ...