Hadoop之Hive UDAF TopN函数实现
/**
 * Hive UDAF returning the top-N rows of a group, ordered descending by the
 * first data column.
 *
 * <p>Call shape: {@code top_n(n, sort_col, extra_col_1, ..., extra_col_k)}.
 * The first argument must be an int constant-per-row N; the second is the
 * sort key (must be a comparable type — no map<> anywhere inside it); any
 * further arguments are carried along unchanged. The result is an
 * {@code array<struct<f0,...,fk>>} of the N largest rows by {@code sort_col}.
 *
 * <p>Implementation note: the aggregation buffer is always padded with
 * placeholder rows (an {@code Object[]} whose elements are all null) up to
 * size N. This encodes N inside the partial result itself, so {@code merge}
 * — which never sees the original N argument — can recover it as the
 * maximum partial size. Placeholder rows are filtered out while merging
 * (their sort key is null) and re-added afterwards.
 */
public class GenericUDAFTopNRow extends AbstractGenericUDAFResolver {

    @Override
    public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
            throws SemanticException {
        if (parameters.length < 2) {
            // FIX: original message read "At least two argument is expected."
            throw new UDFArgumentTypeException(parameters.length - 1,
                    "At least two arguments are expected.");
        }
        // Hoisted: the original computed this writable OI twice.
        ObjectInspector firstOI =
                TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(parameters[0]);
        if (!(firstOI instanceof WritableIntObjectInspector)) {
            throw new UDFArgumentTypeException(0,
                    "The first argument must be integer," + firstOI.getClass());
        }
        // The sort key (parameters[1]) must support comparison.
        if (!ObjectInspectorUtils.compareSupported(
                TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(parameters[1]))) {
            throw new UDFArgumentTypeException(1,
                    "Cannot support comparison of map<> type or complex type containing map<>.");
        }
        return new TopNEvaluator();
    }

    /**
     * Aggregation buffer: a list of rows, each an Object[] of the carried
     * columns (sort key at index 0). Kept sorted descending by sort key and
     * padded with all-null placeholder rows up to size N (see class javadoc).
     */
    static class TopNBuffer implements AggregationBuffer {
        List<Object[]> container;
    }

    /** Evaluator doing the actual per-group top-N bookkeeping. */
    public static class TopNEvaluator extends GenericUDAFEvaluator {
        /** Number of carried columns per row (argument count minus the N arg). */
        int size;
        /** Synthetic struct field names f0..f(size-1) for the output struct. */
        String[] fieldNM;
        /** Standard OIs of the carried columns (used when comparing/copying). */
        ObjectInspector[] fieldOI;
        /** Original input OIs as given to init() in PARTIAL1/COMPLETE mode. */
        ObjectInspector[] originalOI;
        /** OI of the partial result list (PARTIAL2/FINAL mode only). */
        StandardListObjectInspector partialOI;
        /** OI of one struct element of the partial list. */
        StandardStructObjectInspector partialElemOI;

        @Override
        public ObjectInspector init(Mode m, ObjectInspector[] parameters)
                throws HiveException {
            super.init(m, parameters);
            if (m == Mode.PARTIAL1 || m == Mode.COMPLETE) {
                // Raw input: parameters[0] is N, parameters[1..] are the
                // carried columns (sort key first).
                this.originalOI = new ObjectInspector[parameters.length];
                System.arraycopy(parameters, 0, this.originalOI, 0, parameters.length);
                this.size = parameters.length - 1;
                this.fieldNM = new String[this.size];
                this.fieldOI = new ObjectInspector[this.size];
                for (int i = 0; i < this.size; i++) {
                    this.fieldNM[i] = "f" + i;
                    this.fieldOI[i] =
                            ObjectInspectorUtils.getStandardObjectInspector(parameters[i + 1]);
                }
                return ObjectInspectorFactory.getStandardListObjectInspector(
                        ObjectInspectorFactory.getStandardStructObjectInspector(
                                Arrays.asList(this.fieldNM), Arrays.asList(this.fieldOI)));
            } else if (m == Mode.PARTIAL2 || m == Mode.FINAL) {
                // Partial input: a list<struct> produced by terminatePartial.
                this.partialOI = (StandardListObjectInspector) parameters[0];
                this.partialElemOI =
                        (StandardStructObjectInspector) this.partialOI.getListElementObjectInspector();
                List<? extends StructField> structFieldRefs =
                        this.partialElemOI.getAllStructFieldRefs();
                this.size = structFieldRefs.size();
                this.fieldNM = new String[this.size];
                this.fieldOI = new ObjectInspector[this.size];
                for (int i = 0; i < this.size; i++) {
                    StructField sf = structFieldRefs.get(i);
                    this.fieldNM[i] = sf.getFieldName();
                    this.fieldOI[i] = sf.getFieldObjectInspector();
                }
                return ObjectInspectorUtils.getStandardObjectInspector(this.partialOI);
            }
            return null;
        }

        @Override
        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
            TopNBuffer buffer = new TopNBuffer();
            reset(buffer);
            return buffer;
        }

        @Override
        public void reset(AggregationBuffer agg) throws HiveException {
            TopNBuffer buffer = (TopNBuffer) agg;
            buffer.container = new LinkedList<Object[]>();
        }

        @Override
        public void iterate(AggregationBuffer agg, Object[] parameters)
                throws HiveException {
            // Skip rows whose arguments are all null (empty result set).
            if (isEmptySet(agg, parameters)) {
                return;
            }
            TopNBuffer buffer = (TopNBuffer) agg;
            int n = ((WritableIntObjectInspector) this.originalOI[0]).get(parameters[0]);
            int s = buffer.container.size();
            if (s < n) {
                // Only hit on the very first row of the group: copy it in,
                // then pad with placeholder rows so the size is exactly n.
                Object[] elemVal = new Object[this.size];
                for (int j = 0; j < this.size; j++) {
                    elemVal[j] = ObjectInspectorUtils.copyToStandardObject(
                            parameters[j + 1], this.originalOI[j + 1]);
                }
                buffer.container.add(elemVal);
                /* make sure the size should be n */
                while (buffer.container.size() < n) {
                    buffer.container.add(new Object[this.size]);
                }
            } else {
                // Sorted (descending) insert: place the new row before the
                // first existing row with a strictly smaller sort key.
                // Placeholder rows have a null key and compare as smallest,
                // so real rows always displace them. Rows smaller than every
                // existing entry are dropped (loop ends without inserting).
                for (int i = 0; i < s; i++) {
                    if (ObjectInspectorUtils.compare(buffer.container.get(i)[0],
                            this.fieldOI[0], parameters[1], this.originalOI[1]) < 0) {
                        Object[] elemVal = new Object[this.size];
                        for (int j = 0; j < this.size; j++) {
                            elemVal[j] = ObjectInspectorUtils.copyToStandardObject(
                                    parameters[j + 1], this.originalOI[j + 1]);
                        }
                        buffer.container.add(i, elemVal);
                        break;
                    }
                }
                /* make sure the size should be n */
                while (buffer.container.size() > n) {
                    buffer.container.remove(n);
                }
            }
        }

        @Override
        public Object terminatePartial(AggregationBuffer agg)
                throws HiveException {
            TopNBuffer buffer = (TopNBuffer) agg;
            return buffer.container.isEmpty() ? null : buffer.container;
        }

        @Override
        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
            // Nothing to merge when the partial side is empty.
            if (isEmptySet(agg, partial)) {
                return;
            }
            TopNBuffer buffer = (TopNBuffer) agg;
            List<?> listVal = this.partialOI.getList(partial);
            // N is not an argument here; recover it from the padded sizes.
            final int cn = Math.max(buffer.container.size(), listVal.size());
            List<Object[]> values = new LinkedList<Object[]>();
            for (Object elemObj : listVal) {
                List<Object> elemVal = this.partialElemOI.getStructFieldsDataAsList(elemObj);
                Object[] value = new Object[this.size];
                for (int i = 0, n = elemVal.size(); i < n; i++) {
                    value[i] = ObjectInspectorUtils.copyToStandardObject(
                            elemVal.get(i), this.fieldOI[i]);
                }
                values.add(value);
            }
            buffer.container = mergeSortNotNull(buffer.container, values);
            // Re-pad / truncate back to exactly cn (= N) rows.
            while (buffer.container.size() < cn) {
                buffer.container.add(new Object[this.size]);
            }
            while (buffer.container.size() > cn) {
                buffer.container.remove(cn);
            }
        }

        @Override
        public Object terminate(AggregationBuffer agg) throws HiveException {
            TopNBuffer buffer = (TopNBuffer) agg;
            return buffer.container.isEmpty() ? null : buffer.container;
        }

        /**
         * Merges two descending-sorted row lists into one descending-sorted
         * list, skipping placeholder rows (null sort key) from either side.
         * Tied rows from both lists are all kept.
         */
        private List<Object[]> mergeSortNotNull(List<Object[]> list1, List<Object[]> list2) {
            List<Object[]> result = new LinkedList<Object[]>();
            int i1 = 0, i2 = 0, n1 = list1.size(), n2 = list2.size();
            while (i1 < n1 && i2 < n2) {
                if (list1.get(i1)[0] == null) {
                    i1++;
                    continue;
                }
                if (list2.get(i2)[0] == null) {
                    i2++;
                    continue;
                }
                int cp = ObjectInspectorUtils.compare(list1.get(i1)[0],
                        this.fieldOI[0], list2.get(i2)[0], this.fieldOI[0]);
                if (cp >= 0) {
                    // list1's row wins (or ties). FIX: on a tie the original
                    // advanced BOTH cursors but emitted only one row, silently
                    // dropping list2's tied row during merges. We now emit
                    // list1's row and re-compare list2's on the next pass, so
                    // every tied row survives; the caller's truncation to cn
                    // still bounds the final size.
                    result.add(list1.get(i1));
                    i1++;
                } else {
                    result.add(list2.get(i2));
                    i2++;
                }
            }
            // Drain whichever list has rows left, still skipping placeholders.
            while (i1 < n1) {
                if (list1.get(i1)[0] == null) {
                    i1++;
                    continue;
                }
                result.add(list1.get(i1));
                i1++;
            }
            while (i2 < n2) {
                if (list2.get(i2)[0] == null) {
                    i2++;
                    continue;
                }
                result.add(list2.get(i2));
                i2++;
            }
            return result;
        }

        /** True when the buffer or every raw argument of this row is null. */
        private boolean isEmptySet(AggregationBuffer agg, Object[] parameters) {
            if (agg == null || parameters == null) {
                return true;
            } else {
                for (int i = 0; i < parameters.length; i++) {
                    if (parameters[i] != null) {
                        return false;
                    }
                }
                return true;
            }
        }

        /** True when the buffer or the partial aggregation object is null. */
        private boolean isEmptySet(AggregationBuffer agg, Object parameter) {
            return (agg == null) || (parameter == null);
        }
    }
}
Hadoop之Hive UDAF TopN函数实现的更多相关文章
- Hadoop生态圈-Hive的自定义函数之UDAF(User-Defined Aggregation Function)
Hadoop生态圈-Hive的自定义函数之UDAF(User-Defined Aggregation Function) 作者:尹正杰 版权声明:原创作品,谢绝转载!否则将追究法律责任.
- Hadoop生态圈-hive编写自定义函数
Hadoop生态圈-hive编写自定义函数 作者:尹正杰 版权声明:原创作品,谢绝转载!否则将追究法律责任.
- Hadoop生态圈-Hive的自定义函数之UDTF(User-Defined Table-Generating Functions)
Hadoop生态圈-Hive的自定义函数之UDTF(User-Defined Table-Generating Functions) 作者:尹正杰 版权声明:原创作品,谢绝转载!否则将追究法律责任.
- Hadoop生态圈-Hive的自定义函数之UDF(User-Defined-Function)
Hadoop生态圈-Hive的自定义函数之UDF(User-Defined-Function) 作者:尹正杰 版权声明:原创作品,谢绝转载!否则将追究法律责任.
- Hadoop生态圈-Hive函数
Hadoop生态圈-Hive函数 作者:尹正杰 版权声明:原创作品,谢绝转载!否则将追究法律责任.
- Hive执行count函数失败,Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException)
Hive执行count函数失败 1.现象: 0: jdbc:hive2://192.168.137.12:10000> select count(*) from emp; INFO : Numb ...
- Hive UDAF开发之同时计算最大值与最小值
卷首语 前一篇文章hive UDAF开发入门和运行过程详解(转)里面讲过UDAF的开发过程,其中说到如果要深入理解UDAF的执行,可以看看求平均值的UDF的源码 本人在看完源码后,也还是没能十分理解里 ...
- hive UDAF开发入门和运行过程详解(转)
介绍 hive的用户自定义聚合函数(UDAF)是一个很好的功能,集成了先进的数据处理.hive有两种UDAF:简单和通用.顾名思义,简单的UDAF,写的相当简单的,但因为使用Java反射导致性能损失, ...
- [Hive_12] Hive 的自定义函数
0. 说明 UDF //user define function //输入单行,输出单行,类似于 format_number(age,'000') UDTF //user define table-g ...
随机推荐
- centos7安装tftp服务器
1.安装 su #进入root yum install xinetd yum install tftp yum install tftp-server 2.配置tftp vim /etc/xinetd ...
- SQL where 1=1的作用
浅谈where 1=1 1.简单理解的话where 1=1 永真, where 1<>1 永假 2.1<>1 的用处: 用于只取结构不取数据的场合 例如: ...
- spring HibernateValidator 验证 子类不起作用
spring HibernateValidator 验证 子类不起作用,在要验证的子类前加上@Valid即可. public class UserInfo { private int Id; @Val ...
- ubuntu下,apt的参数使用,很实用呦
ubuntu下apt-get 命令参数 常用的APT命令参数 apt-cache search package 搜索包 apt-cache show package 获取包的相关信息,如说明.大小.版 ...
- ax 的错误处理范例
#OCCRetryCount ; try { ttsbegin; //example as insert or update or delete record ttscommit; } catch(E ...
- MySQL初夜(乱码问题,命令行客户端使用)
一.乱码问题 装好MySQL,并且将数据从SQLServer导入到MySQL之后,程序一直报错. 解决方案: 首先,输入命令: show variables like "character_ ...
- 在ASP.NET开始执行HTTP请求的处理程序之前
using Dscf.Client.Web.Class; using Dscf.Client.Web.DscfService; using Dscf.Client.Web.Handler; using ...
- Visual studio 2013 Team Foundation Server TFS2013 设置签出独占锁
摘自: http://www.cnblogs.com/52XF/p/4239056.html 以备自查 如侵权,请告知
- 字符串匹配KMP算法
1. 字符串匹配的KMP算法 2. KMP算法详解 3. 从头到尾彻底理解KMP
- html5,html5教程
html5,html5教程 1.向后兼容 HTML5是这样被定义的:能向后兼容目前UA处理内容的方式.为了让语言更简单,一些老的元素和Attribute被舍弃.比如一些纯粹用于展现的元素(译注:即非语 ...