I: UDF
1. Custom UDF
A plain UDF maps one input row to one output value. The classic approach is to extend org.apache.hadoop.hive.ql.exec.UDF and provide an evaluate() method whose signature matches the column types.
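The notes don't include a UDF listing, so here is a minimal sketch under the classic org.apache.hadoop.hive.ql.exec.UDF API; the class name ToLowerUdf and its lower-casing behavior are illustrative assumptions, not from the original.

package org.apache.hadoop.hive_udf;

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;

// Hypothetical example: one row in, one value out.
public class ToLowerUdf extends UDF {
    // Hive resolves evaluate() by matching the argument types.
    public Text evaluate(Text input) {
        if (input == null) {
            return null;
        }
        return new Text(input.toString().toLowerCase());
    }
}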
II: UDAF
2. UDAF
A UDAF aggregates many input rows into a single value (like the built-in sum or count). With the generic API this takes two pieces: a resolver that validates the argument types, and an evaluator that performs the aggregation.
3. About AbstractGenericUDAFResolver
At query-compile time Hive calls getEvaluator(GenericUDAFParameterInfo) on the resolver; it inspects the argument types, rejects invalid ones with a SemanticException, and returns the evaluator that matches the input type.
4. About GenericUDAFEvaluator
The evaluator holds the aggregation logic: init() sets up object inspectors and declares the return type, getNewAggregationBuffer() and reset() manage the per-group buffer, iterate() consumes raw rows, terminatePartial() emits a partial result, merge() combines partials, and terminate() produces the final value. Hive drives these methods in four modes: PARTIAL1 (iterate then terminatePartial, map side), PARTIAL2 (merge then terminatePartial, combiner), FINAL (merge then terminate, reduce side), and COMPLETE (iterate then terminate, map-only).
5. The program
package org.apache.hadoop.hive_udf;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.udf.generic.AbstractGenericUDAFResolver;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFParameterInfo;
import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.AbstractPrimitiveWritableObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.io.LongWritable;

/**
 * Requirement: implement a sum function that supports int and double types.
 */
public class UdafProject extends AbstractGenericUDAFResolver {
    @Override
    public GenericUDAFEvaluator getEvaluator(GenericUDAFParameterInfo info)
            throws SemanticException {
        // Reject the wildcard (*) argument form
        if (info.isAllColumns()) {
            throw new SemanticException("The * argument is not supported");
        }
        // Require exactly one argument
        ObjectInspector[] inspector = info.getParameterObjectInspectors();
        if (inspector.length != 1) {
            throw new SemanticException("Exactly one argument is required");
        }
        // The input column must be a primitive type
        if (inspector[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
            throw new SemanticException("The argument must be a primitive type");
        }
        AbstractPrimitiveWritableObjectInspector woi =
                (AbstractPrimitiveWritableObjectInspector) inspector[0];
        // Dispatch on the concrete primitive category
        switch (woi.getPrimitiveCategory()) {
        case INT:
        case LONG:
        case BYTE:
        case SHORT:
            return new udafLong();
        case FLOAT:
        case DOUBLE:
            return new udafDouble();
        default:
            throw new SemanticException(
                    "The argument must be a numeric primitive type (string etc. is not allowed)");
        }
    }

    /**
     * Sum over integral types.
     */
    public static class udafLong extends GenericUDAFEvaluator {

        // Object inspector for the input value
        public PrimitiveObjectInspector inputor;

        // Custom aggregation buffer
        static class sumlongagg implements AggregationBuffer {
            long sum;
            boolean empty;
        }

        @Override
        public ObjectInspector init(Mode m, ObjectInspector[] parameters)
                throws HiveException {
            super.init(m, parameters);
            if (parameters.length != 1) {
                throw new UDFArgumentException("Exactly one argument is expected");
            }
            if (inputor == null) {
                this.inputor = (PrimitiveObjectInspector) parameters[0];
            }
            // The returned inspector must match the type produced by terminate()
            return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
        }

        @Override
        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
            sumlongagg slg = new sumlongagg();
            this.reset(slg);
            return slg;
        }

        @Override
        public void reset(AggregationBuffer agg) throws HiveException {
            sumlongagg slg = (sumlongagg) agg;
            slg.sum = 0;
            slg.empty = true;
        }

        @Override
        public void iterate(AggregationBuffer agg, Object[] parameters)
                throws HiveException {
            if (parameters.length != 1) {
                throw new UDFArgumentException("Exactly one argument is expected");
            }
            this.merge(agg, parameters[0]);
        }

        @Override
        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
            return this.terminate(agg);
        }

        @Override
        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
            sumlongagg slg = (sumlongagg) agg;
            if (partial != null) {
                slg.sum += PrimitiveObjectInspectorUtils.getLong(partial, inputor);
                slg.empty = false;
            }
        }

        @Override
        public Object terminate(AggregationBuffer agg) throws HiveException {
            sumlongagg slg = (sumlongagg) agg;
            if (slg.empty) {
                return null;
            }
            return new LongWritable(slg.sum);
        }
    }

    /**
     * Sum over floating-point types.
     */
    public static class udafDouble extends GenericUDAFEvaluator {

        // Object inspector for the input value
        public PrimitiveObjectInspector input;

        // Custom aggregation buffer
        static class sumdoubleagg implements AggregationBuffer {
            double sum;
            boolean empty;
        }

        @Override
        public ObjectInspector init(Mode m, ObjectInspector[] parameters)
                throws HiveException {
            super.init(m, parameters);
            if (parameters.length != 1) {
                throw new UDFArgumentException("Exactly one argument is expected");
            }
            if (input == null) {
                this.input = (PrimitiveObjectInspector) parameters[0];
            }
            // The returned inspector must match the type produced by terminate()
            return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
        }

        @Override
        public AggregationBuffer getNewAggregationBuffer() throws HiveException {
            sumdoubleagg sdg = new sumdoubleagg();
            this.reset(sdg);
            return sdg;
        }

        @Override
        public void reset(AggregationBuffer agg) throws HiveException {
            sumdoubleagg sdg = (sumdoubleagg) agg;
            sdg.sum = 0;
            sdg.empty = true;
        }

        @Override
        public void iterate(AggregationBuffer agg, Object[] parameters)
                throws HiveException {
            if (parameters.length != 1) {
                throw new UDFArgumentException("Exactly one argument is expected");
            }
            this.merge(agg, parameters[0]);
        }

        @Override
        public Object terminatePartial(AggregationBuffer agg) throws HiveException {
            return this.terminate(agg);
        }

        @Override
        public void merge(AggregationBuffer agg, Object partial) throws HiveException {
            sumdoubleagg sdg = (sumdoubleagg) agg;
            if (partial != null) {
                // Read the partial value through the input inspector
                sdg.sum += PrimitiveObjectInspectorUtils.getDouble(partial, input);
                sdg.empty = false;
            }
        }

        @Override
        public Object terminate(AggregationBuffer agg) throws HiveException {
            sumdoubleagg sdg = (sumdoubleagg) agg;
            if (sdg.empty) {
                return null;
            }
            return new DoubleWritable(sdg.sum);
        }
    }
}
6. Build the jar
and place it at: /etc/opt/datas/
7. Add the jar to the Hive session
Syntax:
add jar linux_path;
Here:
add jar /etc/opt/datas/af.jar;
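To confirm the jar was registered in the current session before creating the function, Hive's built-in list jars command can be used:
list jars;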
8. Create the function
create temporary function af as 'org.apache.hadoop.hive_udf.UdafProject';
9. Run it in Hive
select sum(id),af(id) from stu_info;
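Since the built-in sum(id) and the custom af(id) compute the same thing, the two columns should come back equal, which makes this a quick correctness check. A grouped query works the same way; the dept column below is hypothetical, purely to illustrate per-group aggregation:
select dept, af(id) from stu_info group by dept;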
III: UDTF
1. UDTF
A UDTF maps one input row to zero or more output rows, possibly with multiple columns. Extend GenericUDTF, declare the output schema in initialize(), emit rows with forward() inside process(), and flush any remaining output in close().
2. The program
package org.apache.hadoop.hive.udf;

import java.util.ArrayList;

import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class UDTFtest extends GenericUDTF {

    @Override
    public StructObjectInspector initialize(StructObjectInspector argOIs)
            throws UDFArgumentException {
        // Require exactly one argument
        if (argOIs.getAllStructFieldRefs().size() != 1) {
            throw new UDFArgumentException("Exactly one argument is required");
        }
        // Declare the output schema: two string columns, name and email
        ArrayList<String> fieldname = new ArrayList<String>();
        fieldname.add("name");
        fieldname.add("email");
        ArrayList<ObjectInspector> fieldio = new ArrayList<ObjectInspector>();
        fieldio.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        fieldio.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        return ObjectInspectorFactory.getStandardStructObjectInspector(fieldname, fieldio);
    }

    @Override
    public void process(Object[] args) throws HiveException {
        // Derive an email address from the name and emit one output row
        if (args.length == 1) {
            String name = args[0].toString();
            String email = name + "@ibeifeng.com";
            super.forward(new String[] { name, email });
        }
    }

    @Override
    public void close() throws HiveException {
        // Emit one final marker row when the input is exhausted
        super.forward(new String[] { "complete", "finish" });
    }
}
3. Same steps as above: build the jar, add it with add jar, and create a temporary function (named tf here).
4. Run it in Hive
select tf(ename) as (name,email) from emp;
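In this direct form the UDTF must be the only expression in the select list. To keep source columns next to the generated ones, the usual pattern is LATERAL VIEW; a sketch against the same emp table:
select e.ename, t.name, t.email
from emp e
lateral view tf(e.ename) t as name, email;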