When creating a custom function, the jar must first be deployed to the user-defined UDF directory on every machine running a HiveServer node, i.e. the directory configured by the following property:
<property>
  <name>hive.reloadable.aux.jars.path</name>
  <value>/usr/lib/hive/lib</value>
</property>
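After a new or updated jar has been copied into that directory, a running HiveServer2 can pick it up without a restart by issuing the RELOAD command in the session. A minimal sketch (the path comes from the configuration above; run it in beeline or the Hive CLI):

-- reload jars from hive.reloadable.aux.jars.path after copying test.jar to /usr/lib/hive/lib
RELOAD;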
The UDF code:
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.io.Text;
/**
 * The jar containing this class is deployed to /usr/lib/hive/lib,
 * the directory configured by hive.reloadable.aux.jars.path.
 */
public class Test extends GenericUDF {
    // Simple local test: concatenates "1", "2" and "3" and prints "123".
    public static void main(String[] args) throws HiveException {
        String s1 = "1";
        String s2 = "2";
        String s3 = "3";
        Test t = new Test();
        Object evaluate = t.evaluate(new DeferredObject[]{
                new DeferredJavaObject(s1), new DeferredJavaObject(s2), new DeferredJavaObject(s3)});
        System.out.println(evaluate.toString());
    }
    // Validate the UDF arguments and declare the return type.
    @Override
    public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
        if (arguments.length != 3) {
            throw new UDFArgumentException("The function str_add accepts exactly 3 arguments.");
        }
        // evaluate() returns a Text, so declare a writable string inspector (not a boolean one).
        return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    }
    // Core logic: concatenate the three arguments and return the result as Text.
    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        String s1 = arguments[0].get().toString();
        String s2 = arguments[1].get().toString();
        String s3 = arguments[2].get().toString();
        return new Text(s1 + s2 + s3);
    }
    // String shown for this UDF in EXPLAIN output and in error messages.
    @Override
    public String getDisplayString(String[] strings) {
        StringBuilder sb = new StringBuilder();
        for (String s : strings) {
            sb.append(s).append("\t");
        }
        return sb.toString();
    }
}
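Before creating the permanent function, the jar can also be registered in the current session and tried out as a temporary function. A sketch assuming the jar has already been built as test.jar and copied to the configured directory:

ADD JAR /usr/lib/hive/lib/test.jar;
CREATE TEMPORARY FUNCTION str_add AS 'com.test.Test';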
Create a permanent function:
create function str_add as 'com.test.Test' using jar 'hdfs://namespace/user/hive/lib/test.jar';
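Once registered, the function can be called like any built-in. A minimal check (the literals are only examples; given the code above the result is the concatenation of the three arguments):

SELECT str_add('1', '2', '3');
-- expected result: 123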