1.準備編寫udf環境
引入依賴
<dependencies>
<dependency>
<groupId>jdk.tools</groupId>
<artifactId>jdk.tools</artifactId>
<version>1.8</version>
<scope>system</scope>
<systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-common -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>3.1.3</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>3.1.2</version>
</dependency>
</dependencies>
2.編寫udf,繼承UDF,實現evaluate方法
package bigdata.hive;
import org.apache.hadoop.hive.ql.exec.UDF;

/**
 * Hive UDF that zero-pads each dotted segment of an IPv4-style address to
 * three digits, e.g. {@code "2.3.5.3"} becomes {@code "002.003.005.003"}.
 */
public class UdfDemo extends UDF{
    /**
     * Pads every dot-separated segment of {@code ip} to three characters.
     *
     * @param ip dotted string to pad; may be {@code null} (Hive passes NULL
     *           column values straight through to the UDF)
     * @return the padded string, {@code null} for {@code null} input, or the
     *         input unchanged when it contains no segments (e.g. {@code "."})
     */
    public String evaluate(String ip){
        if (ip == null) {
            return null; // Hive convention: NULL in -> NULL out (was an NPE)
        }
        String[] segments = ip.split("\\."); // escape: split on a literal dot
        if (segments.length == 0) {
            // e.g. ip == "." -> split yields an empty array; the original
            // code then threw StringIndexOutOfBoundsException on substring
            return ip;
        }
        // StringBuilder: single-threaded use, no need for StringBuffer locking
        StringBuilder sb = new StringBuilder();
        for (String segment : segments) {
            // left-pad with zeros, then keep only the last three characters
            String padded = "000" + segment;
            sb.append(padded.substring(padded.length() - 3)).append('.');
        }
        // drop the trailing '.' appended by the loop
        return sb.substring(0, sb.length() - 1);
    }

    // local smoke test (run outside Hive)
    public static void main(String[] args) {
        UdfDemo udfDemo = new UdfDemo();
        String evaluate = udfDemo.evaluate("2.3.5.3");
        System.out.println(evaluate); // expected: 002.003.005.003
    }
}
3.打包udf
執行 maven install 命令進行打包,執行成功後會出現如下圖所示的輸出
4.上傳udf jar 包到 hdfs
hdfs dfs -put bigdata-0.0.1-SNAPSHOT.jar /user/hive/hiveFunction/
5.啓動 hive,創建並註冊 udf 函數
create function UdfDemo as 'bigdata.hive.UdfDemo' using jar 'hdfs://master:9000/user/hive/hiveFunction/bigdata-0.0.1-SNAPSHOT.jar';
6.使用udf函數
測試 udf
select UdfDemo("2.3.5.3");