代码如下:
package ***.com.json_udf;
import net.sf.json.JSONObject;
import org.apache.hadoop.hive.ql.exec.UDF;
import java.util.Iterator;
/**
* create by zhangxin 2018-07-25
*/
/**
 * Hive UDF that returns every top-level key of a JSON object string,
 * joined by commas (e.g. {"a":1,"b":2} -> "a,b").
 *
 * create by zhangxin 2018-07-25
 */
public class get_json_all_keys extends UDF {

    /**
     * Extracts all top-level keys from the given JSON object string.
     *
     * @param json_str JSON object text; {@code null} or empty input is
     *                 treated as the empty object {@code "{}"}
     * @return the keys joined by commas, or {@code ""} when there are none
     */
    public String evaluate(String json_str) {
        // Hive hands SQL NULL column values to the UDF as Java null, so the
        // null check must come before any dereference to avoid an NPE.
        if (json_str == null || json_str.isEmpty()) {
            json_str = "{}";
        }
        JSONObject json = JSONObject.fromObject(json_str);
        // Build with StringBuilder instead of repeated String concatenation;
        // emit the separator only between elements, so no substring() fixup.
        StringBuilder keys = new StringBuilder();
        Iterator it = json.keys();
        while (it.hasNext()) {
            if (keys.length() > 0) {
                keys.append(',');
            }
            keys.append(it.next());
        }
        return keys.toString();
    }
}
代码很简单,利用java的json api获取所有的key,最终结果以逗号分隔
使用方法:
先将udf的jar包放置在hdfs路径下,并执行add jar语句
create temporary function语句和业务sql语句需要在同一个session中执行,udf才能起作用
ADD JAR hdfs:///***/json_udf.jar;
create temporary function get_json_all_keys as '***.com.json_udf.get_json_all_keys';
select get_json_all_keys(json) from db.table where dt = '2018-07-25';
pom文件:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>netease.com</groupId>
<artifactId>json_udf</artifactId>
<version>1.0</version>
<dependencies>
<!-- Hadoop -->
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-core</artifactId>
<version>1.2.1</version>
</dependency>
<!-- Hive -->
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>1.2.1</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-metastore</artifactId>
<version>1.2.1</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-pdk</artifactId>
<version>0.10.0</version>
</dependency>
<dependency>
<groupId>net.sf.json-lib</groupId>
<artifactId>json-lib</artifactId>
<version>2.4</version>
<classifier>jdk15</classifier>
</dependency>
</dependencies>
</project>