package com.xxx.udf;

import org.apache.hadoop.hive.ql.exec.UDF;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

/**
 * Expands "start,end" (both yyyy-MM-dd) into a comma-separated list of every
 * date from start to end inclusive, e.g. "2019-05-13,2019-05-14,2019-05-15".
 */
public class DateProcessUDF extends UDF {

    private static final long ONE_DAY_MILLIS = 24L * 60 * 60 * 1000;

    public String evaluate(String input) {
        // guard against null or malformed input instead of throwing
        if (input == null || !input.contains(",")) {
            return null;
        }
        String[] parts = input.split(",");
        String start = parts[0];
        String end = parts[1];
        // SimpleDateFormat is not thread-safe, so build one per call
        SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd");
        StringBuilder sb = new StringBuilder();
        try {
            long startTime = simpleDateFormat.parse(start).getTime();
            long endTime = simpleDateFormat.parse(end).getTime();
            // step forward one day at a time until the end date is passed
            while (startTime <= endTime) {
                if (sb.length() > 0) {
                    sb.append(",");
                }
                sb.append(simpleDateFormat.format(new Date(startTime)));
                startTime += ONE_DAY_MILLIS;
            }
            // an empty result means start was after end
            return sb.length() == 0 ? null : sb.toString();
        } catch (ParseException e) {
            // unparseable dates: return null rather than failing the whole query
            return null;
        }
    }
}
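-- To call the class from HiveQL it has to be packaged into a jar and registered.
-- A minimal sketch: the jar path below is a placeholder, and date_process is the
-- function name assumed by the queries that follow.
ADD JAR /path/to/date_process_udf.jar;
CREATE TEMPORARY FUNCTION date_process AS 'com.xxx.udf.DateProcessUDF';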
-- select date_process(concat(to_date(start_time), ',', to_date(end_time))) returns the
-- comma-separated sequence of dates from start_time to end_time
-- then explode that list (row-to-column) to get one row per date
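-- Quick sanity check of the output format (a sketch with arbitrary literal dates):
select date_process('2019-05-13,2019-05-15');
-- expected: 2019-05-13,2019-05-14,2019-05-15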
select cust_code, regexp_replace(trace_date, '-', '') as trace_date
from
(
    select cust_code,
           date_process(concat(to_date(start_time), ',', to_date(end_time))) as trace_date_array
    from db.xxx
    where del_flag = '1'
      and regexp_replace(to_date(start_time), '-', '') <= '20190515'
      and '20190515' <= regexp_replace(to_date(end_time), '-', '')
) tt1
lateral view explode(split(trace_date_array, ',')) t as trace_date
where regexp_replace(trace_date, '-', '') <= '20190515'