package cn.lagou.dw.flume.interceptor.hive.udf;

import cn.lagou.dw.flume.interceptor.hdfs.ReadHDFSText;
import org.apache.hadoop.hive.ql.exec.UDF;
import org.junit.Test;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * Hive UDF that classifies a {@code yyyy-MM-dd} date string:
 * 1 = holiday, 2 = workday, 0 = rest day (weekend / unknown).
 * Overrides are loaded from an HDFS text file of "date desc" lines.
 *
 * @author admin
 * @date 2022/9/19 22:19
 */
public class JudgeDate extends UDF {

    /**
     * Date-type overrides loaded from HDFS, keyed by "yyyy-MM-dd" date string.
     * Value "1" marks a holiday; any other value marks a workday.
     * Loaded lazily exactly once — the original code re-read the HDFS file and
     * rebuilt this map on every evaluate() call, i.e. once per Hive row.
     */
    private static volatile Map<String, String> dayTypeMap;

    /**
     * Reads the override file and builds the date -> desc map.
     * Lines that do not contain at least two space-separated fields are
     * skipped instead of throwing ArrayIndexOutOfBoundsException.
     */
    private static Map<String, String> loadDayTypes() {
        Map<String, String> map = new HashMap<String, String>();
        for (String line : ReadHDFSText.readTxtFile()) {
            String[] detail = line.split(" ");
            if (detail.length >= 2) {
                map.put(detail[0], detail[1]);
            }
        }
        return map;
    }

    /**
     * Classifies a date string.
     *
     * @param date date in "yyyy-MM-dd" format
     * @return 1 if the override file marks the date a holiday ("1"),
     *         2 if it is marked a workday or falls on Mon-Fri,
     *         0 for weekends and unparseable input
     */
    public int evaluate(String date) {
        Map<String, String> map = dayTypeMap;
        if (map == null) {
            // Double-checked lazy init so concurrent Hive tasks in one JVM
            // load the HDFS file at most once.
            synchronized (JudgeDate.class) {
                if (dayTypeMap == null) {
                    dayTypeMap = loadDayTypes();
                }
                map = dayTypeMap;
            }
        }

        String desc = map.get(date);
        if (desc != null) {
            // Override file wins: "1" = holiday, anything else = workday.
            return "1".equals(desc) ? 1 : 2;
        }

        // Not in the override file: fall back to the day of week.
        // SimpleDateFormat is not thread-safe, so keep it method-local.
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        Calendar calendar = Calendar.getInstance();
        try {
            calendar.setTime(sdf.parse(date));
        } catch (ParseException e) {
            // The original code swallowed this and fell through with "now"
            // still in the calendar, making the result depend on the current
            // day. Treat an unparseable date explicitly as 0 instead.
            return 0;
        }
        int day = calendar.get(Calendar.DAY_OF_WEEK);
        // Mon-Fri => workday (2); Sat/Sun => rest day (0).
        return (day != Calendar.SUNDAY && day != Calendar.SATURDAY) ? 2 : 0;
    }

}
