package cn.itcast.batch;

import cn.itcast.batch.utils.ConfigLoader;
import cn.itcast.batch.utils.DateUtil;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.io.jdbc.JDBCInputFormat;
import org.apache.flink.api.java.io.jdbc.JDBCOutputFormat;
import org.apache.flink.api.java.typeutils.RowTypeInfo;
import org.apache.flink.types.Row;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Types;
import java.util.UUID;

/**
 * 计算汇总统计数据准确率
 * 计算总的正确数据数量和异常数据数量
 */
public class TotalDataRate extends JDBCFormatAbstract {
    private static final Logger logger = LoggerFactory.getLogger(TotalDataRate.class);

    /**
     * Builds the {@link JDBCInputFormat} that reads the total number of correct
     * and erroneous records from Hive.
     *
     * @return input format producing one row of (srcTotalNum, errorTotalNum)
     */
    public JDBCInputFormat getHiveJDBCInputFormat() {
        // FIX: driver class was misspelled "org.apach.hive.jdbc.HiveDriver",
        // which would throw ClassNotFoundException when the format is opened.
        String driverName = "org.apache.hive.jdbc.HiveDriver";
        String url = "jdbc:hive2://node03:10000/itcast_ods";
        String userName = "root";
        String password = "123456";
        // Cross join of the two single-row count subqueries.
        // NOTE(review): removed the trailing ';' — older HiveServer2 JDBC
        // drivers reject statements that end with a semicolon.
        String inputSql = "select srcTotalNum,errorTotalNum from " +
                "(select count(1) srcTotalNum from itcast_src) src," +
                "(select count(1) errorTotalNum from itcast_error) error";
        // Result-set schema: Hive count(1) yields BIGINT, hence LONG_TYPE_INFO.
        TypeInformation[] typeInformations = {BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO};
        String[] colNames = {"srcTotalNum", "errorTotalNum"};
        RowTypeInfo rowTypeInfo = new RowTypeInfo(typeInformations, colNames);
        return getBatchJDBCInputFormat(driverName, url, userName, password, inputSql, rowTypeInfo);
    }

    /**
     * Builds the {@link JDBCOutputFormat} that writes the summary row into the
     * MySQL table {@code itcast_data_rate}. Connection settings are loaded from
     * the application configuration.
     *
     * @return output format expecting rows of
     *         (series_no, src_total_num, error_src_total_num, data_accuracy, data_error_rate, process_date)
     */
    public JDBCOutputFormat getMysqlJDBCOutputFormat() {
        String driverName = ConfigLoader.getProperty("mysql.jdbc.driver");
        String url = ConfigLoader.getProperty("mysql.jdbc.url");
        String userName = ConfigLoader.getProperty("mysql.jdbc.user");
        String password = ConfigLoader.getProperty("mysql.jdbc.password");
        String outputSql = "insert into itcast_data_rate(series_no, src_total_num, error_src_total_num, data_accuracy, data_error_rate,process_date)\n" +
                " values(?,?,?,?,?,?);";

        // SQL types of the six placeholders, in order; they must match the Java
        // types of the fields set in convertHiveDataSource (String, Long, Long,
        // Float, Float, String).
        int[] sqlType = new int[]{Types.VARCHAR, Types.BIGINT, Types.BIGINT, Types.FLOAT, Types.FLOAT, Types.VARCHAR};
        return getBatchJDBCOutputFormat(driverName, url, userName, password, outputSql, sqlType);
    }

    /**
     * Converts each (srcTotalNum, errorTotalNum) row from Hive into the
     * six-field summary row (uuid, srcTotalNum, errorTotalNum, accuracy,
     * errorRate, processDate) expected by the MySQL output format.
     *
     * @param hiveDataSet record counts read from Hive
     * @return dataset of six-field rows matching the insert statement
     */
    public DataSet<Row> convertHiveDataSource(DataSet<Row> hiveDataSet) {
        return hiveDataSet.map(row -> {
            // Counts of correct and erroneous records.
            long srcTotalNum = Long.parseLong(row.getField(0).toString());
            long errorTotalNum = Long.parseLong(row.getField(1).toString());
            long total = srcTotalNum + errorTotalNum;
            // FIX: the original computed the accuracy with long integer
            // division, which truncates the ratio to 0 (or 1), and stored a
            // boxed Long although the output format declares Types.FLOAT.
            // Compute in floating point, guard against division by zero (both
            // tables empty), and store a Float to match the declared SQL type.
            float dataAccuracy = total == 0 ? 0.0f : (float) ((double) srcTotalNum / total);

            Row resultRow = new Row(6);
            resultRow.setField(0, UUID.randomUUID().toString());
            resultRow.setField(1, srcTotalNum);
            resultRow.setField(2, errorTotalNum);
            resultRow.setField(3, dataAccuracy);            // accuracy rate
            resultRow.setField(4, 1.0f - dataAccuracy);     // error rate
            resultRow.setField(5, DateUtil.getTodayDate()); // processing date
            return resultRow;
        });
    }


}
