package com.sg.java.apps;

import com.google.common.collect.Lists;
import com.sg.java.security.SecurityPrepare;
import com.sg.java.util.HdfsUtils;
import com.sg.java.util.SqlUtils;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.DataFrameReader;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Iterator;
import java.util.List;

public class SparkReadFile {

    private static final Logger log = LoggerFactory.getLogger(SparkReadFile.class);

    /** Column index holding the yyyyMMdd partition date (ds) in each TSV record. */
    private static final int DS_COLUMN = 97;

    /**
     * Spark driver entry point: reads tab-separated text files from an HDFS
     * directory, splits each line into columns, and inserts every partition's
     * rows into PostgreSQL via {@link ReadHdfsFile#doGetResult}.
     *
     * @param args args[0] is the HDFS directory, resolved against
     *             {@link HdfsUtils#uriPathPrefix}
     * @throws Exception propagated from the Spark job (parse/JDBC failures
     *                   inside a partition abort the job)
     */
    public static void main(String[] args) throws Exception {
        if (args.length < 1) {
            throw new IllegalArgumentException("usage: SparkReadFile <hdfsDir>");
        }
        log.info("配置kerberos认证：");
        try {
            SecurityPrepare.cqEcsKerberosLogin2();
        } catch (Exception e) {
            // Best-effort login kept from the original behavior, but logged with
            // the full cause via SLF4J instead of printStackTrace() so the
            // failure lands in the job log.
            log.error("kerberos login failed", e);
        }
        String hdfsDir = HdfsUtils.uriPathPrefix + "/" + args[0];
        log.info("hdfsDir:{}", hdfsDir);

        SparkSession spark = SparkSession
                .builder()
                .appName(SparkReadFile.class.getSimpleName())
                .getOrCreate();

        JavaRDD<String> data = spark.read().textFile(hdfsDir).javaRDD();
        JavaRDD<String[]> map = data.map((Function<String, String[]>) s -> s.split("\t"));

        // Lambda instead of an anonymous class with Connection/Logger instance
        // fields: those fields were initialized on the DRIVER when the closure
        // was constructed, and java.sql.Connection is not serializable, so
        // shipping the closure to executors would fail — and a driver-side
        // connection would be unusable on an executor anyway. All per-partition
        // state now lives inside call().
        map.foreachPartition((VoidFunction<Iterator<String[]>>) iterator -> {
            if (!iterator.hasNext()) {
                return; // empty partition: nothing to insert, avoid null ds/date
            }
            // Materialize the partition BEFORE peeking at the first record.
            // The original consumed the first record to read ds and then
            // excluded it from the list handed to doGetResult, silently
            // losing one row per partition.
            List<String[]> rows = Lists.newArrayList(iterator);
            String ds = rows.get(0)[DS_COLUMN];
            // SimpleDateFormat is created locally per partition, so its known
            // thread-unsafety cannot bite here.
            Date date = new SimpleDateFormat("yyyyMMdd").parse(ds);
            log.info("ds:{}", ds);
            // try-with-resources guarantees the JDBC connection is closed even
            // when doGetResult throws (the original leaked it on failure).
            try (Connection pgConn = SqlUtils.newCqPgConn()) {
                int affected = ReadHdfsFile.doGetResult(rows, pgConn, ds, date, null, null);
                log.info("已插入:{}", affected);
            }
        });
    }

}
