package com.yomob.client;

import com.google.common.collect.Lists;
import org.apache.commons.collections.CollectionUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.List;

import static org.apache.spark.sql.functions.col;

public class SparkClient {
    private static final Logger LOGGER = LoggerFactory.getLogger(SparkClient.class);

    /** Counter IDs that mark a click row: "ad_adclick" (ad click) and "cp_adclick" (cross-promo click). */
    private static final Object[] CLICKED_COUNTERS = {"ad_adclick", "cp_adclick"};

    /** Rows with a null value in any of these columns are dropped from the click dataset. */
    private static final String[] DROP_NULL_COLUMNS = {"clickedAd"};

    /** Daily HDFS directories are named yyyyMMdd; DateTimeFormatter is thread-safe, so cache it once. */
    private static final DateTimeFormatter DAY_DIR_FORMAT = DateTimeFormatter.ofPattern("yyyyMMdd");

    private final SparkSession sparkSession;

    public SparkClient(SparkSession sparkSession) {
        this.sparkSession = sparkSession;
    }

    public SparkSession getSparkSession() {
        return sparkSession;
    }

    /**
     * Collects all non-temporary file paths under {@code hdfsPath/yyyyMMdd} for every day in
     * the inclusive range [startDate, endDate].
     *
     * @param hdfsPath  base HDFS directory containing one sub-directory per day
     * @param startDate first day to scan (inclusive)
     * @param endDate   last day to scan (inclusive)
     * @return the matching file paths; empty when nothing was found or listing failed (never null)
     */
    private List<String> getFiles(String hdfsPath, LocalDate startDate, LocalDate endDate) {
        Configuration hadoopConfiguration = getSparkSession().sparkContext().hadoopConfiguration();
        //for local test
        //hadoopConfiguration.set("fs.defaultFS","hdfs://localhost:9000");

        List<String> files = Lists.newArrayList();
        try {
            // FileSystem.get() returns a JVM-wide cached instance; deliberately NOT closed here,
            // because closing it would also break every other user of the same FileSystem
            // (including Spark itself).
            FileSystem fs = FileSystem.get(hadoopConfiguration);
            for (LocalDate date = startDate; !date.isAfter(endDate); date = date.plusDays(1)) {
                files.addAll(getFileNames(fs, hdfsPath, date));
            }
        } catch (IOException e) {
            LOGGER.error("Failed to list HDFS files under {} for [{} .. {}]",
                    hdfsPath, startDate, endDate, e);
            return Collections.emptyList();
        }
        LOGGER.info(">>> HDFS Paths: {}", files);
        return files;
    }

    /**
     * Resolves the input files for the given range, failing fast with a descriptive error
     * instead of the NullPointerException the callers previously produced when no data existed.
     *
     * @throws IllegalStateException when no input files are found for the range
     */
    private String[] requireFiles(String hdfsPath, LocalDate startDate, LocalDate endDate) {
        List<String> files = getFiles(hdfsPath, startDate, endDate);
        if (files.isEmpty()) {
            throw new IllegalStateException(
                    "No input files found under " + hdfsPath + " for [" + startDate + " .. " + endDate + "]");
        }
        return files.toArray(new String[0]);
    }

    /**
     * Loads distinct (deviceID, clickedAd) pairs for click events in the date range.
     * Keeps only rows whose "counterid" is one of {@link #CLICKED_COUNTERS}, renames
     * "udid" to "deviceID" and "ext.video_url_md5" to "clickedAd", and drops rows
     * where "clickedAd" is null.
     *
     * @throws IllegalStateException when no input files are found for the range
     */
    public Dataset<Row> getAdxClickDataset(String hdfsPath, LocalDate startDate, LocalDate endDate) {
        return getSparkSession().read().json(requireFiles(hdfsPath, startDate, endDate))
                .filter(col("counterid").isin(CLICKED_COUNTERS))
                .select("udid", "ext.video_url_md5")
                .withColumnRenamed("udid", "deviceID")
                .withColumnRenamed("video_url_md5", "clickedAd")
                .na().drop(DROP_NULL_COLUMNS)
                .distinct();
    }

    /**
     * Loads the raw JSON dataset for the date range without any filtering or renaming.
     *
     * @throws IllegalStateException when no input files are found for the range
     */
    public Dataset<Row> getBaseDataset(String hdfsPath, LocalDate startDate, LocalDate endDate) {
        return getSparkSession().read().json(requireFiles(hdfsPath, startDate, endDate));
    }

    /**
     * Lists the files in the daily directory {@code hdfsPath/yyyyMMdd} for the given date,
     * skipping ".tmp" files that are still being written by the producer.
     *
     * @return the file paths for that day; empty when the directory does not exist
     */
    private List<String> getFileNames(FileSystem fs, String hdfsPath, LocalDate date) throws IOException {
        Path directory = new Path(hdfsPath, date.format(DAY_DIR_FORMAT));
        if (!fs.exists(directory)) {
            return Collections.emptyList();
        }
        List<String> files = Lists.newArrayList();
        for (FileStatus status : fs.listStatus(directory)) {
            String path = status.getPath().toString();
            // .tmp marks in-flight files; reading them would yield partial/corrupt JSON.
            if (!path.endsWith(".tmp")) {
                files.add(path);
            }
        }
        return files;
    }
}