package com.ir.stat.spark.service;


import com.ir.stat.spark.bean.DatasourceStat;
import com.ir.stat.spark.bean.FileStat;
import com.ir.stat.spark.bean.ResourceDirStat;
import com.ir.stat.spark.client.GetClient;
import com.ir.stat.spark.statistics.StatisticsFacade;
import com.ir.stat.spark.utils.HdfsUtil;
import com.ir.stat.spark.utils.JsonBuilder;
import com.ir.stat.spark.utils.MD5;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.RemoteIterator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.util.StopWatch;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

@Component
public class ResourceStatService {

    // Base URL of the woven server exposing the resource/statistics REST API.
    @Value("${woven.server.url}")
    private String serverUrl;

    // Auth token sent with every REST call to the woven server.
    @Value("${woven.server.token}")
    private String serverToken;

    // Name of the root dataset directory fetched by updateDataset().
    @Value("${resource.update.dataset.name}")
    private String datasetName;


    // Name of the root external-dataset directory fetched by updateExtDataset().
    @Value("${resource.update.ext.dataset.name}")
    private String extDatasetName;

    // Name of the root datasource directory fetched by updateDatesource().
    @Value("${resource.update.datasource.name}")
    private String datasourceName;

    // On-disk path of the local stat store — presumably the backing file of
    // StorageService; not referenced in this class, confirm it is still used.
    @Value("${resource.db.path:/tmp/stat.map.db}")
    private String dbPath;

    // HDFS root path walked by updateFile().
    @Value("${resource.update.file.path}")
    private String statRootPath;

    // Local persistent store for computed stat trees and record baselines.
    @Autowired
    private StorageService storageService;

    // One GetClient per worker thread — the dataset counting below runs on the
    // common fork-join pool, and each thread gets its own client instance
    // (presumably because GetClient is not thread-safe; confirm).
    private ThreadLocal<GetClient> threadLocalGetClient =
            ThreadLocal.withInitial(() -> new GetClient(serverUrl));

    /**
     * Holder for the aggregated totals of a resource sub-tree:
     * total byte size, total record count and number of counted resources.
     */
    class CountBean {
        long size;    // total bytes
        long records; // total records
        int count;    // number of datasets/datasources counted

        public CountBean(long size, long records, int count) {
            this.size = size;
            this.records = records;
            this.count = count;
        }

        public long getSize() { return size; }

        public void setSize(long size) { this.size = size; }

        public long getRecords() { return records; }

        public void setRecords(long records) { this.records = records; }

        public int getCount() { return count; }

        public void setCount(int count) { this.count = count; }
    }


    /**
     * Recursively aggregates byte size, record count and resource count for
     * {@code rds} and all of its descendants, writing the totals (and the delta
     * against the persisted baseline) back onto every visited node.
     *
     * @param rds                   root of the sub-tree to count; mutated in place
     * @param countDatasetWithSpark when true, size/records come from a Spark
     *                              analyze job; otherwise size is summed from
     *                              HDFS and records fetched via REST
     * @return the aggregated totals of the whole sub-tree
     */
    public CountBean count(ResourceDirStat rds, boolean countDatasetWithSpark) {
        System.out.println("counting rds " + rds.getName());
        // Atomics: the dataset loop below runs on the common fork-join pool.
        final AtomicLong tSize = new AtomicLong();
        final AtomicLong tRecords = new AtomicLong();
        final AtomicInteger tCount = new AtomicInteger();
        for (ResourceDirStat child : rds.getChildren()) {
            CountBean childTotals = count(child, countDatasetWithSpark);
            tSize.addAndGet(childTotals.getSize());
            tRecords.addAndGet(childTotals.getRecords());
            tCount.addAndGet(childTotals.getCount());
        }
        if (rds.getDatasets() != null) {
            rds.getDatasets().parallelStream().forEach((dataset) -> {
                String threadName = Thread.currentThread().getName();
                System.out.println(threadName + ": counting dataset " + dataset.getName());
                long size = 0L;
                long records = 0L;
                if (countDatasetWithSpark) {
                    System.out.println(threadName + ": counting dataset " + dataset.getName() + " records with spark ..");
                    try {
                        String tableName = "TB_" + System.currentTimeMillis();
                        String fileFormat = dataset.getFormat().toUpperCase();
                        String filePath = dataset.getPath();
                        Map<String, String> formatOptions = new HashMap<>();
                        formatOptions.put("sep", ",");
                        StatisticsFacade.doAnalyze(tableName, new HashMap<>(), filePath, fileFormat, formatOptions, false, "default");
                        // e.g. {"sizeInBytes":33935294,"rowCount":255783,"colStats":{}}
                        String json = StatisticsFacade.getAnalyzeResult(tableName, "default");
                        System.err.println(threadName + ": dataset " + dataset.getName() + " spark result json = " + json);
                        Map<String, Object> stat = JsonBuilder.getInstance().fromJson(json, HashMap.class);
                        // Parse via double: generic JSON deserialization may surface
                        // numbers as Double ("3.3935294E7"), which Long.valueOf rejects.
                        size = (long) Double.parseDouble(stat.getOrDefault("sizeInBytes", 0).toString());
                        records = (long) Double.parseDouble(stat.getOrDefault("rowCount", 0).toString());
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                } else {
                    if (StringUtils.isNotBlank(dataset.getPath())) {
                        FileStatus status = HdfsUtil.getInstance().status(dataset.getPath());
                        if (status != null) {
                            size = getDatasetSize(dataset.getPath());
                        }
                    }
                    System.out.println(threadName + ": fetching dataset " + dataset.getName() + " records with rest ..");
                    records = getDatasetRecords(dataset.getId());
                }
                dataset.setSize(size);
                dataset.setRecords(records);
                System.out.println(threadName + ": dataset " + dataset.getName() + " count done, records =  " + records + ", size = " + size);
                tSize.addAndGet(size);
                tRecords.addAndGet(records);
                tCount.addAndGet(1);
            });
        }
        if (rds.getDatasources() != null) {
            // Datasources contribute to the resource count only, not size/records.
            tCount.addAndGet(rds.getDatasources().size());
        }
        rds.setSize(tSize.get());
        rds.setRecords(tRecords.get());
        // Delta against the record count persisted during the previous run.
        long previousRecords = storageService.getLong(StorageService.INDEX_RECORDS_MAPPING, rds.getId());
        rds.setDeltaRecords(rds.getRecords() - previousRecords);
        rds.setCount(tCount.get());
        System.out.println("rds " + rds.getName() + " count done!");
        return new CountBean(tSize.get(), tRecords.get(), tCount.get());
    }

    /**
     * Sums the lengths of all files under {@code path} by walking HDFS
     * recursively. Any failure is logged and whatever was accumulated so far
     * is returned.
     */
    private long getDatasetSize(String path) {
        long total = 0L;
        try {
            RemoteIterator<LocatedFileStatus> files = HdfsUtil.getInstance().list(path, true);
            while (files.hasNext()) {
                total += files.next().getLen();
            }
        } catch (Throwable t) {
            t.printStackTrace();
        }
        return total;
    }

    /**
     * Fetches the record count of a dataset from the woven REST service.
     *
     * @param datasetId id of the dataset to query
     * @return the reported record count, or 0 when the service returns a blank
     *         or non-numeric response
     */
    private long getDatasetRecords(String datasetId) {
        Map<String, String> params = new HashMap<>();
        params.put("datasetId", datasetId);
        String resp = threadLocalGetClient.get().doGet("/woven/statistics/resource/dataset/records?" + UrlEncodeUTF8(params), serverToken).getResponseText();
        if (StringUtils.isNotBlank(resp)) {
            try {
                return Long.parseLong(resp.trim());
            } catch (NumberFormatException e) {
                // A non-numeric body (e.g. an HTML error page) would otherwise
                // blow up the parallel counting pipeline; treat it as unknown.
                System.err.println("unparseable records response for dataset " + datasetId + ": " + resp);
            }
        }
        return 0L;
    }

    /**
     * Persists the current record count of {@code rds} and every descendant
     * node into the records-mapping index; these values serve as the baseline
     * for the next run's delta computation.
     */
    private void updateRecordsMapping(ResourceDirStat rds) {
        Deque<ResourceDirStat> pending = new ArrayDeque<>();
        pending.push(rds);
        while (!pending.isEmpty()) {
            ResourceDirStat node = pending.pop();
            storageService.put(StorageService.INDEX_RECORDS_MAPPING, node.getId(), node.getRecords());
            for (ResourceDirStat child : node.getChildren()) {
                pending.push(child);
            }
        }
    }

    /**
     * Scheduled job: fetches the dataset resource tree from the woven server,
     * counts size/records for every node (HDFS + REST, no Spark), persists the
     * result under the "datasets" key and updates the per-node record baseline.
     */
    @Scheduled(cron = "${resource.update.trigger}")
    public void updateDataset() {
        System.out.println("updating datasets ...");
        try {
            Map<String, String> params = new HashMap<>();
            params.put("name", datasetName);
            params.put("type", "dataset_dir");
            StopWatch sw = new StopWatch();
            sw.start();
            String resp = threadLocalGetClient.get().doGet("/woven/statistics/resource/tree?" + UrlEncodeUTF8(params), serverToken).getResponseText();
            if (resp != null) {
                ResourceDirStat rds = JsonBuilder.getInstance().fromJson(resp, ResourceDirStat.class);
                CountBean cdb = count(rds, false);
                rds.setRecords(cdb.getRecords());
                rds.setSize(cdb.getSize());
                rds.setCount(cdb.getCount());
                rds.setStatTime(new Date());
                long previousRecords = storageService.getLong(StorageService.INDEX_RECORDS_MAPPING, rds.getId());
                rds.setDeltaRecords(rds.getRecords() - previousRecords);
                // Persist the new counts as the baseline for the next delta.
                updateRecordsMapping(rds);
                storageService.put(StorageService.INDEX_RESOURCES, "datasets", JsonBuilder.getInstance().toJson(rds));
                sw.stop();
                System.out.println("datasets update done, count: " + rds.getCount() + ", records: " + rds.getRecords() + ", size: " + rds.getSize() + ", use time: " + sw.getLastTaskInfo().getTimeSeconds());

                storageService.flushToDisk();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }


    /**
     * Scheduled job: fetches the external dataset resource tree, counts every
     * node using Spark analyze jobs and persists the result under the
     * "ext_datasets" key.
     */
    @Scheduled(cron = "${resource.update.trigger}")
    public void updateExtDataset() {
        System.out.println("updating ext datasets ...");
        try {
            Map<String, String> params = new HashMap<>();
            params.put("name", extDatasetName);
            params.put("type", "dataset_dir");
            StopWatch sw = new StopWatch();
            sw.start();
            String resp = threadLocalGetClient.get().doGet("/woven/statistics/resource/tree?" + UrlEncodeUTF8(params), serverToken).getResponseText();
            if (resp != null) {
                ResourceDirStat rds = JsonBuilder.getInstance().fromJson(resp, ResourceDirStat.class);
                CountBean cdb = count(rds, true);
                rds.setRecords(cdb.getRecords());
                rds.setSize(cdb.getSize());
                rds.setCount(cdb.getCount());
                rds.setStatTime(new Date());
                long previousRecords = storageService.getLong(StorageService.INDEX_EXT_RECORDS_MAPPING, rds.getId());
                rds.setDeltaRecords(rds.getRecords() - previousRecords);
                // NOTE(review): the delta baseline is read from INDEX_EXT_RECORDS_MAPPING,
                // but updateRecordsMapping persists into INDEX_RECORDS_MAPPING — so the
                // ext baseline is never written and the regular dataset baseline gets
                // overwritten by ext counts. Looks like a bug; confirm intent before fixing.
                updateRecordsMapping(rds);
                storageService.put(StorageService.INDEX_RESOURCES, "ext_datasets", JsonBuilder.getInstance().toJson(rds));
                sw.stop();
                System.out.println("ext datasets update done, count: " + rds.getCount() + ", records: " + rds.getRecords() + ", size: " + rds.getSize() + ", use time: " + sw.getLastTaskInfo().getTimeSeconds());

                storageService.flushToDisk();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Scheduled job: fetches the datasource resource tree and persists it under
     * the "datasources" key. Datasources contribute only to the resource count;
     * size/records on the root come from whatever count() wrote onto the node.
     * (The method name keeps its historical typo — "Datesource" — so existing
     * callers/config remain valid.)
     */
    @Scheduled(cron = "${resource.update.trigger}")
    public void updateDatesource() {
        System.out.println("updating datasources ...");
        StopWatch sw = new StopWatch();
        sw.start();
        try {
            Map<String, String> params = new HashMap<>();
            params.put("name", datasourceName);
            params.put("type", "datasource_dir");
            String resp = threadLocalGetClient.get().doGet("/woven/statistics/resource/tree?" + UrlEncodeUTF8(params), serverToken).getResponseText();
            if (resp != null) {
                ResourceDirStat rds = JsonBuilder.getInstance().fromJson(resp, ResourceDirStat.class);
                CountBean cdb = count(rds, false);
                rds.setCount(cdb.getCount());
                rds.setStatTime(new Date());
                storageService.put(StorageService.INDEX_RESOURCES, "datasources", JsonBuilder.getInstance().toJson(rds));
                sw.stop();
                System.out.println("datasources update done, count: " + rds.getCount() + ", records: " + rds.getRecords() + ", size: " + rds.getSize() + ", use time: " + sw.getLastTaskInfo().getTimeSeconds());

                storageService.flushToDisk();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Scheduled job: walks the HDFS tree rooted at {@code statRootPath},
     * aggregates file counts and byte sizes per directory and persists the
     * resulting tree under the "files" key.
     */
    @Scheduled(cron = "${resource.update.trigger}")
    public void updateFile() {
        try {
            System.out.println("updating files ...");
            StopWatch watch = new StopWatch();
            watch.start();
            ResourceDirStat root = new ResourceDirStat();
            // NOTE(review): the root gets a fresh random UUID every run while child
            // dirs get stable MD5-of-path ids — confirm whether a stable root id
            // (e.g. MD5.get(statRootPath)) was intended.
            root.setId(UUID.randomUUID().toString());
            root.setName(statRootPath);
            countFiles(root);
            storageService.put(StorageService.INDEX_RESOURCES, "files", JsonBuilder.getInstance().toJson(root));
            watch.stop();
            System.out.println("files update done, count: " + root.getCount() + ", size: " + root.getSize() + ", use time: " + watch.getLastTaskInfo().getTimeSeconds());

            storageService.flushToDisk();
        } catch (Throwable throwable) {
            throwable.printStackTrace();
        }
    }

    /**
     * Recursively lists the HDFS directory named by {@code rds.getName()},
     * recording plain files on the node and recursing into sub-directories.
     * File count and byte size are aggregated bottom-up onto every node.
     *
     * @param rds node whose name is the HDFS path to scan; mutated in place
     * @return the same node, for call chaining
     * @throws Throwable any HDFS failure is propagated to the caller
     */
    public ResourceDirStat countFiles(ResourceDirStat rds) throws Throwable {
        FileStatus[] entries = HdfsUtil.getInstance().listStatus(rds.getName());
        List<FileStat> files = new ArrayList<>();
        long tSize = 0L;
        int tCount = 0;
        for (FileStatus fs : entries) {
            String path = fs.getPath().toUri().getPath();
            if (fs.isDirectory()) {
                ResourceDirStat child = new ResourceDirStat();
                // Stable id derived from the path so ids survive across runs.
                child.setId(MD5.get(path));
                child.setName(path);
                countFiles(child);
                rds.getChildren().add(child);
            } else {
                files.add(new FileStat(path, fs.getLen(), fs.getModificationTime()));
                tCount++;
                tSize += fs.getLen();
            }
        }
        // Fold the already-computed child totals into this node.
        for (ResourceDirStat child : rds.getChildren()) {
            tCount += child.getCount();
            tSize += child.getSize();
        }
        rds.setSize(tSize);
        rds.setCount(tCount);
        if (!files.isEmpty()) {
            rds.setFiles(files);
        }
        return rds;
    }

    /**
     * Loads the persisted stat tree for the given resource type ("datasets",
     * "ext_datasets", "datasources" or "files"). Returns an empty placeholder
     * node when nothing has been persisted yet.
     */
    public ResourceDirStat getStat(String resType) {
        String json = storageService.getString(StorageService.INDEX_RESOURCES, resType);
        if (json == null) {
            return new ResourceDirStat();
        }
        return JsonBuilder.getInstance().fromJson(json, ResourceDirStat.class);
    }

    /**
     * Looks up a single node by id inside the persisted stat tree of the given
     * resource type. Returns null when no node with that id exists.
     */
    public ResourceDirStat getStat(String resType, String resId) {
        ResourceDirStat tree = getStat(resType);
        return tree == null ? null : findResourceDirStatById(resId, tree);
    }

    /**
     * Depth-first search for the node whose id matches {@code targetId}
     * (case-insensitively), starting at {@code rds}. Returns null if absent.
     */
    private ResourceDirStat findResourceDirStatById(String targetId, ResourceDirStat rds) {
        if (targetId.equalsIgnoreCase(rds.getId())) {
            return rds;
        }
        for (ResourceDirStat child : rds.getChildren()) {
            ResourceDirStat match = findResourceDirStatById(targetId, child);
            if (match != null) {
                return match;
            }
        }
        return null;
    }

    /**
     * Renders the map as an application/x-www-form-urlencoded query string
     * ("k1=v1&amp;k2=v2"), with every key and value UTF-8 percent-encoded.
     */
    private static String UrlEncodeUTF8(Map<?, ?> map) {
        StringJoiner query = new StringJoiner("&");
        for (Map.Entry<?, ?> entry : map.entrySet()) {
            query.add(UrlEncodeUTF8(entry.getKey().toString()) + "=" + UrlEncodeUTF8(entry.getValue().toString()));
        }
        return query.toString();
    }

    /** URL-encodes a single value using UTF-8. */
    private static String UrlEncodeUTF8(String s) {
        final String charset = "UTF-8";
        try {
            return URLEncoder.encode(s, charset);
        } catch (UnsupportedEncodingException e) {
            // Effectively unreachable: every JVM is required to support UTF-8.
            throw new UnsupportedOperationException(e);
        }
    }


}
