package com.bosc.automationui.service;

import com.bosc.automationui.entity.MonitorHistroy;
import com.bosc.automationui.repository.MonitorInfoRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.sql.Date;
import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;

@Service
public class HDFSMonitorService {

    private static final Logger LOG = Logger.getLogger(HDFSMonitorService.class.getName());

    /**
     * Minimal LRU cache built on {@link LinkedHashMap}'s access-order mode:
     * once the map grows past {@code maxEntries}, the least-recently-accessed
     * entry is evicted on the next insertion.
     * Declared {@code static}: it uses no state of the enclosing service, so a
     * hidden outer-instance reference would only waste memory.
     */
    static class MyLRU<K, V> extends LinkedHashMap<K, V> {

        private static final long serialVersionUID = 1L;

        /** Maximum number of entries retained before eviction kicks in. */
        private final int maxEntries;

        public MyLRU(int initialCapacity, int threshold, boolean accessOrder) {
            super(initialCapacity, 0.75f, accessOrder);
            this.maxEntries = threshold;
        }

        @Override
        protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
            return size() > maxEntries;
        }
    }

    /**
     * LRU cache of recently queried tables (key: {@code "database,table"}),
     * since on the production env each table has thousands of partitions and
     * re-querying HDFS for the same table within a day is wasteful.
     * Bounded at 400 entries, access-ordered.
     */
    private final MyLRU<String, Object> cache = new MyLRU<>(200, 400, true);

    /** Base HDFS CLI command; per-query paths are appended to it. */
    final String baseCmd = "hadoop fs -du";

    /**
     * Timestamp of the current check point, refreshed by
     * {@link #getSizeInfoRaw(String)}.
     * NOTE(review): mutable state on a singleton Spring bean — concurrent
     * calls race on this field; consider passing the date through instead.
     */
    java.sql.Date datetime;

    @Autowired
    MonitorInfoRepository monitorInfoRepository;

    /** Sets the check-point timestamp recorded on generated history rows. */
    public void setDatetime(Date datetime) {
        this.datetime = datetime;
    }

    /**
     * Runs a shell command through a Java process and collects its stdout.
     *
     * @param command path argument(s) to append to {@code hadoop fs -du}
     *                (must start with a space), or {@code null} to scan the
     *                default warehouse databases
     * @return the command's stdout, one list element per line; empty on failure
     */
    public List<String> getSizeInfoRaw(String command) {
        final String defaultCmd = baseCmd + " /user/hive/warehouse/shdata.db/ " +
                "/user/hive/warehouse/sdata.db/ " + "/user/hive/warehouse/shdata.db/*/* " + "/user/hive/warehouse/sdata.db/*/*";
        String cmd = command == null ? defaultCmd : baseCmd + command;
        // Record when this check point ran; stamped onto the resulting rows.
        setDatetime(new java.sql.Date(System.currentTimeMillis()));
        List<String> processList = new ArrayList<>();
        try {
            // SECURITY NOTE(review): cmd may embed caller-supplied db/table names;
            // switch to ProcessBuilder with an argument list if those names can
            // ever come from untrusted input.
            Process process = Runtime.getRuntime().exec(cmd);
            // try-with-resources: the reader (and underlying stream) is closed
            // even if readLine() throws — the original leaked it on error.
            try (BufferedReader input = new BufferedReader(
                    new InputStreamReader(process.getInputStream(), StandardCharsets.UTF_8))) {
                String line;
                while ((line = input.readLine()) != null) {
                    processList.add(line);
                }
            }
        } catch (IOException e) {
            // Best-effort by design: callers receive an empty list on failure.
            LOG.log(Level.WARNING, "Failed to run HDFS command: " + cmd, e);
        }
        return processList;
    }

    /**
     * Due to the following considerations, the entry api {@code checkMonitorInfo()}
     * will just produce the summary of the data warehouse including the disk size
     * taken and number of empty partitions.
     * 1. It won't get info down to the granularity of table level for query
     * efficiency consideration.
     * 2. There are tens of thousands of tables in the data warehouse, which would
     * waste database space if every table had its own record at each check point.
     *
     * @param cmdLineReturns the lines returned by the earlier shell command
     * @return a list of {@link MonitorHistroy} entities, one per database row
     */
    public List<MonitorHistroy> processCmdLineResult(List<String> cmdLineReturns) {
        List<MonitorHistroy> result = new ArrayList<>();
        // Placeholders until partition counting is implemented (-1 = unknown).
        long[] numEmptyPartition = new long[]{-1, -1};
        long[] numPartitions = new long[]{-1, -1};
        // FIX: was {"shdat", "sdata"} — "shdat" was a typo that did not match
        // the /user/hive/warehouse/shdata.db path queried above.
        String[] dbArray = new String[]{"shdata", "sdata"};
        for (int i = 0; i < dbArray.length; i++) {
            // NOTE(review): split(" ")[5] assumes `hadoop fs -du` pads columns with
            // single spaces so the size lands at index 5; split("\\s+")[0] would be
            // more robust — confirm against the real CLI output before changing.
            MonitorHistroy element = new MonitorHistroy.Builder().
                    setDate(datetime).
                    setDb(dbArray[i]).
                    setSize(Long.valueOf(cmdLineReturns.get(i).split(" ")[5])).
                    setNumEmptPart(numEmptyPartition[i]).
                    setNumPart(numPartitions[i]).build();
            result.add(element);
        }
        return result;
    }

    /**
     * Uses the HDFS CLI tool to get the disk info of the hive databases and
     * saves a check-point record in the oracle database for later display.
     *
     * @return a response map with {@code code} "0" and {@code message} "success"
     */
    public HashMap<String, Object> checkMonitorInfo() {
        HashMap<String, Object> responseJson = new HashMap<>();
        List<String> cmdlineReturns = getSizeInfoRaw(null);
        List<MonitorHistroy> elements = processCmdLineResult(cmdlineReturns);
        doSaveToDb(elements);
        responseJson.put("code", "0");
        responseJson.put("message", "success");
        return responseJson;
    }

    /**
     * Returns the monitor info at the most recent check point.
     * TODO(review): not yet implemented — currently always returns an empty map.
     *
     * @return the latest monitoring snapshot (currently empty)
     */
    public HashMap<String, Object> getMonitorHistory() {
        HashMap<String, Object> result = new HashMap<>();
        return result;
    }

    /**
     * Inserts the generated monitoring histories into the oracle database.
     *
     * @param elements the history rows to persist
     */
    public void doSaveToDb(List<MonitorHistroy> elements) {
        monitorInfoRepository.saveAll(elements);
    }

    /**
     * Queries the info of a specific hive table in a specific database:
     * 1. Size taken regardless of replicas
     * 2. Number of empty partitions (or nearly empty)
     * 3. Number of small files (files noticeably smaller than the HDFS block size)
     * An LRU cache returns recently queried tables immediately; otherwise a
     * call to HDFS is made and its result cached.
     *
     * @param database the database the table belongs to
     * @param table    the name of the table
     * @return a map whose {@code data} entry holds the table info
     */
    public HashMap<String, Object> queryInfoByTable(String database, String table) {
        HashMap<String, Object> result = new HashMap<>();
        String key = database + "," + table;
        if (cache.containsKey(key)) {
            result.put("data", cache.get(key));
        } else {
            // FIX: the original format string had no %s placeholders (database and
            // table were silently ignored), and baseCmd was prepended twice — once
            // here and once inside getSizeInfoRaw().
            String cmd = String.format(" /user/hive/warehouse/%s.db/%s", database, table);
            List<String> cmdReturns = getSizeInfoRaw(cmd);
            // FIX: the miss branch discarded its result and never filled the cache,
            // leaving the LRU dead code. TODO(review): parse size / empty partitions /
            // small files out of the raw lines; for now the raw output is returned.
            cache.put(key, cmdReturns);
            result.put("data", cmdReturns);
        }
        return result;
    }
}

