package com.xiehua.task.persistence.rocks;

import com.xiehua.task.job.pool.DelayJob;
import com.xiehua.task.job.pool.FixedJob;
import com.xiehua.task.job.pool.Job;
import com.xiehua.task.persistence.TaskPersistence;
import com.xiehua.task.prop.XiehuaConfig;
import com.xiehua.task.utils.StringUtils;
import org.apache.commons.pool2.impl.GenericObjectPool;
import org.rocksdb.*;
import org.rocksdb.util.SizeUnit;
import org.tio.utils.json.Json;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static com.xiehua.task.job.pool.Job.JobStatus.fail;
import static com.xiehua.task.job.pool.Job.JobStatus.success;
import static com.xiehua.task.persistence.rocks.RocksFactory.COLUMN_FAMILY_TASK_FAIL;
import static com.xiehua.task.persistence.rocks.RocksFactory.COLUMN_FAMILY_TASK_SUCCESS;


/**
 * RocksDB-backed {@link TaskPersistence}: finished jobs are serialized to JSON
 * and stored in a column family keyed by job status (success / fail).
 *
 * <p>Instances are pooled (see {@code RocksPool}); the underlying {@link RocksDB}
 * handle is shared and released via {@link #close()}.
 */
public class XeihuaPersitence implements TaskPersistence {

    // Column family name -> handle, resolved once at construction.
    private Map<String, ColumnFamilyHandle> columnFamilyHandles;

    // Shared RocksDB instance; released by close().
    private RocksDB rocksDB;

    public XeihuaPersitence(RocksDB rocksDB, Map<String, ColumnFamilyHandle> columnFamilyHandles) throws RocksDBException, IOException {
        this.columnFamilyHandles = columnFamilyHandles;
        this.rocksDB = rocksDB;
    }

    /**
     * Persists a job into the column family matching its status.
     * Jobs whose status is neither {@code success} nor {@code fail} are silently ignored.
     *
     * @param job the job to store; its id becomes the key, its JSON form the value
     * @throws Exception if the RocksDB write fails
     */
    @Override
    public void persistence(Job job) throws Exception {
        // Explicit UTF-8: the no-arg getBytes() depends on the platform charset.
        byte[] key = job.getJobId().getBytes(StandardCharsets.UTF_8);
        byte[] value = Json.toJson(job).getBytes(StandardCharsets.UTF_8);
        if (success.equals(job.getStatus())) {
            rocksDB.put(columnFamilyHandles.get(COLUMN_FAMILY_TASK_SUCCESS), key, value);
        } else if (fail.equals(job.getStatus())) {
            rocksDB.put(columnFamilyHandles.get(COLUMN_FAMILY_TASK_FAIL), key, value);
        }
    }

    /**
     * Looks up a single job by id.
     *
     * <p>NOTE(review): only the SUCCESS column family is consulted, so failed jobs
     * are never found here — confirm whether that is intended.
     *
     * @param taskId the job id used as the RocksDB key
     * @return the deserialized job, or {@code null} if absent
     * @throws Exception if the RocksDB read fails
     */
    @Override
    public Job queryJob(String taskId) throws Exception {
        byte[] bytes = rocksDB.get(columnFamilyHandles.get(COLUMN_FAMILY_TASK_SUCCESS),
                taskId.getBytes(StandardCharsets.UTF_8));
        if (bytes != null && bytes.length > 0) {
            return Json.toBean(new String(bytes, StandardCharsets.UTF_8), Job.class);
        }
        return null;
    }

    /***
     * Paged job query.
     * //TODO known issue: no real paging yet — all matches are returned and paging
     * // is left to the front end; only the time filter is applied, other query
     * // fields are to be implemented later.
     *
     * @param query time-range filter (startTime, endTime)
     * @return a page containing every fail and success job inside the range,
     *         with {@code totalRows} set to the combined count
     */
    @Override
    public Job.JobPage queryJob(Job.Query query) {
        Job.JobPage page = new Job.JobPage();
        int failCount = collectInRange(COLUMN_FAMILY_TASK_FAIL, query, page);
        int successCount = collectInRange(COLUMN_FAMILY_TASK_SUCCESS, query, page);
        page.setTotalRows(failCount + successCount);
        return page;
    }

    /**
     * Scans one column family, appends jobs whose updateTime lies strictly inside
     * (startTime, endTime) to the page, and returns the number of matches.
     */
    private int collectInRange(String columnFamily, Job.Query query, Job.JobPage page) {
        int count = 0;
        // try-with-resources: RocksIterator holds native memory and must be closed.
        try (RocksIterator iterator = rocksDB.newIterator(columnFamilyHandles.get(columnFamily))) {
            for (iterator.seekToFirst(); iterator.isValid(); iterator.next()) {
                Job job = Json.toBean(new String(iterator.value(), StandardCharsets.UTF_8), Job.class);
                if (job.getUpdateTime().isAfter(query.getStartTime())
                        && job.getUpdateTime().isBefore(query.getEndTime())) {
                    // assumes JobPage initializes its list — TODO confirm
                    page.getList().add(job);
                    count++;
                }
            }
        }
        return count;
    }

    /**
     * Counts successful jobs for a topic within (start, end).
     *
     * @param start exclusive lower bound on updateTime
     * @param end   exclusive upper bound on updateTime
     * @param topic topic to match exactly
     **/
    //TODO to be optimized (full scan)
    @Override
    public Integer countSuccessJob(LocalDateTime start, LocalDateTime end, String topic) {
        return countJob(columnFamilyHandles.get(COLUMN_FAMILY_TASK_SUCCESS), start, end, topic, Job.JobStatus.success);
    }

    /**
     * Counts failed jobs for a topic within (start, end).
     *
     * @param start exclusive lower bound on updateTime
     * @param end   exclusive upper bound on updateTime
     * @param topic topic to match exactly
     **/
    //TODO to be optimized (full scan)
    @Override
    public Integer countfailJob(LocalDateTime start, LocalDateTime end, String topic) {
        return countJob(columnFamilyHandles.get(COLUMN_FAMILY_TASK_FAIL), start, end, topic, Job.JobStatus.fail);
    }

    /** Counts jobs in one column family matching the time range, topic and status. */
    private Integer countJob(ColumnFamilyHandle handle, LocalDateTime start, LocalDateTime end, String topic, Job.JobStatus status) {
        // Preserves original behavior: a null topic never matches anything.
        // NOTE(review): confirm whether null should instead mean "all topics".
        if (topic == null) {
            return 0;
        }
        int count = 0;
        // try-with-resources: RocksIterator holds native memory and must be closed.
        try (RocksIterator iterator = rocksDB.newIterator(handle)) {
            for (iterator.seekToFirst(); iterator.isValid(); iterator.next()) {
                Job job = Json.toBean(new String(iterator.value(), StandardCharsets.UTF_8), Job.class);
                if (job.getUpdateTime().isAfter(start) && job.getUpdateTime().isBefore(end)
                        && topic.equals(job.getTopic()) && status.equals(job.getStatus())) {
                    count++;
                }
            }
        }
        return count;
    }

    public Map<String, ColumnFamilyHandle> getColumnFamilyHandles() {
        return columnFamilyHandles;
    }

    public void setColumnFamilyHandles(Map<String, ColumnFamilyHandle> columnFamilyHandles) {
        this.columnFamilyHandles = columnFamilyHandles;
    }

    public RocksDB getRocksDB() {
        return rocksDB;
    }

    public void setRocksDB(RocksDB rocksDB) {
        this.rocksDB = rocksDB;
    }

    /** Releases the underlying RocksDB handle (idempotent on null). */
    @Override
    public void close() {
        if (rocksDB != null) {
            rocksDB.close();
        }
    }

    /** Ad-hoc smoke test against the pooled instance. */
    public static void main(String[] args) throws Exception {
        GenericObjectPool<XeihuaPersitence> pool = RocksPool.getInstance();
        XeihuaPersitence persistence = pool.borrowObject();
        try {
            Integer successCount = persistence.countSuccessJob(LocalDateTime.MIN, LocalDateTime.now(), "b2c_task");
            Integer failCount = persistence.countfailJob(LocalDateTime.MIN, LocalDateTime.now(), "b2c_task");
            System.out.println("count===>" + successCount);
            System.out.println("count===>" + failCount);
        } finally {
            // Always hand the instance back, even if a count throws.
            pool.returnObject(persistence);
        }
    }
}
