package com.ericsson.epc.modules.userebm.service.impl;

import com.ericsson.common.util.*;
import com.ericsson.epc.modules.userebm.entity.CollectDeploy;
import com.ericsson.epc.modules.userebm.entity.EbmIpImsiMongo;
import com.ericsson.epc.modules.userebm.entity.EpcDictEBM;
import com.ericsson.epc.modules.userebm.entity.UserEbmData;
import com.ericsson.epc.modules.userebm.mapper.EpcDictEBMMapper;
import com.ericsson.epc.modules.userebm.mapper.UserEbmMapper;
import com.ericsson.epc.modules.userebm.service.UserEbmService;
import com.ericsson.epc.modules.userebm.thread.MultiThread;
import com.mongodb.BasicDBObject;
import com.mongodb.client.model.IndexModel;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.dao.DataAccessException;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.stereotype.Service;

import java.util.*;

@Service("userEbmService")
public class UserEbmServiceImpl implements UserEbmService {
    /** Job-specific logger writing to the UserEbmJob log file. */
    private final Logger log = LoggerUtils.Logger(LogFileName.UserEbmJob);
    @Autowired
    private MongoTemplate mongoTemplate;
    @Autowired
    private UserEbmMapper userEbmMapper;
    @Autowired
    private EpcDictEBMMapper epcDictEBMMapper;
    // Dictionary key: retention period of the ip/imsi MongoDB collections
    // (presumably in days, fed to DateUtils.getPastDate — TODO confirm).
    private static final String EBMIPIMSI_MONGO_TIME = "EBMIPIMSI_MONGO_TIME";
    // Dictionary key: number of worker threads used per CDR collector.
    private static final String EBMIPIMSI_THREAD_COUNT = "EBMIPIMSI_THREAD_COUNT";

    /**
     * Loads the configured CDR collector deployments from the database.
     *
     * @return the collector list as returned by the mapper (may be empty)
     */
    public List<CollectDeploy> getCollectList() {
        return userEbmMapper.getCollectDeployList();
    }

    /**
     * Processes EBM user data on every CDR collector, one outer thread per
     * collector; each collector's file range is in turn split across
     * {@code threadCount} inner work items (see {@link #dealFileName}).
     *
     * @param collectDeployList collectors to process
     * @param programDate       date string starting with "yyyy-MM-dd"
     * @return merged ip -&gt; values map across all collectors; empty on failure
     */
    @Override
    public Map<String, Set<String>> dealUserEbm(List<CollectDeploy> collectDeployList, String programDate) {
        // The remote CDR directory is named "yyyyMMdd".
        String fileDate = programDate.substring(0, 10).replace("-", "");
        int threadCount = getThreadCount();
        // thread index ("0".."n-1") -> "start:end" file range, e.g. "0000:2499"
        Map<String, String> fileName = dealFileName(threadCount);
        log.info("dealUserEbm start");
        MultiThread<CollectDeploy, Map<String, Set<String>>> multiThread =
                new MultiThread<CollectDeploy, Map<String, Set<String>>>(collectDeployList) {
            @Override
            public Map<String, Set<String>> outExecute(int currentThread, CollectDeploy data) {
                // Build one work item per inner worker thread, all targeting this collector.
                List<UserEbmData> userEbmDataList = new ArrayList<>();
                for (int i = 0; i < threadCount; i++) {
                    UserEbmData userEbmData = new UserEbmData();
                    userEbmData.setCollectDeploy(data);
                    userEbmData.setFileName(fileName.get(String.valueOf(i)));
                    userEbmDataList.add(userEbmData);
                }
                log.info("get single collect thread current start:{}:{}", currentThread, data);
                Map<String, Set<String>> resultSingleCollect = dealUserEbmSingle(userEbmDataList, fileDate);
                log.info("get single collect thread current end:{}:{}:size:{}",
                        currentThread, data, resultSingleCollect.size());
                return resultSingleCollect;
            }
        };
        Map<String, Set<String>> resultCollect = new HashMap<>();
        try {
            List<Map<String, Set<String>>> resultList = multiThread.getResult();
            log.info("dealUserEbm end collect size:{}", resultList.size());
            // Merge each collector's result into the overall map.
            for (Map<String, Set<String>> single : resultList) {
                resultCollect = CollectionUtils.mergeMap(resultCollect, single);
            }
        } catch (Exception ex) {
            // Pass the throwable so the stack trace is preserved in the log.
            log.error("get single collect error", ex);
        }
        return resultCollect;
    }

    /**
     * Processes the data of a single CDR collector: each work item opens its
     * own SSH session and runs the cdr_ip.lua script over its file range,
     * merging per-file results.
     *
     * @param userEbmDataList work items (collector + "start:end" file range)
     * @param fileDate        remote directory name, "yyyyMMdd"
     * @return merged map of this collector's results; empty on failure
     */
    public Map<String, Set<String>> dealUserEbmSingle(List<UserEbmData> userEbmDataList, String fileDate) {
        MultiThread<UserEbmData, Map<String, Set<String>>> multiThreadFile =
                new MultiThread<UserEbmData, Map<String, Set<String>>>(userEbmDataList) {
            @Override
            public Map<String, Set<String>> outExecute(int currentThread, UserEbmData data) {
                Map<String, Set<String>> map = new HashMap<>();
                SSHUtil sshUtil = null;
                try {
                    // data.getFileName() is "start:end", e.g. "0000:2499".
                    String[] range = data.getFileName().split(":");
                    int startFilename = Integer.parseInt(range[0]);
                    int endFilename = Integer.parseInt(range[1]);
                    sshUtil = new SSHUtil(data.getCollectDeploy().getIp(),
                            data.getCollectDeploy().getUsername(),
                            data.getCollectDeploy().getPassword());
                    sshUtil.connect();
                    sshUtil.getShellChannel();
                    log.info("get single data thread current start:{}:{}", currentThread, data);
                    for (int i = startFilename; i <= endFilename; i++) {
                        log.info("run:{}:{}:{}", currentThread, data, String.format("%04d", i));
                        Map<String, Set<String>> tempmap = sshUtil.execCommandMap(
                                "lua /opt/Ericsson/core/bin/cdr_ip.lua /opt/Ericsson/core/FILE/usercdrdown/ericsson/cdrfiledone/desc/"
                                        + fileDate + "/" + String.format("%04d", i));
                        // Merge each file's result into this thread's accumulator.
                        map = CollectionUtils.mergeMap(map, tempmap);
                    }
                } catch (Exception ex) {
                    log.error("get single data thread error current:" + currentThread + ":" + data, ex);
                } finally {
                    // Always release the SSH connection, even on failure.
                    if (null != sshUtil) {
                        sshUtil.disconnect();
                    }
                }
                log.info("get single data thread current end:{}:{}:mapSize:{}",
                        currentThread, data, map.size());
                return map;
            }
        };
        // Start from an empty list so a failure in getResult() cannot cause a
        // NullPointerException in the merge loop below (bug in the original:
        // resultList stayed null when getResult() threw).
        List<Map<String, Set<String>>> resultList = new ArrayList<>();
        try {
            // Gather each work item's result for this collector.
            resultList = multiThreadFile.getResult();
        } catch (Exception ex) {
            log.error("get single data error", ex);
        }
        Map<String, Set<String>> singleCollect = new HashMap<>();
        // Merge every work item's map into the collector-level result.
        for (Map<String, Set<String>> single : resultList) {
            singleCollect = CollectionUtils.mergeMap(singleCollect, single);
        }
        return singleCollect;
    }

    /**
     * Splits the 10000 CDR files (named "0000".."9999") evenly across the
     * given number of threads.
     *
     * @param threadCount number of worker threads; a non-positive count
     *                    yields an empty map
     * @return thread index ("0".."n-1") -&gt; zero-padded "start:end" range,
     *         e.g. "0000:2499"
     */
    private Map<String, String> dealFileName(Integer threadCount) {
        Map<String, String> fileNameMap = new HashMap<>();
        // Total number of files; the last valid file name is "9999".
        final int totalFiles = 10000;
        int threadNum = threadCount;
        if (threadNum <= 0) {
            return fileNameMap;
        }
        int base = totalFiles / threadNum;      // files per thread
        int remainder = totalFiles % threadNum; // extra files for the last thread
        int start = 0;
        for (int i = 1; i <= threadNum; i++) {
            // The last thread absorbs the remainder and is clamped to 9999.
            int end = (i == threadNum) ? totalFiles - 1 : start + base - 1;
            // %04d fixes the original single-"0" padding bug, which produced
            // ranges like "04:07" instead of "0004:0007" for large threadNum.
            fileNameMap.put(String.valueOf(i - 1), String.format("%04d:%04d", start, end));
            start = end + 1;
        }
        return fileNameMap;
    }

    /**
     * Inserts the ip/imsi records into MongoDB in batches of at most 500.
     * NOTE: this method consumes the passed-in list — batches already written
     * are removed from {@code ebmIpImsiMongoList} via subList().clear().
     *
     * @param ebmIpImsiMongoList records to insert (mutated by this call)
     * @param programDate        date string starting with "yyyy-MM-dd"
     * @throws DataAccessException on MongoDB access failure
     */
    public void batchInsertEbmIpImsiDataList(List<EbmIpImsiMongo> ebmIpImsiMongoList, String programDate) throws DataAccessException {
        final int pointsDataLimit = 500; // max documents per insert
        int size = ebmIpImsiMongoList.size();
        // Only batch when the list exceeds the limit.
        if (pointsDataLimit < size) {
            int part = size / pointsDataLimit; // number of full batches
            for (int i = 0; i < part; i++) {
                List<EbmIpImsiMongo> listPage = ebmIpImsiMongoList.subList(0, pointsDataLimit);
                insertEbmIpImsiDataList(listPage, programDate);
                // Drop the batch just written from the head of the list.
                ebmIpImsiMongoList.subList(0, pointsDataLimit).clear();
            }
            // Insert the trailing partial batch, if any.
            if (!ebmIpImsiMongoList.isEmpty()) {
                insertEbmIpImsiDataList(ebmIpImsiMongoList, programDate);
            }
        } else {
            insertEbmIpImsiDataList(ebmIpImsiMongoList, programDate);
        }
    }

    /**
     * Inserts records into the day-partitioned collection
     * "ebm_ipimsi_yyyyMMdd", creating it with an index on "ip" when it does
     * not yet exist, then drops the expired collection.
     *
     * @param ebmIpImsiMongoList records to insert
     * @param programDate        date string starting with "yyyy-MM-dd"
     */
    public void insertEbmIpImsiDataList(List<EbmIpImsiMongo> ebmIpImsiMongoList, String programDate) {
        String collectionName = "ebm_ipimsi_" + programDate.substring(0, 10).replace("-", "");
        if (!mongoTemplate.collectionExists(collectionName)) {
            List<IndexModel> indexModelList = new ArrayList<>();
            BasicDBObject index = new BasicDBObject();
            index.put("ip", 1); // ascending index on "ip"
            indexModelList.add(new IndexModel(index));
            mongoTemplate.createCollection(collectionName).createIndexes(indexModelList);
        }
        mongoTemplate.insert(ebmIpImsiMongoList, collectionName);
        this.dropPastCollection();
    }

    /**
     * Drops the expired "ebm_ipimsi_*" collection, where the retention
     * offset comes from the EBMIPIMSI_MONGO_TIME dictionary entry. Does
     * nothing when the entry is missing or empty.
     */
    private void dropPastCollection() {
        EpcDictEBM epcDict = new EpcDictEBM();
        epcDict.setType(EBMIPIMSI_MONGO_TIME);
        epcDict = epcDictEBMMapper.selectEpcDictEBM(epcDict);
        if (null != epcDict && !"".equals(epcDict.getValue())) {
            String pastDate = DateUtils.getPastDate(Integer.parseInt(epcDict.getValue()), "yyyyMMdd");
            if (mongoTemplate.collectionExists("ebm_ipimsi_" + pastDate)) {
                mongoTemplate.dropCollection("ebm_ipimsi_" + pastDate);
            }
        }
    }

    /**
     * Reads the configured worker-thread count from the EBMIPIMSI_THREAD_COUNT
     * dictionary entry.
     *
     * @return the configured count, or 0 when the entry is missing or empty
     *         (callers then process nothing)
     */
    private int getThreadCount() {
        EpcDictEBM epcDict = new EpcDictEBM();
        epcDict.setType(EBMIPIMSI_THREAD_COUNT);
        epcDict = epcDictEBMMapper.selectEpcDictEBM(epcDict);
        int result = 0;
        if (null != epcDict && !"".equals(epcDict.getValue())) {
            result = Integer.parseInt(epcDict.getValue());
        }
        return result;
    }

}
