package com.samp.solr.solrlstm;

import com.samp.util.FilesUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;

import java.io.BufferedReader;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * Generates training/test data files for a Solr+LSTM intention-matching model.
 *
 * <p>Both generators read a tab-separated raw file, query Solr for candidate
 * intentions per word, and emit rows in the Quora-duplicate-pair TSV format:
 * {@code id\tqid1\tqid2\tquestion1\tquestion2\tis_duplicate}.
 */
public class ModelDataGenerate {

    /**
     * Generates training data from a raw {@code word<TAB>intention} file.
     *
     * <p>For each input word the top-N(+1) Solr candidates are fetched; pairs that
     * Solr already resolves confidently (high score and high score ratio to the
     * next hit) are skipped, the rest are written as labelled training rows.
     *
     * @param rawPath        UTF-8 input file, one {@code word<TAB>intention} per line
     * @param collectionName Solr collection to query for candidate intentions
     * @param outFile        destination path for the generated TSV rows
     * @throws Exception if the input file cannot be opened or a Solr call fails
     */
    private static void generateTrainingData(String rawPath, String collectionName, String outFile) throws Exception {
        String solrUrl = SolrlstmUtils.BASE_SOLR_URL + collectionName;
        // try-with-resources closes both the reader and the Solr client on every
        // exit path (the old code leaked the SolrClient entirely).
        try (BufferedReader bufferReader = Files.newBufferedReader(Paths.get(rawPath), StandardCharsets.UTF_8);
             SolrClient solrClient = new HttpSolrClient.Builder(solrUrl).build()) {
            List<String> resultList = new ArrayList<>();
            resultList.add("id\tqid1\tqid2\tquestion1\tquestion2\tis_duplicate");
            int index = 0; // primitive: avoids autoboxing in the hot loop
            long allStart = System.currentTimeMillis();
            long start = System.currentTimeMillis();
            String line;
            while ((line = bufferReader.readLine()) != null) {
                if (StringUtils.isBlank(line)) {
                    System.out.println("blank line,continue..");
                    continue;
                }
                String[] splits = line.split("\t");
                if (splits.length != 2) {
                    System.out.println("error data:" + line);
                    continue;
                }
                String rawWord = splits[0];
                String rawIntention = splits[1];
                // Skip rows whose intention cannot be parsed.
                // (The old code performed this exact null check twice in a row.)
                if (SolrlstmUtils.extractIntentionWord(rawIntention) == null) {
                    continue;
                }
                int recordNeed = SolrlstmUtils.TOP_N + 1; // +1 so each hit has a "next" for the ratio test
                List<WordIntention> solrResultList = SolrlstmUtils.getTopNIntentions(solrClient, rawWord, recordNeed);
                if (solrResultList.size() < recordNeed) {
                    recordNeed = solrResultList.size();
                }
                Set<String> removeDulSet = new HashSet<>();
                for (int i = 0; i < recordNeed - 1; i++) {
                    WordIntention wi = solrResultList.get(i);
                    WordIntention wiNext = solrResultList.get(i + 1);
                    // A confident, well-separated top hit needs no training example.
                    if (wi.getScore() > SolrlstmUtils.MIN_SCORE && wi.getScore() / wiNext.getScore() > SolrlstmUtils.MIN_RATE) {
                        continue;
                    }
                    int isMatch = wi.getIntention().equals(rawIntention) ? 1 : 0;
                    String dulFlag = wi.getWord();
                    if (!removeDulSet.contains(dulFlag)) {
                        resultList.add(getTrainingData(index, rawWord, wi.getWord(), isMatch));
                        removeDulSet.add(dulFlag);
                        index++;
                        // Progress log only right after an increment — the old
                        // unconditional check printed on every iteration while
                        // index was still 0.
                        if (index % 5000 == 0) {
                            System.out.println("processed 5000 lines, cost=" + ((System.currentTimeMillis() - start) * 1.0 / 1000) + " 秒,index=" + index);
                            start = System.currentTimeMillis();
                        }
                    }
                }
            }
            FilesUtils.saveListToFile(resultList, outFile);
            System.out.println("save " + resultList.size() + " lines, cost " + ((System.currentTimeMillis() - allStart) * 1.0 / (1000 * 60)) + " 分钟");
        } catch (IOException e) {
            // Keep the original best-effort behavior: log and return rather than propagate.
            e.printStackTrace();
        }
    }

    /**
     * Generates test data from a raw 3-column file ({@code id<TAB>question<TAB>intention}).
     *
     * <p>Queries that Solr already resolves within the confidence threshold are
     * only counted (satisfy/success statistics); the ambiguous remainder is
     * written as labelled test rows for the model.
     *
     * @param rawPath      UTF-8 input file with a header line (skipped) and 3 tab-separated columns
     * @param collectionName Solr collection to query
     * @param customPath   tokenizer custom-dictionary file
     * @param stopWodsPath tokenizer stop-words file
     * @param synonymPath  tokenizer synonym-dictionary file
     * @param weightPath   term-weight file for {@link SolrlstmUtils#initialWeightMap}
     * @param outFile      destination path for the generated TSV rows
     * @throws Exception if the input file cannot be opened or a dictionary/Solr call fails
     */
    private static void generateTestData(String rawPath, String collectionName, String customPath, String stopWodsPath,
                                         String synonymPath, String weightPath, String outFile) throws Exception {
        String solrUrl = SolrlstmUtils.BASE_SOLR_URL + collectionName;
        try (BufferedReader bufferReader = Files.newBufferedReader(Paths.get(rawPath), StandardCharsets.UTF_8);
             SolrClient solrClient = new HttpSolrClient.Builder(solrUrl).build()) {
            List<String> resultList = new ArrayList<>();
            resultList.add("id\tqid1\tqid2\tquestion1\tquestion2\tis_duplicate");
            int index = 0;
            int count = 0;             // valid input rows seen
            int satisfyNum = 0;        // rows Solr resolves within threshold
            int satisfySuccessNum = 0; // of those, rows where Solr's top intention is correct
            long start = System.currentTimeMillis();
            SolrlstmUtils.initialWeightMap(weightPath);
            TokenizeUtils.loCustomDict(customPath);
            TokenizeUtils.loadSynonymDict(synonymPath);
            TokenizeUtils.loadStopwordsDict(stopWodsPath);
            bufferReader.readLine(); // skip the header line
            String line;
            while ((line = bufferReader.readLine()) != null) {
                String[] words = line.split("\t");
                if (words.length != 3) {
                    System.out.println("error data:" + line);
                    continue;
                }
                count++;
                if (count % 5000 == 0) {
                    System.out.println("processed 5000 lines, cost=" + ((System.currentTimeMillis() - start) * 1.0 / 1000) + " 秒, count=" + count);
                    start = System.currentTimeMillis();
                }
                String usedIntention = SolrlstmUtils.removeLastSlot(words[2]);
                String rawWord = TokenizeUtils.tokenize(words[1]);
                if (StringUtils.isBlank(rawWord)) {
                    System.out.println("blank words:" + line);
                    continue;
                }
                List<WordIntention> topList = SolrlstmUtils.getTopNIntentions(solrClient, rawWord, 5);
                if (topList == null || topList.isEmpty()) {
                    System.out.println("no result found in solr, " + rawWord);
                    continue;
                }
                WordIntention firstMatch = topList.get(0);
                WordIntention secondMatch = SolrlstmUtils.getSecondMatch(topList);
                if (SolrlstmUtils.isSatisfyThreshold(firstMatch, secondMatch)) {
                    // Solr alone is confident enough — count it, no test row needed.
                    satisfyNum++;
                    if (firstMatch.getIntention().equals(usedIntention)) {
                        satisfySuccessNum++;
                    }
                } else {
                    // Ambiguous query: emit every distinct candidate as a test pair.
                    Set<String> removeDulSet = new HashSet<>();
                    for (WordIntention wi : topList) {
                        int isMatch = wi.getIntention().equals(usedIntention) ? 1 : 0;
                        String dulFlag = wi.getWord();
                        if (!removeDulSet.contains(dulFlag)) {
                            resultList.add(getTrainingData(index, rawWord, wi.getWord(), isMatch)); // A-B1 type
                            removeDulSet.add(dulFlag);
                            index++;
                        }
                    }
                }
            }
            // Guard the rate divisions: count/satisfyNum may be 0, which would
            // print NaN/Infinity with the old unguarded float division.
            float satisfyRate = count == 0 ? 0f : (float) satisfyNum / count;
            float satisfySuccessRate = satisfyNum == 0 ? 0f : (float) satisfySuccessNum / satisfyNum;
            System.out.println("satisfySuccessNum=" + satisfySuccessNum + ",satisfyNum=" + satisfyNum + ", count=" + count
                    + ", satisfyRate=" + satisfyRate + ", satisfySuccessRate=" + satisfySuccessRate);
            System.out.println("test file size = " + resultList.size());
            FilesUtils.saveListToFile(resultList, outFile);
        } catch (IOException e) {
            // Keep the original best-effort behavior: log and return rather than propagate.
            e.printStackTrace();
        }
    }

    /**
     * Formats one TSV row in the duplicate-pair layout. The same {@code index}
     * is reused for id/qid1/qid2 since the pair ids carry no extra meaning here.
     *
     * @param index     running row id (used for id, qid1 and qid2)
     * @param word      left-hand question text
     * @param intention right-hand candidate text
     * @param isMatch   1 if the candidate is the correct intention, else 0
     * @return a single tab-separated row: {@code index\tindex\tindex\tword\tintention\tisMatch}
     */
    private static String getTrainingData(Integer index, String word, String intention, Integer isMatch) {
        StringBuilder sb = new StringBuilder();
        sb.append(index).append("\t").append(index).append("\t").append(index).append("\t");
        sb.append(word).append("\t").append(intention).append("\t").append(isMatch);
        return sb.toString();
    }

    /**
     * Entry point: selects which generator(s) to run via the hard-coded
     * {@code callMethod} switch ("generateTrainingData", "generateTestData" or "all")
     * and wires up the collection-specific file paths.
     *
     * <p>NOTE(review): paths are built with a Windows separator ("\\") — this
     * tool currently assumes a Windows filesystem.
     */
    public static void main(String[] args) {
        String callMethod = "generateTestData";
        String collectionDirName = "lifeareanavi";
        String collectionFullPath = SolrlstmUtils.BASE_PATH + collectionDirName + "\\";
        String collectionName = "GCC-OMCP_NLPLifeAreaExtensionNavi";
        try {
            if ("generateTrainingData".equals(callMethod) || "all".equals(callMethod)) {
                String solrFilePath = collectionFullPath + "solr_data_02_xx.txt";
                String trainingFile = collectionFullPath + "training_02.txt";
                generateTrainingData(solrFilePath, collectionName, trainingFile);
            }
            if ("generateTestData".equals(callMethod) || "all".equals(callMethod)) {
                String customPath = collectionFullPath + "tokenize_custom_xx.txt";
                String synonymPath = collectionFullPath + "tokenize_synonym_xx.txt";
                String weightPath = collectionFullPath + "tokenize_weight_xx.txt";
                String stopWordsPath = collectionFullPath + "stopwords.txt";
                String testFilePath = collectionFullPath + "raw_test_data.txt";
                String testFile = collectionFullPath + "test_02.txt";
                generateTestData(testFilePath, collectionName, customPath, stopWordsPath, synonymPath, weightPath, testFile);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        System.out.println("done all!!!");
    }

}
