package model;

import java.io.IOException;
import java.io.StringReader;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.wltea.analyzer.lucene.IKAnalyzer;

/**
 * Created by liuyaowen on 2016/8/26.
 */
/**
 * Static utility helpers: IPv4-to-long conversion, IK-based text
 * segmentation, and encoding of segmented text into a numeric
 * "search code" used for full-text matching.
 *
 * <p>Created by liuyaowen on 2016/8/26.</p>
 */
public class Utils {

    /** Decimal string form of 127.0.0.1 (the loopback address as a long). */
    public final static String localLongIp = "2130706433";

    /**
     * Shared IK analyzer in smart-segmentation mode (constructor arg {@code true}).
     * NOTE(review): shared across threads — assumes IKAnalyzer.tokenStream is
     * safe for concurrent use; confirm against the IK analyzer docs.
     */
    static Analyzer analyzer = new IKAnalyzer(true);

    /**
     * Cache of titles seen so far. Backed by a concurrent set so that
     * {@link #add_title_set(String)} is safe to call from multiple threads
     * (the original plain HashSet was not).
     */
    public static Set<String> titleSet = ConcurrentHashMap.newKeySet();

    /** Utility class — not instantiable. */
    private Utils() {
    }

    /**
     * Converts a dotted-quad IPv4 address string to its unsigned 32-bit
     * numeric value, stored in a long.
     *
     * @param ip dotted-quad IPv4 string, e.g. {@code "127.0.0.1"}; must
     *           contain four numeric parts
     * @return the numeric value of the address (e.g. 2130706433 for loopback)
     * @throws NumberFormatException          if a part is not a valid number
     * @throws ArrayIndexOutOfBoundsException if fewer than four parts are present
     */
    public static final long ip2Long(final String ip) {
        final String[] ipNums = ip.split("\\.");
        return (Long.parseLong(ipNums[0]) << 24)
                + (Long.parseLong(ipNums[1]) << 16)
                + (Long.parseLong(ipNums[2]) << 8)
                + (Long.parseLong(ipNums[3]));
    }

    /**
     * Segments the input text with the shared IK analyzer and returns the
     * tokens joined by single spaces (each token followed by one trailing
     * space, matching the original behavior).
     *
     * <p>On an {@link IOException} the partial result accumulated so far is
     * returned (the original swallowed the exception the same way).</p>
     *
     * @param intString text to segment
     * @return space-separated token text, possibly empty
     */
    public static final String analyzer_string(String intString) {
        StringBuilder sb = new StringBuilder();
        // try-with-resources closes the TokenStream (and its StringReader)
        // even if reset/incrementToken throws.
        try (TokenStream ts = analyzer.tokenStream("myfield", new StringReader(intString))) {
            // Term-text attribute is the only one we read; the original also
            // registered Offset/Type attributes but never used them.
            CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
            ts.reset();                       // required before incrementToken()
            while (ts.incrementToken()) {
                sb.append(term.toString()).append(" ");
            }
            ts.end();                         // end-of-stream bookkeeping (final offset)
        } catch (IOException e) {
            // Preserve original best-effort behavior: log and fall through.
            e.printStackTrace();
        }
        return sb.toString();
    }

    /**
     * Segments the input text and encodes the result as a numeric search
     * code — the full pipeline used when indexing question content.
     *
     * @param intString raw question text
     * @return encoded search code (see {@link #encode_search_code(String)})
     */
    public static final String get_question_content_fulltext(String intString) {
        return encode_search_code(analyzer_string(intString));
    }

    /**
     * Encodes a string by replacing every character with its decimal
     * code-point value, concatenated with no separator; space characters
     * (code 32) are kept as literal spaces so token boundaries survive.
     *
     * <p>Example: {@code "ab c"} becomes {@code "9798 99"}.</p>
     *
     * @param intString text to encode
     * @return the numeric encoding
     */
    public static final String encode_search_code(String intString) {
        StringBuilder sb = new StringBuilder(intString.length() * 4);
        for (char c : intString.toCharArray()) {
            int code = c;
            if (code == 32) {
                sb.append(" ");               // keep token separators readable
            } else {
                sb.append(code);
            }
        }
        return sb.toString();
    }

    /** Manual smoke test: segment and encode a sample Chinese string. */
    public static void main(String[] args) {
        String str = get_question_content_fulltext("测试刘耀文");
        System.out.println(str);
    }

    /**
     * Records a title in {@link #titleSet}, wiping the cache once it grows
     * past 100,000 entries as a crude memory cap.
     *
     * <p>The original both {@code clear()}ed and reallocated the set; with a
     * concurrent backing set a single {@code clear()} suffices and keeps the
     * published {@code titleSet} reference stable for other threads.</p>
     *
     * @param title title to remember
     */
    public static void add_title_set(String title) {
        if (titleSet.size() > 100000) {
            titleSet.clear();
        }
        titleSet.add(title);
    }
}
