package com.liaowei.article;

import lombok.SneakyThrows;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.codelibs.minhash.MinHash;

import java.io.ByteArrayInputStream;
import java.io.DataInputStream;

public class Main {
    // NOTE(review): this public field is never read or written anywhere in this
    // file — kept only in case external callers reference it; consider removing.
    public String name;

    /**
     * Demo entry point: computes the MinHash signature of a sample string and
     * prints the first 4 bytes of the signature reinterpreted as a float.
     */
    @SneakyThrows
    public static void main(String[] args) {
        byte[] bytes = calculateMinHash("12312312");
        // The signature is a raw bit vector (hashBit * num bits = 16 bytes here);
        // reading it as a float is a demo-only reinterpretation of the first
        // 4 bytes, not a meaningful similarity value.
        try (DataInputStream dis = new DataInputStream(new ByteArrayInputStream(bytes))) {
            float i = dis.readFloat();
            System.out.println(i);
        }
    }

    /**
     * Computes a MinHash signature for the given text.
     *
     * <p>The text is split on whitespace by Lucene's {@link WhitespaceTokenizer},
     * then hashed with 128 hash functions at 1 bit each, so the returned array
     * is {@code hashBit * num / 8 = 16} bytes long.
     *
     * @param text the input text to fingerprint
     * @return the MinHash signature bytes (size {@code hashBit * num} bits)
     */
    @SneakyThrows
    public static byte[] calculateMinHash(String text) {
        // Lucene's tokenizer parses a text.
        Tokenizer tokenizer = new WhitespaceTokenizer();
        // The number of bits for each hash value.
        int hashBit = 1;
        // A base seed for hash functions.
        int seed = 0;
        // The number of hash functions.
        int num = 128;
        // Analyzer for 1-bit 128-hash with custom Tokenizer.
        // Analyzer is Closeable; close it so Lucene resources are released.
        try (Analyzer analyzer = MinHash.createAnalyzer(tokenizer, hashBit, seed, num)) {
            // Calculate a minhash value. The size is hashBit*num.
            return MinHash.calculate(analyzer, text);
        }
    }

}
