package team.bluepen.supermarket.data.hdfs;

import org.apache.hadoop.fs.Path;
import team.bluepen.supermarket.data.entity.HbaseItem;
import team.bluepen.supermarket.data.hbase.LineReader;
import team.bluepen.supermarket.util.StringUtil;

import java.io.*;
import java.util.ArrayList;
import java.util.List;

/**
 * 从文件读入
 * @author Kai
 */
public class HdfsFileProcessor<T extends HbaseItem<T>> {
    private static final String TEMP_DIRECTORY = "market/temp";
    private final Path path;
    private final LineReader<T> lineReader;

    public HdfsFileProcessor(Path path, LineReader<T> tLineReader) {
        this.path = path;
        this.lineReader = tLineReader;
        File file = new File(TEMP_DIRECTORY);
        if (!file.exists()) {
            file.mkdirs();
        }
    }

    public HdfsFileProcessor(String path, LineReader<T> tLineReader) {
        this(new Path(path), tLineReader);
    }

    public List<T> read() throws IOException {
        List<T> ts = new ArrayList<>();
        String fileName = TEMP_DIRECTORY + "/" + StringUtil.getRandomString(16);
        File file = new File(fileName);
        if (!file.exists()) {
            file.createNewFile();
        }

        FileOutputStream outputStream = new FileOutputStream(file);
        HdfsManager.getInstance().download(path.toUri().getPath(), outputStream);

        BufferedReader bufferedReader = new BufferedReader(new FileReader(file));
        String line;
        while ((line = bufferedReader.readLine()) != null) {
            T t = lineReader.readLine(line);
            if (t != null) {
                ts.add(t);
            }
        }

        outputStream.close();
        bufferedReader.close();
        return ts;
    }
}
