package cn.com.zetatech.loader.entity;


import cn.com.zetatech.loader.util.FileUtil;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import lombok.extern.slf4j.Slf4j;
import org.apache.avro.generic.GenericRecord;
import org.apache.parquet.avro.AvroParquetReader;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.io.InputFile;
import org.apache.parquet.io.SeekableInputStream;

import java.io.ByteArrayOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Properties;

/**
 * @author jian.chen@zetatech.com.cn
 * @Description 从SFTP下载Parquet文件并将其解析为Map
 * @Date 2024/8/13 14:07
 */

@Slf4j
public class ParquetToMap {

    public static void main(String[] args) throws Exception {
        // 设置 hadoop.home.dir 到一个空的字符串，避免Hadoop依赖错误
        System.setProperty("hadoop.home.dir", "");
        System.setProperty("java.library.path", "");

        String sftpHost = "10.10.104.51";
        String sftpUser = "root";
        String sftpPassword = "bdp@2021";
        int sftpPort = 22;
        String remoteFilePath = "/IMAGES/2024/08/3b4f3f7233e68e9f-48853be000000000_1599481653_data.0.parq";

        // 步骤1：从SFTP服务器下载文件到内存中
        byte[] fileData = downloadFileFromSFTPToMemory(sftpHost, sftpUser, sftpPassword, sftpPort, remoteFilePath);
        System.out.println("下载的文件大小: " + (fileData != null ? fileData.length : 0) + " 字节");

        if (fileData != null) {
            // 步骤2：从内存中解析Parquet文件
            List<LinkedHashMap<String, String>> linkedHashMaps = FileUtil.parseParquetFileFromMemory(fileData);
            log.info(linkedHashMaps.toString());
            // 步骤3：清理文件数据占用的内存
            clearMemory(fileData);
        }
    }

    // 从SFTP服务器下载文件到内存中
    private static byte[] downloadFileFromSFTPToMemory(String host, String user, String password, int port, String remoteFile) {
        JSch jsch = new JSch();
        Session session = null;
        ChannelSftp channelSftp = null;
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();

        try {
            session = jsch.getSession(user, host, port);
            session.setPassword(password);

            Properties config = new Properties();
            config.put("StrictHostKeyChecking", "no");
            session.setConfig(config);

            session.connect();
            channelSftp = (ChannelSftp) session.openChannel("sftp");
            channelSftp.connect();

            try (InputStream inputStream = channelSftp.get(remoteFile)) {
                byte[] buffer = new byte[1024];
                int bytesRead;
                while ((bytesRead = inputStream.read(buffer)) != -1) {
                    byteArrayOutputStream.write(buffer, 0, bytesRead);
                }
            }

            System.out.println("文件从SFTP服务器下载成功。");
            return byteArrayOutputStream.toByteArray();
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        } finally {
            if (channelSftp != null) {
                channelSftp.disconnect();
            }
            if (session != null) {
                session.disconnect();
            }
        }
    }

    // 从内存中解析Parquet文件
    private static List<LinkedHashMap<String, String>> parseParquetFileFromMemory(byte[] fileData) {
        LinkedHashMap<String, String> dataMap = new LinkedHashMap<>();
        List<LinkedHashMap<String, String>> tempResults = new ArrayList<>();
        try (SeekableInputStream inputStream = new SeekableByteArrayInputStream(fileData)) {
            InputFile inputFile = new InputFileFromSeekableInputStream(fileData);
            try (ParquetReader<GenericRecord> reader = AvroParquetReader.<GenericRecord>builder(inputFile).build()) {
                GenericRecord record;
                while ((record = reader.read()) != null) {
                    log.info("读取记录: " + record);
                    for (String fieldName : record.getSchema().getFields().stream().map(field -> field.name()).toArray(String[]::new)) {
                        Object value = record.get(fieldName);
                        if (value instanceof ByteBuffer) {
                            ByteBuffer byteBuffer = (ByteBuffer) value;
                            String stringValue = StandardCharsets.UTF_8.decode(byteBuffer).toString();
                            dataMap.put(fieldName, stringValue);
                        } else {
                            dataMap.put(fieldName, String.valueOf(value));
                        }
                        tempResults.add(dataMap);
                    }
                }
            }
            // 输出Map内容
//            dataMap.forEach((key, value) -> log.info(key + ": " + value));
        } catch (IOException e) {
            e.printStackTrace();
        }
        return tempResults;
    }

    // 清理内存
    private static void clearMemory(byte[] fileData) {
        // 通过将内容设置为0来清除字节数组（可选步骤）
        for (int i = 0; i < fileData.length; i++) {
            fileData[i] = 0;
        }
        System.out.println("内存已清理。");
    }
}

// 辅助类：用于ParquetReader处理SeekableInputStream
//class SeekableByteArrayInputStream extends SeekableInputStream {
//    private final byte[] data;
//    private int position = 0;
//
//    public SeekableByteArrayInputStream(byte[] data) {
//        this.data = data;
//    }
//
//    @Override
//    public void seek(long newPos) throws IOException {
//        if (newPos < 0 || newPos > data.length) {
//            throw new IOException("无效的Seek位置: " + newPos);
//        }
//        position = (int) newPos;
//    }
//
//    @Override
//    public long getPos() throws IOException {
//        return position;
//    }
//
//    @Override
//    public int read() throws IOException {
//        if (position >= data.length) {
//            return -1;
//        }
//        return data[position++] & 0xFF;
//    }
//
//    @Override
//    public int read(byte[] b, int off, int len) throws IOException {
//        if (position >= data.length) {
//            return -1;
//        }
//        int bytesRead = Math.min(len, data.length - position);
//        System.arraycopy(data, position, b, off, bytesRead);
//        position += bytesRead;
//        return bytesRead;
//    }
//
//    @Override
//    public void readFully(byte[] bytes, int i, int i1) throws IOException {
//        read(bytes, i, i1);
//    }
//
//    @Override
//    public void readFully(byte[] bytes) throws IOException {
//        read(bytes, 0, bytes.length);
//    }
//
//    @Override
//    public int read(ByteBuffer byteBuffer) throws IOException {
//        int bytesToRead = Math.min(byteBuffer.remaining(), data.length - position);
//        byteBuffer.put(data, position, bytesToRead);
//        position += bytesToRead;
//        return bytesToRead;
//    }
//
//    @Override
//    public void readFully(ByteBuffer byteBuffer) throws IOException {
//        read(byteBuffer);
//    }
//
//    @Override
//    public void close() throws IOException {
//        // 内存字节数组无需关闭
//    }
//}

// 辅助类：从SeekableInputStream创建InputFile
//class InputFileFromSeekableInputStream implements InputFile {
//    private final byte[] data;
//
//    public InputFileFromSeekableInputStream(byte[] data) {
//        this.data = data;
//    }
//
//    @Override
//    public long getLength() throws IOException {
//        return data.length;
//    }
//
//    @Override
//    public SeekableInputStream newStream() throws IOException {
//        return new SeekableByteArrayInputStream(data);
//    }
//}
