package com.example.demo.demos.service;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

@Service
public class HdfsFileWriter {

    private static final Logger logger = LoggerFactory.getLogger(HdfsFileWriter.class);
    private static final String HDFS_URI = "hdfs://192.168.5.128:9000";
    private static final String DIRECTORY_PATH = "/tmp/data"; 
    private static final String LOCAL_CSV_FILE = "/Users/apple/Documents/part/2023工程实践2-数据科学212/spark/ratings.csv";
    private static final int BATCH_SIZE = 5;
    private long lineCounter = 0;

    private FileSystem fileSystem;

    public HdfsFileWriter() {
        try {
            Configuration conf = new Configuration();
            conf.set("fs.defaultFS", HDFS_URI);
            this.fileSystem = FileSystem.get(conf);
        } catch (IOException e) {
            logger.error("初始化HDFS文件系统时出错: {}", e.getMessage());
        }
    }

    @Scheduled(fixedDelay = 10000)
    public void writeRatingsToHdfs() {
        logger.info("开始写入HDFS文件...");

        String timestamp = new SimpleDateFormat("yyyyMMddHHmmss").format(new Date());
        String filePath = DIRECTORY_PATH + "/ratings-" + timestamp + ".csv";
        Path hdfsPath = new Path(filePath);

        try (FSDataOutputStream out = fileSystem.create(hdfsPath);
             BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8"))) {

            try (Stream<String> lines = Files.lines(Paths.get(LOCAL_CSV_FILE))) {
                List<String> batch = lines.skip(lineCounter).limit(BATCH_SIZE).collect(Collectors.toList());
                lineCounter += batch.size();

                for (String line : batch) {
                    writer.write(line);
                    writer.newLine();
                }
            }

            logger.info("成功写入HDFS文件: {}", hdfsPath.getName());
        } catch (IOException e) {
            logger.error("写入HDFS时出错: {}", e.getMessage());
        }
    }
}
