package eight;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.BufferedReader;
import java.io.FileWriter;
import java.io.InputStreamReader;
import java.io.Writer;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

public class SQING_ERRCHeader {

    /**
     * Reads a comma-separated MapReduce result file from HDFS, keeps the rows whose
     * text contains {@code "2018"}, and writes them to a local file as a pretty-printed
     * JSON array. Each object has keys {@code year} (column 0), {@code qingdao_id}
     * (column 1) and, when a third column is present, {@code count}.
     *
     * <p>Errors are reported to the console and swallowed (best-effort tool behavior,
     * matching the original contract — the method never throws).
     *
     * @param hdfsUri       HDFS namenode URI, e.g. {@code hdfs://host:8020}
     * @param outputPath    path of the part file inside HDFS
     * @param localJsonPath local filesystem path for the generated JSON file
     */
    public static void readAndWriteJson(String hdfsUri, String outputPath, String localJsonPath) {
        // Preserve the historical hard-coded filter as the default.
        readAndWriteJson(hdfsUri, outputPath, localJsonPath, "2018");
    }

    /**
     * Generalized variant: same as {@link #readAndWriteJson(String, String, String)}
     * but with a caller-supplied row filter instead of the hard-coded {@code "2018"}.
     *
     * @param rowFilter substring a line must contain to be included
     *                  (note: matched anywhere in the line, not only in the year column)
     */
    public static void readAndWriteJson(String hdfsUri, String outputPath,
                                        String localJsonPath, String rowFilter) {
        Configuration conf = new Configuration();

        // try-with-resources closes the filesystem handle and the reader even on
        // failure; the original leaked the FileSystem and closed the rest manually.
        // UTF-8 is explicit — a bare InputStreamReader would use the platform charset.
        try (FileSystem fs = FileSystem.get(new URI(hdfsUri), conf);
             BufferedReader br = new BufferedReader(
                     new InputStreamReader(fs.open(new Path(outputPath)), StandardCharsets.UTF_8))) {

            ObjectMapper mapper = new ObjectMapper();
            ArrayNode jsonArray = mapper.createArrayNode();

            String line;
            while ((line = br.readLine()) != null) {
                // Keep only the rows matching the requested filter.
                if (!line.contains(rowFilter)) {
                    continue;
                }

                String[] fields = line.split(",");

                // Guard against malformed rows so fields[1] cannot throw.
                if (fields.length < 2) {
                    System.out.println("警告：数据格式不正确，无法解析：" + line);
                    continue;
                }

                ObjectNode jsonObject = mapper.createObjectNode();
                jsonObject.put("year", fields[0]);       // column 0: year
                jsonObject.put("qingdao_id", fields[1]); // column 1: qingdao_id

                // Optional column 2: count. Kept as a string to preserve the
                // original output format ("count": "5", not 5).
                if (fields.length > 2) {
                    jsonObject.put("count", fields[2]);
                }

                jsonArray.add(jsonObject);
            }

            // Write UTF-8 explicitly; a plain FileWriter would use the platform
            // charset. Jackson's writeValue closes the writer, and the
            // try-with-resources makes the close unconditional either way.
            try (Writer out = Files.newBufferedWriter(Paths.get(localJsonPath), StandardCharsets.UTF_8)) {
                mapper.writerWithDefaultPrettyPrinter().writeValue(out, jsonArray);
            }
        } catch (Exception e) {
            // Best-effort tool: add context, report, and continue without throwing.
            System.err.println("Failed to convert " + outputPath + " to " + localJsonPath);
            e.printStackTrace();
        }
    }

    /** Entry point: converts one known MapReduce part file into a local JSON file. */
    public static void main(String[] args) {
        String hdfsUri = "hdfs://192.168.43.129:8020";
        String outputPath = "/output/output_20240617161929/part-r-00000";
        String localJsonPath = "src/main/resources/html/item3.json";

        readAndWriteJson(hdfsUri, outputPath, localJsonPath);
    }
}
