package com.chinasoft.DealAirOriginData.api;

import com.chinasoft.DealAirOriginData.pojo.AirOriginData;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import scala.reflect.io.Path$;

public class HdfsApi {

    /**
     * Reads the preprocessed air-quality data file from HDFS and converts each
     * line into an {@link AirOriginData} instance.
     *
     * <p>Each line is expected to contain five fields separated by {@code \u0001}
     * (Hive's default field delimiter): city, a numeric value, two string fields,
     * and a floating-point value. The id passed to {@code AirOriginData} is
     * {@code null} — presumably assigned later by the persistence layer
     * (NOTE(review): confirm against the POJO's contract).
     *
     * @return list of parsed records, empty if the file has no lines
     * @throws Exception if the HDFS file cannot be opened/read, or a line has
     *                   fewer than five fields / non-numeric numeric fields
     *                   ({@code ArrayIndexOutOfBoundsException} /
     *                   {@code NumberFormatException})
     */
    public List<AirOriginData> getAirList() throws Exception {
        List<AirOriginData> list = new ArrayList<>();
        Configuration conf = new Configuration(true);

        // Read the preprocessed data from the Hadoop file system.
        Path path = new Path("/user/airquality/preprocessed_data/000000_0");

        // try-with-resources guarantees the stream, reader, and filesystem are
        // closed even when a read or parse fails — the original code leaked all
        // three on any exception and never closed the stream/reader at all.
        try (FileSystem fs = FileSystem.get(conf);
             FSDataInputStream dis = fs.open(path);
             BufferedReader br = new BufferedReader(
                     new InputStreamReader(dis, StandardCharsets.UTF_8))) {
            String line;
            while ((line = br.readLine()) != null) {
                // Split on Hive's default field delimiter (Ctrl-A).
                String[] fields = line.split("\u0001");
                AirOriginData air = new AirOriginData(
                        null,
                        fields[0],
                        Integer.parseInt(fields[1]),
                        fields[2],
                        fields[3],
                        Double.parseDouble(fields[4]));
                list.add(air);
            }
        }
        return list;
    }
}
