package com.car_test;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class car_hdfs {

    /** HDFS NameNode address — change to match your environment. */
    private static final String HDFS_URI = "hdfs://192.168.128.130:8020";

    /** HDFS user to connect as. */
    private static final String HDFS_USER = "root";

    // Utility class — not meant to be instantiated.
    private car_hdfs() {
    }

    /**
     * Uploads a local file to HDFS.
     *
     * @param localFilePath path of the file on the local filesystem
     * @param hdfsFilePath  destination path on HDFS
     * @throws IOException          if the upload or connection fails
     * @throws URISyntaxException   if the HDFS URI is malformed
     * @throws InterruptedException if the connection attempt is interrupted
     */
    public static void uploadFileToHDFS(String localFilePath, String hdfsFilePath)
            throws IOException, URISyntaxException, InterruptedException {

        // Create the Hadoop configuration.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", HDFS_URI);

        // On Windows you may need to set the Hadoop home directory:
        // System.setProperty("hadoop.home.dir", "D:/hadoop-3.3.4");

        // try-with-resources guarantees the FileSystem handle is closed even
        // if copyFromLocalFile throws (the original leaked it on failure).
        try (FileSystem fs = FileSystem.get(new URI(HDFS_URI), conf, HDFS_USER)) {

            // Build source (local) and destination (HDFS) paths.
            Path srcPath = new Path(localFilePath);
            Path dstPath = new Path(hdfsFilePath);

            // Copy the local file up to HDFS.
            fs.copyFromLocalFile(srcPath, dstPath);

            System.out.println("文件上传成功！" + localFilePath + " -> " + hdfsFilePath);
        }
    }

    /**
     * Example entry point: uploads a sample local CSV file to HDFS.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        try {
            String localFilePath = "D:/car/data.csv";                        // local file path
            String hdfsFilePath = "hdfs://192.168.128.130:8020/input/car";   // HDFS target path

            uploadFileToHDFS(localFilePath, hdfsFilePath);

        } catch (Exception e) {
            // Boundary catch for the demo entry point; surface the failure.
            e.printStackTrace();
        }
    }
}
