package com.min.hdfs;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.IOException;
import java.sql.*;

public class DataToHdfs {
    /**
     * Uploads three local CSV data files (journalism types, Sina news, world news)
     * to the root directory ("/") of the HDFS cluster at {@code hdfs://hadoop1:9000}.
     *
     * @param args command-line arguments (unused)
     * @throws IOException if the filesystem cannot be reached or any upload fails
     */
    public static void main(String[] args) throws IOException {
        String journalismTypePath = "E:\\workspase\\journalism\\data\\journalismType.csv";
        String sinaPath = "E:\\workspase\\journalism\\data\\sina.csv";
        String worldNewsPath = "E:\\workspase\\journalism\\data\\worldNews.csv";

        // 1. Load the HDFS configuration and point it at the target NameNode.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://hadoop1:9000");

        // 2. Acquire the FileSystem handle. try-with-resources guarantees the
        //    handle is closed even if an upload throws — the original code only
        //    closed it on the success path, leaking the connection on failure.
        try (FileSystem fs = FileSystem.get(conf)) {
            // 3. Build the source paths and copy each local file to the HDFS root.
            Path typeSrc = new Path(journalismTypePath);
            Path sinaSrc = new Path(sinaPath);
            Path worldNewsSrc = new Path(worldNewsPath);
            Path dst = new Path("/");

            fs.copyFromLocalFile(typeSrc, dst);
            fs.copyFromLocalFile(sinaSrc, dst);
            fs.copyFromLocalFile(worldNewsSrc, dst);
        }
        // 4. Reached only if all three uploads succeeded and the stream closed cleanly.
        System.out.println("文件上传成功！");
    }
}
