package yz.hdfs;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.Statement;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class demo9HDFSToMysql {
    /**
     * Reads comma-separated rows from the HDFS file {@code /api/jd.txt} and
     * inserts each row (5 columns) into the MySQL table {@code hdfsToMysql}.
     *
     * <p>Connection endpoints (MySQL at master:3306, HDFS at master:9001) and
     * credentials are hard-coded, matching the original script's setup.
     *
     * @param args unused
     * @throws Exception if the JDBC driver is missing, a connection fails, or
     *                   an HDFS/SQL operation fails
     */
    public static void main(String[] args) throws Exception {
        // Load the MySQL JDBC driver via reflection (legacy pre-JDBC-4 idiom).
        Class.forName("com.mysql.jdbc.Driver");

        Configuration conf = new Configuration();

        // try-with-resources: all JDBC and HDFS handles are closed in the
        // correct (reverse) order even if an exception is thrown mid-load.
        try (Connection connection = DriverManager.getConnection(
                     "jdbc:mysql://master:3306/goods?useSSL=false&useUnicode=true&characterEncoding=utf8",
                     "root", "123456");
             // Parameterized SQL instead of String.format: immune to SQL
             // injection and to quoting bugs when a field contains ' or ,.
             PreparedStatement statement = connection.prepareStatement(
                     "insert into hdfsToMysql values(?,?,?,?,?)");
             FileSystem fileSystem = FileSystem.get(new URI("hdfs://master:9001"), conf, "root");
             FSDataInputStream in = fileSystem.open(new Path("/api/jd.txt"));
             // Explicit UTF-8: the no-charset InputStreamReader constructor
             // silently uses the platform default encoding.
             BufferedReader reader = new BufferedReader(
                     new InputStreamReader(in, StandardCharsets.UTF_8))) {

            String line;
            while ((line = reader.readLine()) != null) {
                String[] fields = line.split(",");
                // Guard against short rows: the original indexed fields[0..4]
                // unconditionally and would throw ArrayIndexOutOfBounds.
                if (fields.length < 5) {
                    System.err.println("Skipping malformed line: " + line);
                    continue;
                }
                for (int i = 0; i < 5; i++) {
                    statement.setString(i + 1, fields[i]); // JDBC params are 1-based
                }
                statement.executeUpdate();
                System.out.println("Inserted: " + line);
            }
        }
    }
}
