package com.shengzai.hdfs.text;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.*;
import java.nio.charset.StandardCharsets;
import java.sql.*;

/*
    读取MySQL中的Cars数据，并将结果写入至HDFS中，要求HDFS对应的文件字段分隔符为, 并且能够换行
*/
/**
 * Reads all rows of the {@code car_info} table from MySQL and appends them to
 * the HDFS file {@code /input/b.txt}, one row per line with fields separated
 * by commas.
 *
 * <p>NOTE(review): {@code FileSystem.append} requires that the target file
 * already exists and that the cluster supports append
 * ({@code dfs.support.append}) — confirm, or switch to {@code create} if a
 * fresh file is intended.
 */
public class HdfsText1 {
    public static void main(String[] args) throws IOException, ClassNotFoundException, SQLException {
        // Load the (legacy) MySQL JDBC driver explicitly.
        Class.forName("com.mysql.jdbc.Driver");

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://master:9000");

        // try-with-resources guarantees every JDBC and HDFS resource is closed
        // even if a read or write throws (the original leaked them on failure).
        // Resources close in reverse order: writer first (flushing the HDFS
        // stream), then FileSystem, then the JDBC objects.
        try (Connection con = DriverManager.getConnection(
                     "jdbc:mysql://192.168.146.136:3306/bigdata22?useSSL=False&useUnicode=true&characterEncoding=UTF8",
                     "root", "123456");
             Statement statement = con.createStatement();
             ResultSet resultSet = statement.executeQuery("select * from car_info");
             FileSystem fileSystem = FileSystem.get(conf);
             // Explicit UTF-8: without it the platform-default charset is used
             // (pre-Java 18), which may not match the UTF-8 the JDBC URL requests.
             OutputStreamWriter writer = new OutputStreamWriter(
                     fileSystem.append(new Path("/input/b.txt")), StandardCharsets.UTF_8)) {

            // Derive the column count from metadata instead of hard-coding 9,
            // so the job keeps working if the table schema gains/loses columns.
            int columnCount = resultSet.getMetaData().getColumnCount();
            StringBuilder row = new StringBuilder();

            while (resultSet.next()) {
                row.setLength(0); // reuse one buffer across rows
                for (int i = 1; i <= columnCount; i++) {
                    if (i > 1) {
                        row.append(',');
                    }
                    row.append(resultSet.getString(i));
                }
                row.append('\n');
                writer.write(row.toString());
            }
            // One flush at the end; per-row flushing (as before) forces a
            // network round-trip per record for no correctness benefit.
            writer.flush();
        }
    }
}
