package com.practice.car.cardataapp.schelduer.scrapy;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import java.util.Properties;

/**
 * car-data-app project
 *
 * @author 叶旺江
 * @date 2020/9/8
 */


public class Hdfs {

    //测试代码
    public static void main(String[] args)  {

        //处理旧数据
        handleOldData();

        //测试读取hdfs数据
        //System.out.println(readJsonFile());
        uploadFiles();

    }

    //1.删除本地的.json文件，避免爬虫的数据格式出错
    //2.处理hdfs上的旧的数据文件：将文件改名为 old-年-月-日.json
    public static void handleOldData()  {
        //删除本地.json文件
        ScrapyScheduler.cmd("sh /root/python/delete.sh");

        createDirectory();

        //为hdfs上的所有旧文件重命名：改为 原名-old-年-月-日.json
        InputStream is = Hdfs.class.getClassLoader().getResourceAsStream("hdfs_data.properties");
        Properties properties = new Properties();
        try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(is))) {
            properties.load(bufferedReader);
            //重命名 add_national.json
//            String file_add_national = properties.getProperty("file_add_national");
//            renameForOldDataFile(file_add_national);

            //重命名 car.json
            String file_car = properties.getProperty("file_car");
            renameForOldDataFile(file_car);

            //重命名 complaints.json
            String file_complaints = properties.getProperty("file_complaints");
            renameForOldDataFile(file_complaints);

            //重命名sub_car.json
            String file_sub_car = properties.getProperty("file_sub_car");
            renameForOldDataFile(file_sub_car);

            //重命名car_grade.json
            String file_car_grade = properties.getProperty("file_car_grade");
            renameForOldDataFile(file_car_grade);
        }catch(IOException e){
            e.printStackTrace();
        }
    }

    //判断hdfs是否存在目录/usr/root/ ，如果不存在则创建
    public static void createDirectory() {
        FileSystem fs = null;
        try{
            //1.获取文件系统
            fs = getFileSystem();

            //2.如果目录不存在，则创建目录
            InputStream is = Hdfs.class.getClassLoader().getResourceAsStream("hdfs_data.properties");
            Properties properties = new Properties();
            try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(is))) {
                properties.load(bufferedReader);
                Path dirPath = new Path(properties.getProperty("hdfs_working_directory"));
                if (!fs.exists(dirPath)) {
                    boolean mkdirs = fs.mkdirs(dirPath);
                    System.out.println("创建目录：" + dirPath.getName());
                } else {
                    System.out.println("目录已存在");
                }
                System.out.println("OK");
            }
        } catch (IOException e) {
            e.printStackTrace();
        }finally {
            if(fs!=null)
                try {
                    fs.close();
                }catch(IOException e){
                    e.printStackTrace();
                }
        }
    }

    //更改旧文件的文件名--重命名
    public static void renameForOldDataFile(String fileName){
        FileSystem fs = null;
        try {
            fs = getFileSystem();

            // 修改文件
            Date date = new Date();
            System.out.println(date);
            DateFormat df = DateFormat.getDateInstance();  //日期格式为 : yy-mm-dd
            System.out.println(df.format(date));

            InputStream is = Hdfs.class.getClassLoader().getResourceAsStream("hdfs_data.properties");
            Properties properties = new Properties();
            try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(is))) {
                properties.load(bufferedReader);
                String oldName = properties.getProperty("hdfs_working_directory") + fileName;


                String newName = properties.getProperty("hdfs_working_directory") +
                        fileName + "-old-" + df.format(date) + ".json";

                Path oldFile = new Path(oldName); // hadoop文件对象, 类似java的File类
                Path newFile = new Path(newName);

                // 修改名字
                boolean isResult = false;
                if(fs.exists(oldFile)) {
                    isResult = fs.rename(oldFile, newFile);
                }
                //isResult = fs.rename(newFile,oldFile); //改为原名

                if (isResult != false) {
                    System.out.println(fileName + "  " + "文件已重命名");
                } else {
                    System.out.println(fileName + "  " +"文件重命名失败！");
                }
            }
        } catch (Exception e) {
            e.printStackTrace();
        }finally{
            try{
                fs.close();
            }catch (IOException e){
                e.printStackTrace();
            }
        }
    }

    //读取json文件，逐行读取，文件内容以字符串形式String返回
    public static String readJsonFile() {

        String hdfsPath = "hdfs://120.24.6.0:9000/user/root/style_items.json";
        String result = "";
        Path path = new Path(hdfsPath);
        Configuration configuration = new Configuration();
        FSDataInputStream fsDataInputStream = null;
        FileSystem fileSystem = null;
        BufferedReader br = null;
        // 定义一个字符串用来存储文件内容
        try {
            fileSystem = path.getFileSystem(configuration);
            fsDataInputStream = fileSystem.open(path);
            br = new BufferedReader(new InputStreamReader(fsDataInputStream));
            String str2;
            while ((str2 = br.readLine()) != null) {
                // 遍历抓取到的每一行并将其存储到result里面
                result += str2 + "\n";
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if(br!=null){
                try {
                    br.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if(fsDataInputStream!=null){
                try {
                    fsDataInputStream.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if(fileSystem!=null){
                try {
                    fileSystem.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

        return result;
    }

    //加载hdfs_data.properties文件来获取hdfs文件系统对象
    public static FileSystem getFileSystem() {
        //加载资源文件，resources下的hdfs_data.properties
        Properties properties2 = new Properties();
        FileSystem fs = null ;
        // 通过BufferedReader获取文件流
        InputStream is = Hdfs.class.getClassLoader().getResourceAsStream("hdfs_data.properties");
        Properties properties = new Properties();
        try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(is))) {
            properties2.load(bufferedReader);
            String hdfs_uri = properties2.getProperty("hdfs_uri");
            String hdfs_user = properties2.getProperty("hdfs_user");

            Configuration conf = new Configuration();// 加載配制文件
            conf.set("dfs.replication","1");  //设置文件副本数
            URI uri = new URI(hdfs_uri); // 要连接的资源位置

            fs = FileSystem.get(uri,conf,hdfs_user);  // 创建文件系统实例对象
        }catch (Exception e ){
            e.printStackTrace();
        }
        return fs;
    }

    //把爬虫生成的json文件上传到hdfs
    public static void uploadFiles(){

        // 通过BufferedReader获取文件流
        InputStream is = Hdfs.class.getClassLoader().getResourceAsStream("hdfs_data.properties");
        Properties properties = new Properties();
        try (BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(is))) {
            properties.load(bufferedReader);

            //上传 add_national.json
//            String in_add_national = properties.getProperty("in_add_national");
//            String out_add_national = properties.getProperty("out_add_national");
//            putFile(in_add_national,out_add_national);

            //上传car.json
            String in_car = properties.getProperty("in_car");
            String out_car = properties.getProperty("out_car");
            putFile(in_car,out_car);

            //上传complaints.json
            String  in_complaints = properties.getProperty("in_complaints");
            String out_complaints= properties.getProperty("out_complaints");
            putFile(in_complaints,out_complaints);

            //上传sub_car.json
            String in_sub_car = properties.getProperty("in_sub_car");
            String out_sub_car = properties.getProperty("out_sub_car");
            putFile(in_sub_car,out_sub_car);

            //上传car_grade.json
            String in_car_grade = properties.getProperty("in_car_grade");
            String out_car_grade = properties.getProperty("out_car_grade");
            putFile(in_car_grade,out_car_grade);

        } catch(IOException e){
            e.printStackTrace();
        }

    }

    //上传文件到 hdfs
    public static void putFile(String inputPath,String outputPath){
        FSDataOutputStream fsdos=null;  //  数据输出流
        FileInputStream fis=null;       //  输入流
        FileSystem fs = null;           //  hdfs文件系统对象

        try {
            fs = getFileSystem();
            fsdos = fs.create(new Path(outputPath));
            fis = new FileInputStream(inputPath);

            //--in:是FSDataInputStream类的对象，是有关读取文件的类，也就是所谓“输入流”
            //--out:是FSDataOutputStream类的对象，是有关文件写入的类，也就是“输出流”
            //--1024示用来拷贝的buffer大小（buffer是缓冲区）--缓冲区大小
            //-- 默认true - 是否关闭数据流，如果是false，就在finally里关掉
            if( fs.exists(new Path(outputPath)) ) {
                System.out.println("[  " + inputPath + "  ]" + "文件正在上传中........");
                IOUtils.copyBytes(fis, fsdos, 4096);
            }
            else
                System.out.println("[  " + inputPath + "  ]" +"文件已存在，请进行重命名操作后再进行上传！！！");
        } catch (IOException e) {
            System.out.println("[  " + inputPath + "  ]" +"文件上传出错.........");
            e.printStackTrace();
        }finally {
            try {
                if(fsdos != null)
                    fsdos.close();
                if(fis != null)
                    fsdos.close();
                if(fs != null)
                    fs.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

