package com.huike.stock_analysis.hive;

import java.io.*;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;

/**
 * Daily update driver for the stock-analysis pipeline.
 *
 * <p>Once per day (at {@code update_hour}) this class: runs the scrapy crawler,
 * post-processes the news file and exports it to MySQL, uploads the crawled CSVs
 * to the Hive warehouse directories on HDFS, and runs a set of Hive SQL
 * calculations whose results are exported to MySQL via sqoop. In development
 * mode ({@code develop == true}) every external command is printed instead of
 * executed, and the loop runs exactly once.
 */
public class DataProcessing {
    private final Runtime runtime = Runtime.getRuntime(); // used to launch external shell/hive/sqoop processes
    // true = development environment: print commands instead of running them; also makes run() single-pass
    private final boolean develop = true;
    private final String data_path = "datas/"; // directory containing the crawler's output CSV files
    // Date of the current update run, captured once at the start of the pass so that a
    // run that crosses midnight (slow crawl or calculation) still uses a consistent date.
    private String updateDate;

    /** Returns the current wall-clock time formatted as {@code yyyy-MM-dd HH:mm:ss} (for log lines). */
    private String getNowTime() {
        return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date());
    }

    /**
     * Uploads today's crawled CSV files into the corresponding Hive table
     * directories under the HDFS warehouse path.
     *
     * @throws IOException          if launching the hdfs command fails
     * @throws InterruptedException if interrupted while waiting for a command
     */
    private void upload() throws IOException, InterruptedException {
        // HDFS path of the Hive warehouse
        String hive_path = "/user/hive/warehouse/";
        // Hive table names corresponding to the crawler output files
        String[] table_names = {"ads_dbfx", "ads_jszb", "ads_zjtj1", "ads_zyzb", "line_jszb", "line_zjtj1"};

        // Put each crawler CSV into its Hive table directory on HDFS
        for (String table_name : table_names) {
            if (develop) {
                System.out.println(String.format("hdfs dfs -put -f %s %s", data_path + table_name + "_" + updateDate + ".csv"
                        , hive_path + table_name));
            } else {
                runtime.exec(new String[]{"/bin/sh", "-c",
                        String.format("hdfs dfs -put -f %s %s", data_path + table_name + "_" + updateDate + ".csv"
                                , hive_path + table_name)}).waitFor();
            }
        }
    }

    /**
     * Post-processes today's crawled news file (re-assembling multi-line records
     * into single back-tick-delimited rows), uploads the result to HDFS and
     * exports it to the MySQL {@code ads_news} table via sqoop.
     *
     * <p>If today's news CSV does not exist there is no new data and the method
     * returns without doing anything.
     *
     * @throws Exception if file processing or an external command fails
     */
    private void dealNews() throws Exception {
        // Command that uploads the processed result to HDFS (same local and remote path)
        String put_cmd = "hdfs dfs -put -f /tmp/out/ads_news/000000_0 /tmp/out/ads_news/000000_0";
        // Sqoop export command.
        // NOTE(review): DB host and root credentials are hard-coded here (and in calculate());
        // they should be moved to external configuration.
        String sqoop = "sqoop export --connect 'jdbc:mysql://101.133.166.202:3306/stock_analysis?useUnicode=true&characterEncoding=utf-8'" +
                " --username root --password root --export-dir '/tmp/out/ads_news' --table ads_news -m 1 --fields-terminated-by '`'";

        if (develop) {
            System.out.println(put_cmd);
            System.out.println(sqoop);
        } else {
            // Missing file means no news was crawled today; nothing to export.
            File file = new File(data_path + "ads_news_" + updateDate + ".csv");
            if (!file.exists()) {
                return;
            }
            // try-with-resources guarantees both streams are closed even if processing throws
            // (the original code leaked them on any exception). Closing the writer also closes
            // the underlying FileOutputStream. Output file is created automatically.
            try (BufferedReader bufferedReader = new BufferedReader(
                         new InputStreamReader(new FileInputStream(file), "UTF-8"));
                 OutputStreamWriter writer = new OutputStreamWriter(
                         new FileOutputStream("/tmp/out/ads_news/000000_0"), "UTF-8")) {
                String line;                 // current physical line of the crawler file
                String[] data_list = {null}; // fields of the record currently being assembled

                // A logical record may span several physical lines; continuation lines are
                // appended to field 5 until a line ending with the "ads_news" marker closes it.
                while ((line = bufferedReader.readLine()) != null) {
                    if (data_list[0] == null) {
                        // First line of a record: split into 6 fields.
                        data_list = line.split(",", 6);
                        // Drops the first character of field 5 and seeds the "\N" marker —
                        // presumably a leading quote and a Hive NULL placeholder; TODO confirm.
                        data_list[5] = data_list[5].substring(1) + "\\N";
                    } else {
                        if (line.endsWith("ads_news")) {
                            // Closing marker found: keep the line minus its trailing 10 characters
                            // (the marker plus separator, by the look of it — TODO confirm), then
                            // emit the record with back-tick field separators.
                            data_list[5] += line.substring(0, line.length() - 10);
                            for (int i = 0; i < data_list.length - 1; i++) {
                                writer.write(data_list[i] + "`");
                            }
                            writer.write(data_list[data_list.length - 1] + "\n");
                            data_list[0] = null; // reset: next line starts a new record
                        } else {
                            // Continuation line: append with a literal "\n" escape sequence.
                            data_list[5] += line + "\\n";
                        }
                    }
                }
            }

            // Upload the processed file to HDFS and export it to MySQL via sqoop.
            runtime.exec(new String[]{"/bin/sh", "-c", put_cmd}).waitFor();
            runtime.exec(new String[]{"/bin/sh", "-c", sqoop}).waitFor();
        }
    }

    /**
     * Runs the Hive SQL indicator calculations and exports each result table to
     * MySQL via sqoop. A calculation is skipped when none of the crawler tables
     * it depends on produced a file today. The technical-indicator table
     * ({@code ads_jszb}) additionally requires a sequence of auxiliary Hive
     * tables to be (re)built first.
     *
     * @throws IOException          if launching an external command fails
     * @throws InterruptedException if interrupted while waiting for a command
     */
    private void calculate() throws IOException, InterruptedException {
        // SELECT computing the ~30-day average price-change percentage, embedded in the
        // fund-statistics SQL below (for efficiency it actually uses the latest 20 rows,
        // roughly 30 calendar days).
        String pjzdf_select = "select ads_zjtj1.s_code,ads_zjtj1.`date`,avg(ads_zjtj1.ads_zdf)" +
                " over(partition by ads_zjtj1.s_code order by unix_timestamp(ads_zjtj1.`date`)" +
                " rows between 20 preceding and current row) as pjzdf from ads_zjtj1";
        // The indicator SQL statements — the core of the program.
        String[] sqls = {
                // Author: 游建
                // Profitability indicators
                "select ads_dbfx.jlr,ads_dbfx.yysr-ads_dbfx.yywsr,ads_zyzb.jbmgsy,ads_dbfx.yyjlrl/100,ads_dbfx.jzcsyl/100" +
                        ",ads_dbfx.s_code,ads_dbfx.`date`,ads_dbfx.s_name,avg(ads_dbfx.jlr) over (partition by ads_zyzb.code)" +
                        ",avg(ads_dbfx.yysr-ads_dbfx.yywsr) over (partition by ads_zyzb.code),avg(ads_zyzb.jbmgsy) over (partition by ads_zyzb.code)" +
                        ",avg(ads_dbfx.yyjlrl/100) over (partition by ads_zyzb.code),avg(ads_dbfx.jzcsyl/100) over (partition by ads_zyzb.code)" +
                        ",ads_zyzb.mll/100,ads_zyzb.jll/100,ads_zyzb.mgjzc,ads_dbfx.jlr/(ads_dbfx.cbze+ads_dbfx.qjfy),ads_dbfx.hbzj/ads_dbfx.jlr" +
                        " from ads_dbfx inner join ads_zyzb on ads_dbfx.s_code=ads_zyzb.s_code and ads_dbfx.`date`=ads_zyzb.`date`" +
                        " where ads_dbfx.`date`>date_sub('" + updateDate + "',100)",

                // Debt-paying ability indicators
                "select ads_zyzb.`date`,ads_zyzb.s_code,ads_zyzb.s_name,ads_dbfx.ldzc/(ads_dbfx.fzze*ads_zyzb.ldzczfz/100)" +
                        ",ads_dbfx.fzze/ads_dbfx.zcze,ads_dbfx.fzze/(ads_dbfx.zcze-ads_dbfx.fzze),ads_dbfx.fldzc/(ads_dbfx.zcze-ads_dbfx.fzze)" +
                        ",(ads_dbfx.fzze*(1-ads_zyzb.ldzczfz/100))/ads_dbfx.zcze,ads_dbfx.hbzj/(ads_dbfx.fzze*ads_zyzb.ldzczfz/100)" +
                        ",avg(ads_dbfx.ldzc/(ads_dbfx.fzze*ads_zyzb.ldzczfz/100)) over (partition by ads_zyzb.code)" +
                        ",avg(ads_dbfx.fzze/ads_dbfx.zcze) over (partition by ads_zyzb.code),ads_zyzb.sdbl" +
                        " from ads_dbfx inner join ads_zyzb on ads_dbfx.s_code=ads_zyzb.s_code and ads_dbfx.`date`=ads_zyzb.`date`" +
                        " where ads_dbfx.`date`>date_sub('" + updateDate + "',100)",

                // Growth ability indicators (self-join comparing a period with the same period one year earlier)
                "select (now.jlr-last.jlr)/last.jlr,((now.zcze-now.fzze)-(last.zcze-last.fzze))/(last.zcze-last.fzze)" +
                        ",(now.yysr-last.yysr)/last.yysr,(now.zcze-last.zcze)/last.zcze,((now.yysr-now.yycb)-(last.yysr-last.yycb))/(last.yysr-last.yycb)" +
                        ",now.`date`,now.s_code,now.s_name,avg((now.jlr-last.jlr)/last.jlr) over (partition by now.code)" +
                        ",avg(((now.zcze-now.fzze)-(last.zcze-last.fzze))/(last.zcze-last.fzze)) over (partition by now.code)" +
                        ",avg((now.yysr-last.yysr)/last.yysr) over (partition by now.code),avg((now.zcze-last.zcze)/last.zcze) over (partition by now.code)" +
                        ",avg(((now.yysr-now.yycb)-(last.yysr-last.yycb))/(last.yysr-last.yycb)) over (partition by now.code),now.fldzc/now.zcze" +
                        " from ads_dbfx as now,ads_dbfx as last" +
                        " where now.s_code=last.s_code and now.`date`=last_day(date_add(last.`date`,365)) and now.`date`>date_sub('" + updateDate + "',100)",

                // Fund-statistics indicators
                "select -(ads_zjtj1.ads_xdjlrje+ads_zjtj1.ads_zdjlrje+ads_zjtj1.ads_ddjlrje+ads_zjtj1.ads_cddjlrje)" +
                        ",ads_zjtj1.ads_xdjlrje+ads_zjtj1.ads_zdjlrje+ads_zjtj1.ads_ddjlrje+ads_zjtj1.ads_cddjlrje" +
                        ",-ads_zjtj1.ads_zljlrje,ads_zjtj1.ads_zljlrje,pjzdf.pjzdf" +
                        ",rank() over (partition by ads_zjtj1.code order by ads_zjtj1.ads_xdjlrje+ads_zjtj1.ads_zdjlrje+ads_zjtj1.ads_ddjlrje+ads_zjtj1.ads_cddjlrje)" +
                        ",count(*) over (partition by ads_zjtj1.code)" +
                        ",avg(ads_zjtj1.ads_xdjlrje+ads_zjtj1.ads_zdjlrje+ads_zjtj1.ads_ddjlrje+ads_zjtj1.ads_cddjlrje) over (partition by ads_zjtj1.code)" +
                        ",avg(ads_zjtj1.ads_zljlrje) over (partition by ads_zjtj1.code),avg(pjzdf.pjzdf) over (partition by ads_zjtj1.code)" +
                        ",ads_zjtj1.ads_zljlrjzb,-ads_zjtj1.ads_zljlrjzb,avg(ads_zjtj1.ads_zljlrje/abs(ads_zjtj1.ads_zljlrjzb)) over (partition by ads_zjtj1.code)" +
                        ",avg(-ads_zjtj1.ads_zljlrje/abs(ads_zjtj1.ads_zljlrjzb)) over (partition by ads_zjtj1.code)" +
                        ",ads_zjtj1.`date`,ads_zjtj1.s_code,ads_zjtj1.s_name,ads_zjtj1.name,ads_zjtj1.ads_xdjlrje,ads_zjtj1.ads_zdjlrje" +
                        ",ads_zjtj1.ads_ddjlrje,ads_zjtj1.ads_cddjlrje,ads_zjtj1.ads_zdf" +
                        " from ads_zjtj1 inner join (" + pjzdf_select + ") as pjzdf" +
                        " on ads_zjtj1.s_code=pjzdf.s_code and ads_zjtj1.`date`=pjzdf.`date` where ads_zjtj1.`date`='" + updateDate + "'",

                // Author: 吴健辛
                // Operating ability indicators
                "select ads_zyzb.`date`,ads_zyzb.s_code,ads_zyzb.s_name,chzzts,zzczzl,yszkzzts,ldbl,sdbl" +
                        ",jyxjlyysr,avg(chzzts) over(partition by ads_zyzb.code),avg(zzczzl) over(partition by ads_zyzb.code)" +
                        " from ads_dbfx inner join ads_zyzb on ads_dbfx.s_code=ads_zyzb.s_code and ads_dbfx.`date`=ads_zyzb.`date`" +
                        " where ads_zyzb.`date`>date_sub('" + updateDate + "',100)",

                // Risk-avoidance indicators (three revised indicators were removed)
                "select ads_jszb.`date`,ads_jszb.s_code,ads_jszb.s_name," +
                        "-(0.717*((ldzc-fzze*ldzczfz)/zcze)+0.847*mgwfply+3.107*(mlr/zcze)+0.998*(yysr/zcze)),yszk/zcze," +
                        "ads_spjg/jbmgsy,ads_spjg/mgjzc from ads_zyzb inner join ads_dbfx on ads_zyzb.s_code=ads_dbfx.s_code" +
                        " and ads_zyzb.`date`=ads_dbfx.`date` inner join ads_jszb on ads_zyzb.s_code=ads_jszb.s_code " +
                        "where ads_jszb.`date`>=ads_zyzb.`date` and ads_jszb.`date`<last_day(date_add(ads_zyzb.`date`,85))" +
                        " and ads_zyzb.`date`>date_sub('" + updateDate + "',100)",

                // Technical indicators (three indicators added); reads the auxiliary tables built below
                "select aux.ads_date,aux.ads_id,aux.ads_name,ads_ma5,ads_ma10,ads_ma20,ads_ma30" +
                        ",ads_ma60,ads_ma120,ads_boll_lower,ads_boll_upper,aux.ads_boll,aux.ads_zdf5,ads_ema12,ads_ema26" +
                        ",ads_ema12-ads_ema26,2*(ads_ema12-ads_ema26-aux.ads_spjg),aux_boll_rsv_rsi.ads_rsv,ads_kdj_k" +
                        ",ads_kdj_d,ads_kdj_j,ads_rsi,ads_kpjg,ads_zdf,ads_kpjg,aux.ads_spjg,ads_zgj,ads_zdj,ads_cjl" +
                        ",ads_cje,ads_zdf,ads_zde,ads_cjl20,ads_lsbdl,ads_hylsbdl from aux inner join aux_boll_rsv_rsi on aux.ads_id=aux_boll_rsv_rsi.ads_id" +
                        " and aux.ads_date=aux_boll_rsv_rsi.ads_date inner join aux_ema_kdj2 on aux.ads_id=aux_ema_kdj2.ads_id" +
                        " and aux.ads_date=aux_ema_kdj2.ads_date" +
                        " where aux.ads_date='" + updateDate + "'",
        };

        // Target MySQL tables, one per entry of sqls
        String[] mysql_tables = {"ads_ylnl", "ads_changzhainengli", "ads_chengzhangnengli", "ads_zjtj1", "ads_yynl", "ads_fxgb", "ads_jszb"};
        // Crawler tables each calculation depends on, one array per entry of sqls
        String[][] basic_table = {{"ads_dbfx", "ads_zyzb"}, {"ads_dbfx", "ads_zyzb"}, {"ads_dbfx"}, {"ads_zjtj1"},
                {"ads_dbfx", "ads_zyzb"}, {"ads_dbfx", "ads_zyzb", "ads_jszb"}, {"ads_jszb", "line_jszb"}};
        // Local/HDFS path where calculation results are written
        String results_path = "/tmp/out/";
        // Hive command template: write the query result to a local directory
        String insert = "insert overwrite local directory \"%s\" row format delimited fields terminated by \",\" %s;";
        // Sqoop command template: export a result directory on HDFS to a MySQL table.
        // NOTE(review): hard-coded credentials, same remark as in dealNews().
        String sqoop = "sqoop export --connect 'jdbc:mysql://101.133.166.202:3306/stock_analysis?useUnicode=true&characterEncoding=utf-8'" +
                " --username root --password root --export-dir '%s' --table %s -m 1 --fields-terminated-by ','" +
                " --input-null-non-string '\\\\N' --update-key ads_id,ads_date --update-mode allowinsert";

        // This loop could be parallelized, but the server hardware cannot handle it,
        // so the calculations run sequentially on purpose.
        for (int i = 0; i < sqls.length; i++) {
            // The technical-indicator calculation needs auxiliary tables, hence the extra
            // Hive script below. (Its workload is heavy; it is not run as a daily update
            // on the production server.)
            if (mysql_tables[i].equals("ads_jszb")) {
                StringBuilder assist_hive = new StringBuilder();
                // Truncate the auxiliary tables first to avoid duplicated rows
                assist_hive.append("truncate table aux;");
                assist_hive.append("truncate table aux_boll_rsv_rsi;");
                assist_hive.append("truncate table aux_ema_kdj1;truncate table aux_ema_kdj2;");
                // aux holds the values that need no calculation or only a simple windowed one
                assist_hive.append("insert into aux select ads_jszb.`date`,ads_jszb.s_code,ads_jszb.s_name,avg(ads_jszb.ads_spjg) over(partition by ads_jszb.s_code" +
                        " order by ads_jszb.`date` desc rows between current row and 4 following),avg(ads_jszb.ads_spjg)" +
                        " over(partition by ads_jszb.s_code order by ads_jszb.`date` desc rows between current row and 9 following)" +
                        ",avg(ads_jszb.ads_spjg) over(partition by ads_jszb.s_code order by ads_jszb.`date` desc rows between current row and 19 following)" +
                        ",avg(ads_jszb.ads_spjg) over(partition by ads_jszb.s_code order by ads_jszb.`date` desc rows between current row and 29 following)" +
                        ",avg(ads_jszb.ads_spjg) over(partition by ads_jszb.s_code order by ads_jszb.`date` desc rows between current row and 59 following)" +
                        ",avg(ads_jszb.ads_spjg) over(partition by ads_jszb.s_code order by ads_jszb.`date` desc rows between current row and 119 following)" +
                        ",avg(ads_jszb.ads_spjg) over(partition by ads_jszb.s_code order by ads_jszb.`date` desc rows between current row and 19 following)" +
                        ",avg(ads_jszb.ads_zdf) over(partition by ads_jszb.s_code order by ads_jszb.`date` desc rows between current row and 4 following)" +
                        ",min(ads_jszb.ads_spjg) over(partition by ads_jszb.s_code order by ads_jszb.`date` desc rows between current row and 8 following)" +
                        ",max(ads_jszb.ads_spjg) over(partition by ads_jszb.s_code order by ads_jszb.`date` desc rows between current row and 8 following)" +
                        ",sum(ABS(ads_jszb.ads_spjg)) over(partition by ads_jszb.s_code order by ads_jszb.`date` desc rows between current row and 13 following)" +
                        ",case when ads_jszb.ads_zdf<0 then 0 else ads_jszb.ads_zdf end,ads_jszb.ads_spjg,ads_jszb.ads_kpjg,ads_jszb.ads_zdf,ads_jszb.ads_zgj,ads_jszb.ads_zdj,ads_jszb.ads_cjl,ads_jszb.ads_cje,ads_jszb.ads_zde," +
                        "avg(ads_jszb.ads_cjl) over(partition by ads_jszb.s_code order by ads_jszb.`date` desc rows between current row and 19 following)," +
                        "stddev(ads_jszb.ads_zdf) over(partition by ads_jszb.s_code order by ads_jszb.`date` desc rows between current row and 19 following)," +
                        "stddev(line_jszb.ads_zdf) over(partition by line_jszb.code order by line_jszb.`date` desc rows between current row and 19 following)" +
                        " from ads_jszb inner join line_jszb on ads_jszb.code=line_jszb.code and ads_jszb.`date`=line_jszb.`date`;");
                // Values needing one more pass over aux go into aux_boll_rsv_rsi (BOLL, RSV, RSI)
                assist_hive.append("insert into aux_boll_rsv_rsi select ads_date,ads_id,ads_name,ads_boll-2*stddev(ads_boll)" +
                        " over(partition by ads_id order by ads_date desc rows between current row and 19 following)" +
                        ",ads_boll+2*stddev(ads_spjg) over(partition by ads_id order by ads_date desc rows between" +
                        " current row and 19 following),(ads_spjg-ads_min)/(ads_max-ads_min)*100,ads_rsi_A/ads_rsi_AB*100" +
                        ",ads_spjg from aux;");
                // Seed aux_ema_kdj2 with initial values; it is then refined iteratively below
                assist_hive.append("insert into aux_ema_kdj2 select ads_date,ads_id,ads_name,ads_spjg,ads_spjg,50,50,0,ads_rsv" +
                        ",ads_spjg from aux_boll_rsv_rsi;");
                // Iteratively compute the recursive values (EMA, KDJ, ...)
                for (int j = 1; j <= 5; j++) {
                    // Repeating the recurrence several times reduces the error of the seed values.
                    // Ping-pong between aux_ema_kdj1 and aux_ema_kdj2: truncate one, refill it
                    // from the other with one more recurrence step applied.
                    assist_hive.append("truncate table aux_ema_kdj1;");
                    assist_hive.append("insert into aux_ema_kdj1 select ads_date,ads_id,ads_name" +
                            ",ads_spjg*(0.15385)+(0.85)*avg(ads_ema12) over(partition by ads_id" +
                            " order by ads_date desc rows between 1 following and 1 following)" +
                            ",ads_spjg*(0.07407)+(0.927)*avg(ads_ema26) over(partition by ads_id" +
                            " order by ads_date desc rows between 1 following and 1 following)" +
                            ",0.3333*ads_rsv+0.6667*avg(ads_kdj_k) over(partition by ads_id order by" +
                            " ads_date desc rows between 1 following and 1 following)" +
                            ",0.3333*ads_kdj_k+0.6667*avg(ads_kdj_d) over(partition by ads_id order by ads_date" +
                            " desc rows between 1 following and 1 following),3*ads_kdj_k-2*ads_kdj_d,ads_rsv,ads_spjg" +
                            " from aux_ema_kdj2;");
                    assist_hive.append("truncate table aux_ema_kdj2;");
                    assist_hive.append("insert into aux_ema_kdj2 select ads_date,ads_id,ads_name,ads_spjg*(0.15385)+(0.85)*avg(ads_ema12)" +
                            " over(partition by ads_id order by ads_date desc rows between 1 following and 1 following)" +
                            ",ads_spjg*(0.07407)+(0.927)*avg(ads_ema26) over(partition by ads_id order by ads_date desc" +
                            " rows between 1 following and 1 following),0.3333*ads_rsv+0.6667*avg(ads_kdj_k)" +
                            " over(partition by ads_id order by ads_date desc rows between 1 following and 1 following)" +
                            ",0.3333*ads_kdj_k+0.6667*avg(ads_kdj_d) over(partition by ads_id order by ads_date desc" +
                            " rows between 1 following and 1 following),3*ads_kdj_k-2*ads_kdj_d,ads_rsv,ads_spjg from aux_ema_kdj1;");
                }

                if (develop) {
                    System.out.println("hive:" + assist_hive);
                } else {
                    // Build the auxiliary tables only if at least one of this calculation's
                    // source crawler tables produced a file today.
                    // (Bug fix: the original checked mysql_tables[i] on every pass, leaving
                    // the loop variable j unused; it now checks each dependency table.)
                    for (int j = 0; j < basic_table[i].length; j++) {
                        if (new File(data_path + basic_table[i][j] + "_" + updateDate + ".csv").exists()) {
                            runtime.exec(new String[]{"hive", "-e", assist_hive.toString()}).waitFor();
                            break;
                        }
                    }
                }
            }

            String hive_cmd = String.format(insert, results_path + mysql_tables[i], sqls[i]);
            String put_cmd = String.format("hdfs dfs -put -f %s %s", results_path + mysql_tables[i] + "/000000_0", results_path + mysql_tables[i] + "/000000_0");
            // The technical-indicator calculation produces several output files; upload them all
            if (mysql_tables[i].equals("ads_jszb")) {
                put_cmd += String.format("\nhdfs dfs -put -f %s %s", results_path + mysql_tables[i] + "/000001_0", results_path + mysql_tables[i] + "/000001_0");
                put_cmd += String.format("\nhdfs dfs -put -f %s %s", results_path + mysql_tables[i] + "/000002_0", results_path + mysql_tables[i] + "/000002_0");
                put_cmd += String.format("\nhdfs dfs -put -f %s %s", results_path + mysql_tables[i] + "/000003_0", results_path + mysql_tables[i] + "/000003_0");
                put_cmd += String.format("\nhdfs dfs -put -f %s %s", results_path + mysql_tables[i] + "/000004_0", results_path + mysql_tables[i] + "/000004_0");
            }
            String sqoop_cmd = String.format(sqoop, results_path + mysql_tables[i], mysql_tables[i]);
            if (develop) {
                System.out.println("hive:" + hive_cmd);
                System.out.println(put_cmd);
                System.out.println(sqoop_cmd);
            } else {
                // Skip the calculation entirely when none of its source crawler tables was updated today
                for (int j = 0; j < basic_table[i].length; j++) {
                    if (new File(data_path + basic_table[i][j] + "_" + updateDate + ".csv").exists()) {
                        System.out.println(getNowTime() + " " + mysql_tables[i] + " calculate start");
                        // Run the Hive calculation, writing the result to the local filesystem
                        // (writing large results straight to HDFS from Hive fails, hence local first)
                        runtime.exec(new String[]{"hive", "-e", hive_cmd}).waitFor();
                        // Upload the local result to HDFS and wait for completion
                        runtime.exec(new String[]{"/bin/sh", "-c", put_cmd}).waitFor();
                        // Export from HDFS to MySQL via sqoop; waiting is optional but kept
                        // deliberately because of the limited server hardware
                        runtime.exec(new String[]{"/bin/sh", "-c", sqoop_cmd}).waitFor();
                        System.out.println(getNowTime() + " " + mysql_tables[i] + " calculate over");
                        break;
                    }
                }
            }
        }
    }

    /**
     * Main loop: sleeps until the daily update hour, then runs the crawler,
     * news processing, upload, and calculations. In development mode it runs a
     * single pass immediately and returns.
     *
     * @throws Exception if any stage of the pipeline fails
     */
    public void run() throws Exception {
        // Daily update hour. It was originally 7 PM, but the server's clock is wrong:
        // 7 PM real time corresponds to 11 AM on that machine.
        int update_hour = 11;
        while (true) {
            Calendar calendar = Calendar.getInstance();
            // Before today's update hour: sleep until it; otherwise run the update pass
            if (calendar.get(Calendar.HOUR_OF_DAY) < update_hour && !develop) {
                System.out.println(getNowTime() + " Waiting for update today.");
                Thread.sleep((((update_hour - calendar.get(Calendar.HOUR_OF_DAY)) * 60
                        - calendar.get(Calendar.MINUTE)) * 60 - calendar.get(Calendar.SECOND)) * 1000
                        - calendar.get(Calendar.MILLISECOND));
            } else {
                // Capture the run date once so a pass that crosses midnight stays consistent
                updateDate = new SimpleDateFormat("yyyy-MM-dd").format(calendar.getTime());
                // Run the crawler and wait for it to finish
                if (develop) {
                    System.out.println("scrapy crawl newData");
                } else {
                    System.out.println(getNowTime() + " Scrapy start");
                    runtime.exec(new String[]{"/bin/sh", "-c", "scrapy crawl newData"}).waitFor();
                    System.out.println(getNowTime() + " Scrapy over");
                }

                // Post-process the crawled news data and export it to MySQL
                dealNews();

                // Upload the crawled CSVs into the Hive warehouse directories on HDFS
                if (!develop) {
                    System.out.println(getNowTime() + " Upload start");
                }
                upload();

                // Run the indicator calculations and export the results to MySQL
                if (!develop) {
                    System.out.println(getNowTime() + " Upload over");
                    System.out.println(getNowTime() + " Calculate start");
                }
                calculate();

                // In development mode run only once; otherwise sleep until tomorrow's update hour
                if (develop) {
                    break;
                } else {
                    System.out.println(getNowTime() + " Calculate over");
                    System.out.println(getNowTime() + " Message: Update completed! Waiting for the next update.");
                    calendar = Calendar.getInstance();
                    // A stalled update (server hang or weak hardware) may finish after midnight,
                    // so pick the sleep duration based on whether the update hour is still ahead today
                    if (calendar.get(Calendar.HOUR_OF_DAY) < update_hour) {
                        Thread.sleep((((update_hour - calendar.get(Calendar.HOUR_OF_DAY)) * 60
                                - calendar.get(Calendar.MINUTE)) * 60 - calendar.get(Calendar.SECOND)) * 1000
                                - calendar.get(Calendar.MILLISECOND));
                    } else {
                        Thread.sleep(((((update_hour + 24) - calendar.get(Calendar.HOUR_OF_DAY)) * 60
                                - calendar.get(Calendar.MINUTE)) * 60 - calendar.get(Calendar.SECOND)) * 1000
                                - calendar.get(Calendar.MILLISECOND));
                    }
                }
            }
        }
    }

    /** Program entry point. */
    public static void main(String[] args) throws Exception {
        new DataProcessing().run();
    }
}
