package movie.sh;

import cn.hutool.db.Db;
import com.mysql.cj.jdbc.MysqlDataSource;
import luculent.HdfsUtils;
import movie.py.MovieMr;
import movie.util.HiveJDBCUtil;
import movie.util.SqoopUtil;

import javax.sql.DataSource;
import java.sql.SQLException;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

/**
 * Batch ETL driver for the movie-analytics pipeline.
 *
 * <p>Flow: prepare the local data file, create dated HDFS layers
 * (RAW → ODS → FCT), create the MySQL result tables, load the raw CSV
 * into a Hive staging table, run the two analyses as CTAS statements,
 * export the results to HDFS, and finally push them into MySQL via Sqoop.
 *
 * <p>Requirement 1: which weekday has the most movie views (browse peak).
 * Requirement 2: TOP-10 movies by total rating score.
 */
public class Movie {

    // Builds the yyyyMMdd partition suffix used for all dated HDFS directories.
    static DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMdd");

    // Defined for manual cleanup; intentionally NOT executed by main(), which
    // relies on CREATE TABLE IF NOT EXISTS instead.
    static String drop1 = " DROP TABLE IF EXISTS movie_weekday_peak ";

    // Requirement 1 result table. Name fixed from the misspelled
    // "movice_weekday_peak" so drop1, this DDL and the Sqoop export all
    // refer to the same table.
    static String create1 = "CREATE TABLE if not exists movie_weekday_peak ( "  +
            "  id int(11) NOT NULL AUTO_INCREMENT," +
            "  weekday int(11) NOT NULL DEFAULT 0 COMMENT '工作日(周一~周天)', " +
            "  `uno` int(11) NOT NULL DEFAULT 0 COMMENT '峰值', " +
            "  PRIMARY KEY (`id`) " +
            ") ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='电影浏览器峰值表' ";

    // Requirement 2 result table. Table COMMENT corrected: it previously
    // duplicated create1's "browse peak" comment by copy-paste.
    static String create2 = "CREATE TABLE  if not exists `movie_score_rank10` (" +
            "  `id` int(11) NOT NULL AUTO_INCREMENT," +
            "  `movie_id` int(11) NOT NULL DEFAULT 0 COMMENT '电影id'," +
            "  `movie_num` int(11) NOT NULL DEFAULT 0 COMMENT '评分'," +
            "  PRIMARY KEY (`id`)" +
            ") ENGINE=InnoDB DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='电影评分排行榜TOP10表';";

    // Local path of the raw ratings CSV (userid,movie,rate,weekday).
    static String filePath = "F:/writers.csv";

    /**
     * Runs the whole pipeline end to end for today's date partition.
     *
     * @param args unused
     * @throws Exception propagated from any of the HDFS / JDBC / Sqoop steps;
     *                   the job is all-or-nothing, so no recovery is attempted
     */
    public static void main(String[] args) throws Exception {
        LocalDate date  = LocalDate.now();
        String time = formatter.format(date);

        // Preparation step 1: generate/stage the u.data ratings file locally.
        MovieMr.init(filePath);

        // Step 2: create the dated HDFS layers — RAW (landing), ODS (staging),
        // FCT (aggregated facts).
        HdfsUtils.mkdir("/data/inner/RAW/"+time);
        HdfsUtils.mkdir("/data/inner/ODS/"+time);
        HdfsUtils.mkdir("/data/inner/FCT/"+time);

        // Step 3: create one MySQL result table per requirement.
        Db.use().execute(create1);
        Db.use().execute(create2);

        // Requirement 1: find the weekday with the most movie views (browse peak).
        // Requirement 2: TOP-10 movies by rating (usable later for recommendations).

        // Data staging: upload the raw CSV into the RAW layer on HDFS.
        HdfsUtils.uploadLocalFile2HDFS(filePath, "/data/inner/RAW/"+time+"/mvice.csv");

        // Analysis 1.1: create the Hive staging table mapped onto the ODS layer.
        HiveJDBCUtil.createDatabase("movie");
        // NOTE: "\\n" keeps the literal backslash-n in the HiveQL text, which is
        // what LINES TERMINATED BY expects; a raw Java '\n' would embed a real
        // newline character inside the DDL statement.
        String movie1 = "CREATE TABLE IF NOT EXISTS inner_ods_01_movie(" +
                "`userid` INT comment '用户的ID', " +
                " `movie` INT comment '观看电影的ID', " +
                " `rate` INT comment '用户对于电影的评分', " +
                " `weekday` INT comment '观看电影的日期'" +
                " )" +
                "ROW format delimited fields terminated BY ',' " +
                "lines terminated BY '\\n' LOCATION '/data/inner/ODS/"+time+"/movie'";
        HiveJDBCUtil.createTable(movie1);

        // Analysis 1.2: load the uploaded CSV into the staging table.
        HiveJDBCUtil.loadData("/data/inner/RAW/"+time+"/mvice.csv","inner_ods_01_movie");

        // Step 2 of the analysis: one temporary fact table per requirement
        // (inner_fct_01_movie01, inner_fct_01_movie02).

        // Requirement 1: count views per weekday, highest first.
        String  xq1 = "CREATE TABLE IF NOT EXISTS inner_fct_01_movie01 AS " +
                "SELECT `weekday`, " +
                "       count(1) uno " +
                " FROM inner_ods_01_movie " +
                " GROUP BY `weekday` " +
                " ORDER BY uno DESC ";

        HiveJDBCUtil.execute(xq1);

        // Requirement 2: top 10 movies by summed rating, joined with each
        // movie's maximum single rating.
        String xq2 = "CREATE TABLE IF NOT EXISTS inner_fct_01_movie02 AS " +
                " SELECT mtop.`movie_id`, " +
                "       iom01.`maxrate` " +
                " FROM " +
                "  (SELECT iom02.`movie` movie_id, " +
                "          sum(`rate`) msum " +
                "   FROM inner_ods_01_movie iom02 " +
                "   GROUP BY `movie` " +
                "   ORDER BY msum DESC LIMIT 10) mtop " +
                " LEFT JOIN " +
                "  (SELECT movie, " +
                "          max(`rate`) maxrate " +
                "   FROM inner_ods_01_movie " +
                "   GROUP BY movie) iom01 ON mtop.movie_id = iom01.movie ";

        HiveJDBCUtil.execute(xq2);

        // Step 3: export each fact table to the dated FCT layer on HDFS as CSV.
        String dc1 ="INSERT overwrite directory '/data/inner/FCT/"+time+"/movie01' ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' " +
                "SELECT *  FROM inner_fct_01_movie01";

        HiveJDBCUtil.execute(dc1);

        String dc2 ="INSERT overwrite directory '/data/inner/FCT/"+time+"/movie02' ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' " +
                "SELECT * FROM inner_fct_01_movie02";
        HiveJDBCUtil.execute(dc2);

        // Final step: push the HDFS result files into the MySQL result tables
        // via Sqoop. Target table name fixed to match create1
        // ("movie_weekday_peak", not the previous misspelling).
        SqoopUtil.export("movie_weekday_peak","movie01",new String[]{"weekday","uno"});
        SqoopUtil.export("movie_score_rank10","movie02",new String[]{"movie_id","movie_num"});

    }
}
