package com.yuanshi.faceword

import org.apache.spark.sql.SparkSession

object CreateTable {

  /** One-shot setup job: drops and re-creates the Hive tables used by the
   *  faceword reporting pipeline. Every table is partitioned by `dt` and
   *  stored as ORC, matching the downstream writers.
   *
   *  Fixes over the previous revision:
   *   - each `create table` was missing the closing `)` of the column list
   *     (and a space) before `partitioned by`, so the DDL never parsed;
   *   - column `7cnts` starts with a digit and must be backquoted in Hive;
   *   - the video table was created as `faceword_vedio_target` (typo) while
   *     the matching drop targeted `faceword_video_target`;
   *   - the picture table's create statement re-used the (misspelled) video
   *     table name instead of `faceword_picture_target`;
   *   - the last two tables now carry the same partition/storage clause as
   *     the rest of the file for consistency.
   */
  def main(args: Array[String]): Unit = {
    val spark: SparkSession = SparkSession.builder()
      .master("local[*]")
      .appName("CreateTable")
      .getOrCreate()

    // --- Faceword content operations: elephant-app section ---
    spark.sql("drop table if exists faceword.elephant_user_dayact")
    spark.sql(
      """create table faceword.elephant_user_dayact(
        |  elephant_user_dayact int comment '小象app的全站日活'
        |) partitioned by (dt String) stored as orc""".stripMargin)

    spark.sql("drop table if exists faceword.user_usernew_seven")
    spark.sql(
      """create table faceword.user_usernew_seven(
        |  `7cnts` int comment '7天内新登录用户当日活跃数'
        |) partitioned by (dt String) stored as orc""".stripMargin)

    spark.sql("drop table if exists faceword.fw_core")
    spark.sql(
      """create table faceword.fw_core(
        |  show_cnts int comment '颜世界展现日活'
        |  ,exposure_cnts int comment '颜世界曝光量'
        |  ,click_cnts int comment '颜世界点击量'
        |  ,effect_click_cnts int comment '颜世界有效点击量'
        |  ,effect_click_rate double comment '颜世界有效点击率'
        |  ,ctr double
        |  ,read_avg double comment '人均阅读'
        |  ,real_read_avg double comment '人均真实阅读'
        |  ,effect_read_avg double comment '人均有效阅读'
        |  ,time_use_avg double comment '人均使用时长'
        |) partitioned by (dt String) stored as orc""".stripMargin)

    // --- Faceword content-type metrics section ---
    spark.sql("drop table if exists faceword.faceword_video_target")
    spark.sql(
      """create table faceword.faceword_video_target(
        |  video_exposure_cnts int comment '视频曝光'
        |  ,video_vv int comment '视频vv'
        |  ,video_uv int comment '视频uv'
        |  ,video_ctr int comment '视频ctr'
        |  ,read_user_avg double comment '人均阅读量'
        |  ,real_read_user_avg double comment '人均真实视频阅读'
        |) partitioned by (dt String) stored as orc""".stripMargin)

    spark.sql("drop table if exists faceword.faceword_picture_target")
    spark.sql(
      """create table faceword.faceword_picture_target(
        |  picture_exposure_cnts int comment '图文曝光'
        |  ,picture_pv int comment '图文pv'
        |  ,picture_uv int comment '图文uv'
        |  ,picture_ctr int comment '图文ctr'
        |  ,read_user_avg double comment '人均阅读量'
        |  ,real_read_user_avg double comment '人均真实图文阅读'
        |) partitioned by (dt String) stored as orc""".stripMargin)

    spark.close()
  }
}
