#!/bin/bash
set -euo pipefail

# Determine the sync date (which day's log partition to process):
#   1. If the first CLI argument is given, use it (expected format: YYYY-MM-DD).
#   2. Otherwise default to yesterday's date.
# ${1:-} keeps the check safe under `set -u` when no argument is passed.
if [ -n "${1:-}" ]; then
  data_date="$1"
else
  data_date=$(date -d "-1 days" +%F)
fi

# 1 - Data cleansing: rebuild the temp table with only valid event rows
#     from the ODS layer for the target day's partition.
#     Rows are kept only when deviceid / eventid / sessionid are non-null,
#     non-empty strings and the properties map is non-null and non-empty.
# NOTE(review): ${data_date} is interpolated directly into the SQL text —
#     assumes it is a trusted YYYY-MM-DD value from the operator, not
#     untrusted input; verify callers.
EVENT_LOG_CLEANED_SQL="
DROP TABLE IF EXISTS jtp_app_tmp.tmp_event_log_cleaned ;
CREATE TABLE jtp_app_tmp.tmp_event_log_cleaned
AS
SELECT account,
       appid,
       appversion,
       carrier,
       deviceid,
       devicetype,
       eventid,
       ip,
       latitude,
       longitude,
       nettype,
       osname,
       osversion,
       properties,
       releasechannel,
       resolution,
       sessionid,
       \`timestamp\`,
       dt
FROM jtp_app_ods.ods_event_app_log
WHERE dt = '${data_date}'
  AND deviceid IS NOT NULL AND length(deviceid) != 0
  AND eventid IS NOT NULL AND length(eventid) != 0
  AND sessionid IS NOT NULL AND length(sessionid) != 0
  AND properties IS NOT NULL AND size(properties) != 0
;
"


# 2 - Session splitting: break each raw sessionid into finer-grained sessions
#     using an inactivity gap.
#     Inner query t1: diff_ts = gap (ms) to the previous event in the same
#       sessionid (lag default 0L makes the first event's gap effectively huge,
#       so it always starts a new session).
#     Middle query t2: when the gap is >= 10 minutes, mark a new session start
#       id as '<sessionid>_<timestamp>'; otherwise NULL.
#     Outer query: last_value(..., TRUE) ignores NULLs, so every event inherits
#       the most recent session-start id at or before it -> new_session_id.
EVENT_LOG_SESSION_SQL="
DROP TABLE IF EXISTS jtp_app_tmp.tmp_event_log_session ;
CREATE TABLE jtp_app_tmp.tmp_event_log_session
AS
SELECT
    account, appid, appversion, carrier, deviceid, devicetype, eventid
    , ip, latitude, longitude, nettype, osname, osversion, properties
    , releasechannel, resolution, sessionid, \`timestamp\`, dt
    , last_value(start_session_id, TRUE) OVER (PARTITION BY sessionid ORDER BY \`timestamp\`) AS new_session_id
FROM (
    SELECT
        *
        , if(diff_ts / 1000 / 60 >= 10, concat(sessionid, '_', \`timestamp\`), NULL) AS start_session_id
    FROM (
          SELECT
              *
              , (\`timestamp\` - lag(\`timestamp\`, 1, 0L) OVER (PARTITION BY sessionid ORDER BY \`timestamp\`)) AS diff_ts
          FROM jtp_app_tmp.tmp_event_log_cleaned
      ) t1
 ) t2
;
"


# 3 - Field enrichment: attach administrative-area columns (province, city,
#     district, street) by geohash lookup.
#     Joins on jtp_app_dim.convert_geohash(latitude, longitude, 6) — a
#     project UDF; presumably it encodes the coordinates to a precision-6
#     geohash matching dim_md_areas.geo_hash (verify UDF contract).
#     LEFT JOIN keeps events whose coordinates have no dimension match
#     (area columns become NULL).
EVENT_LOG_AREA_SQL="
DROP TABLE IF EXISTS jtp_app_tmp.tmp_event_log_area ;
CREATE TABLE jtp_app_tmp.tmp_event_log_area
AS
SELECT
    t1.*
    , t2.province, t2.city, t2.district, t2.street
FROM jtp_app_tmp.tmp_event_log_session t1
    LEFT JOIN jtp_app_dim.dim_md_areas t2 ON jtp_app_dim.convert_geohash(latitude, longitude, 6) = t2.geo_hash
;
"

# 4 - Load the enriched rows into the DWD detail fact table, overwriting the
#     day's partition (dt = data_date).
#     new_session_id (from step 2) is written in the sessionid position.
#     NOTE(review): 'street' is produced by the area step but not inserted
#     here — confirm that is intentional (target schema may not include it).
EVENT_LOG_DETAIL_SQL="
INSERT OVERWRITE TABLE jtp_app_dwd.dwd_event_log_detail PARTITION (dt = '${data_date}')
SELECT
    account, appid, appversion, carrier, deviceid, devicetype, eventid
    , ip, latitude, longitude, province, city, district, nettype
    , osname, osversion, properties, releasechannel, resolution, new_session_id, \`timestamp\`
FROM jtp_app_tmp.tmp_event_log_area
;
"

# Execute all four SQL batches in a single beeline session so the temp
# tables created by earlier statements are visible to later ones.
# Fail loudly with a diagnostic on stderr instead of exiting silently.
# NOTE(review): the JDBC URL and user are hard-coded — consider env vars.
if ! /opt/module/spark/bin/beeline -u jdbc:hive2://node101:10001 -n bwie \
  -e "${EVENT_LOG_CLEANED_SQL}${EVENT_LOG_SESSION_SQL}${EVENT_LOG_AREA_SQL}${EVENT_LOG_DETAIL_SQL}"; then
  echo "ERROR: event log ETL failed for dt=${data_date}" >&2
  exit 1
fi

