#!/bin/bash
set -euo pipefail

# Data sync date: selects which day's log data to load.
#  1. Taken from the first script argument if supplied;
#  2. otherwise defaults to yesterday (GNU date, YYYY-MM-DD).
if [ -n "${1:-}" ]; then
  data_date="$1"
else
  data_date=$(date -d "-1 days" +%F)
fi

# SQL statement for this load:
#  1. (Re)registers the jtp_app_dim.convert_geohash UDF from the shared
#     jar on HDFS — presumably it encodes a lat/lng pair into a geohash
#     at the given precision (here 6); confirm against the UDF source.
#  2. Snapshots the day's partition of jtp_app_ods.ods_md_areas into a
#     CTE, then walks the area hierarchy by self-joining on PARENTID
#     (roots have PARENTID = 0): province -> city -> district -> street.
#  3. Overwrites the matching dt partition of jtp_app_dim.dim_md_areas,
#     keeping only street rows that have BD09 coordinates.
# NOTE: ${data_date} is expanded by the shell inside this double-quoted
# string before the SQL is sent to beeline.
DIM_MD_AREAS_SQL="
DROP FUNCTION IF EXISTS jtp_app_dim.convert_geohash ;

CREATE FUNCTION jtp_app_dim.convert_geohash
    AS 'net.bwie.jtp.qpp.udf.ConvertGeoHash'
    USING JAR 'hdfs://node101:8020/warehouse/app_jars/zg6-jtp-log-1.0.0.jar';

WITH tmp_area AS (
    SELECT
        ID, AREANAME, PARENTID, SHORTNAME, LEVEL, FLAG, WGS84_LNG, WGS84_LAT, GCJ02_LNG, GCJ02_LAT, BD09_LNG, BD09_LAT
    FROM jtp_app_ods.ods_md_areas
    WHERE dt = '${data_date}'
)
INSERT OVERWRITE TABLE jtp_app_dim.dim_md_areas PARTITION (dt = '${data_date}')
SELECT
    t6.ID AS id
     , t5.province, t5.city, t5.district
     , t6.AREANAME AS street
     , t6.BD09_LNG
     , t6.BD09_LAT
     , jtp_app_dim.convert_geohash(BD09_LAT, BD09_LNG, 6) AS geo_hash
FROM (
         SELECT
             t3.province
              , t3.city
              , t4.AREANAME AS district
              , t4.ID
         FROM (
                  SELECT
                      t1.province
                       , t2.ID
                       , t2.AREANAME AS city
                  FROM (
                           SELECT
                               ID
                                , AREANAME AS province
                           FROM tmp_area
                           WHERE PARENTID = 0
                       ) t1
                           LEFT JOIN tmp_area t2 ON t1.ID = t2.PARENTID
              ) t3
                  LEFT JOIN tmp_area t4 ON t3.ID = t4.PARENTID
     ) t5
         LEFT JOIN tmp_area t6 ON t5.ID = t6.PARENTID
WHERE t6.BD09_LAT IS NOT NULL  AND t6.BD09_LNG IS NOT NULL
;
"

# Execute the SQL through the Spark Thrift Server via beeline.
# Propagate failure explicitly: without this check the script exits 0
# even when the load fails, which would mislead any scheduler running it.
if ! /opt/module/spark/bin/beeline -u jdbc:hive2://node101:10001 -n bwie -e "${DIM_MD_AREAS_SQL}"; then
  printf 'ERROR: dim_md_areas load failed for dt=%s\n' "${data_date}" >&2
  exit 1
fi

