#!/usr/bin/env bash
# **  文件名称: ads_out_station_day_top.sh
# **  创建日期: 2020年8月22日
# **  编写人员: qinxiao
# **  输入信息:
# **  输出信息:
# **
# **  功能描述:地铁出站数据 (metro exit-station top station-pairs per day)
# **  处理过程:
# **  Copyright(c) 2016 TianYi Cloud Technologies (China), Inc.
# **  All Rights Reserved.
#***********************************************************************************

#***********************************************************************************
#==修改日期==|===修改人=====|======================================================|
#
#***********************************************************************************



# Fail fast: abort on any command error, unset variable, or pipeline failure,
# so the scheduler sees a non-zero exit instead of a silent partial run.
set -euo pipefail

# Resolve the directory this script lives in (robust to being sourced/symlinked paths).
shell_home="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Run from the script directory so any relative paths resolve consistently.
# Quoted and failure-checked: an unquoted, unchecked cd would keep going from
# the wrong directory if the path contained spaces or did not exist.
cd "$shell_home" || exit 1

# Sentinel for unnamed stations: the source table stores them as the literal
# two-character string "" (quote characters), so the SQL filters that value.
# NOTE(review): presumably set by the upstream DWD load — confirm with producer.
n='""'

# Partition day to process (e.g. 20200822). Required: an empty value would
# overwrite a bogus empty partition, so abort with a usage message instead.
day=${1:?usage: $0 <day>}

# Top 100 origin->destination station pairs for the given day.
# LEAD() pairs each '地铁入站' (metro entry) record with the same card's next
# record ordered by deal_date; only entry->exit ('地铁出站') pairs are kept.
spark-sql \
--master yarn-client \
--num-executors 2 \
--executor-memory 2G \
--executor-cores 1 \
--conf spark.sql.shuffle.partitions=4 \
-e "

INSERT OVERWRITE TABLE ads.ads_stations_send_passengers_day_top PARTITION(DAY = '${day}')
select
concat(station ,'>',next_station) as short_stations,
count(1) as c
from (
select
card_no,
deal_date,
deal_type,
station,
LEAD(deal_type,1) over (partition by card_no order by deal_date) next_deal_type,
LEAD(station,1) over (partition by card_no order by deal_date) next_station
from
dwd.dwd_fact_szt_in_out_detail
where day='${day}'
) as a
where deal_type='地铁入站'
and next_deal_type='地铁出站'
and station <> '${n}'
and next_station <> '${n}'
group by concat(station ,'>',next_station)
order by c desc
limit 100

"







