-- @author wanglizhou
-- @date 2021.10.29
-- Small-traffic (supplementary) search-log data processing.

use hdp_lbg_supin_zplisting;
-- Disable automatic conversion to map-side joins; use common (shuffle) joins.
set hive.auto.convert.join=false;
-- Enable dynamic partitioning so the `source` partition value below can be
-- taken from the data; nonstrict mode allows it without a static partition key.
set hive.exec.dynamic.partition=true;
set hive.exec.dynamic.partition.mode=nonstrict;
-- Run independent job stages in parallel, up to 8 at a time.
set hive.exec.parallel=true;
set hive.exec.parallel.thread.number=8;
set mapreduce.job.queuename=root.offline.hdp_lbg_supin.normal;

-- Custom UDF that normalizes the raw `tjfrom` string into a JSON map so that
-- individual fields can be pulled out with get_json_object in the query below.
add jar viewfs://58-cluster/home/hdp_lbg_supin/resultdata/zhaopin/dp/app/udf/udf_str2json_1.jar;
create temporary function tjfrom_format AS 'com.bj58.udf.JsonToMap01';

-- Flatten each raw supplement-search log row into one output row per displayed
-- info item (explode of the infojson array), extract per-item ad fields from
-- the item JSON, and overwrite the dt='${dateSuffix}' partition, with `source`
-- filled dynamically from the data.
-- NOTE: the INSERT column mapping is positional — do not reorder columns;
-- `source` must remain the last selected column to feed the dynamic partition.
insert overwrite table ods_zp_search_supplement_all_d partition(dt='${dateSuffix}',source)
select
    url,
    -- sid/pid are re-derived from the raw tjfrom string via the UDF here,
    -- not taken from the inner query's sid/pid columns (those feed the
    -- row_number partition key only). tjfrom_format is invoked once per line.
    get_json_object(tjfrom_format(tjfrom),'$.sid') ,
    get_json_object(tjfrom_format(tjfrom),'$.pid') ,
    displocal sloc,
    dispcate scate,
    uid as userid,
    useragent ua,
    imei,
    ip,
    logtime as stime,
    platform,
    refer httpreferer,
    os,
    id58 as cookieid,
    '-' as ptype,                    -- placeholder; not available in this source
    tjfrom,
    -- Per-item ad attributes pulled from the exploded infojson element.
    ad_id,
    get_json_object(infojs,'$.title') ad_title,
    get_json_object(infojs,'$.userid') ad_userid,
    get_json_object(infojs,'$.displocal') ad_local,
    get_json_object(infojs,'$.dispcate') ad_cate,
    get_json_object(infojs,'$.worklocal') ad_worklocal,
    get_json_object(infojs,'$.salary') ad_salary,
    get_json_object(infojs,'$.fuli') ad_welfare,
    get_json_object(infojs,'$.infoicon') ad_tag,
    get_json_object(infojs,'$.score') ad_score,
    get_json_object(infojs,'$.infotype') ad_disptype,
    get_json_object(infojs,'$.scoresource') ad_scoresource,
    get_json_object(infojs,'$.scoreabtest') ad_scoreabtest,
    get_json_object(infojs,'$.pos') ad_position,
    get_json_object(infojs,'$.videointer') ad_videointer,
    get_json_object(infojs,'$.infoadtypes') ad_producttypes,
    get_json_object(infojs,'$.liveness') ad_imonline,
    -- Rows here always come from result pages, so the no-result fields are
    -- hard-coded to the 'hasResult' sentinel.
    'hasResult' noresult_sid,
    'hasResult' noresult_slot,
    'hasResult' noresult_disptype,
    spm,
    utm_source utmsource,
    lon longitude,
    lat latitude,
    version versioncode,
    xforward,
    -- Placeholders for fields not supplied by this data source.
    '-' search_abtest,
    '-' source_abtest,
    '-' entrance_param,
    '-' tag_param,
    '-' abtest_json,
    get_json_object(infojs,'$.pageno') pageno,
    get_json_object(infojs,'$.block_name') block_name,
    get_json_object(infojs,'$.lego_tid') ad_lego_tid,
    get_json_object(infojs,'$.item_type') ad_item_type,
    get_json_object(infojs,'$.make_up') ad_make_up,
    get_json_object(infojs,'$.abtest_json')  ad_abtest_json,
    '-' ad_tag_type,
    '-' ad_tag_cateid,
    '-' ad_tag_id,
    '-' ad_tag_name,
    '-' ad_tag_pos,
    '-' cuserportrait,
    abtest_string,
    infojs,
    rn,
    extend_json,
    source                           -- last: feeds the dynamic `source` partition
from (
    select *,
        -- Rank rows so the earliest tjfrom of the day per (uid, ad_id, sid)
        -- can be treated as canonical, i.e. as close as possible to the
        -- tjfrom present when the session was created.
        row_number() over(partition by uid,ad_id,sid order by logtime) as rn
        from(
        select
        url,
        sid,
        pid,
        displocal,
        dispcate,
        uid,
        useragent,
        imei,
        ip,
        logtime,
        platform,
        refer,
        os,
        id58,
        filterlist,
        spm,
        utm_source,
        lon,
        lat,
        version,
        xforward,
        source,
        abtest_string,
        extend_json,
        -- One infojson element (infojs) per output row, from the lateral view;
        -- ad_id and tjfrom are lifted out of it for the window/partition logic.
        get_json_object(infojs,'$.infoid') ad_id,
        get_json_object(infojs,'$.tjfrom') tjfrom,
        infojs
    from hdp_lbg_supin_zplisting.raw_zp_search_supplement_all_d
    lateral view explode(infojson) v as infojs
    where dt='${dateSuffix}'
    -- When other data sources are onboarded, enable the condition below,
    -- because some fields of newly added sources follow different extraction
    -- logic than the zhuzhan side:
    --   and source in ('imcard','jlinfolist','gjqyinfolist','qyinfolist','liveinfolist')
    ) t
) t1
-- NOTE(review): the dedup filter below is disabled, so ALL ranked rows are
-- written and `rn` is exposed as an output column — presumably downstream
-- consumers filter rn=1 themselves; confirm before re-enabling.
--where rn=1
;
