项目简介：计算客户存款、取款、转账等数据
把源数据从关系型数据库通过shell脚本封装sqoop抽取到hive中
rdbms-->sqoop-->hive ODS-->ETL-->hive DW-->hive DM
编写数据字典：
见excel：
创建表格：
-- Source (Oracle) table: customer transaction base records.
-- org_id / code_id are later joined to rdbms_org_info / rdbms_code_info.
create table rdbms_cust_info (
    cust_id   int,            -- customer id
    cust_name varchar2(100),  -- customer name (Chinese/English)
    org_id    int,            -- organization id, joins rdbms_org_info
    code_id   int,            -- business code id, joins rdbms_code_info
    amt       number,         -- transaction amount
    dt        date,           -- transaction date
    phone     char(11),       -- 11-digit phone number
    acctno    number,         -- account number
    yg_no     number          -- presumably employee number ("yuangong") -- confirm with data dictionary
);


-- Source (Oracle) table: organization (branch) dimension.
create table rdbms_org_info (
    org_id    int,            -- organization id (join key from rdbms_cust_info)
    org_name  varchar2(100),  -- organization name
    org_level varchar2(100),  -- organization level
    org_addr  varchar2(100)   -- organization address
);

-- Source (Oracle) table: business code dimension.
-- code_type carries the transaction type labels (取款, ATM取款, 存款, ...)
-- that the DM aggregation pivots on.
create table rdbms_code_info (
    code_id      int,            -- code id (join key from rdbms_cust_info)
    code_type    varchar2(100),  -- business type label
    code_comment varchar2(100)   -- free-text description
);

造数：见附页
--开始进行数据的全量抽取
用shell脚本封装sqoop语句把数据从rdbms抽取到hdfs中
--编写sqoop语句进行数据的全量抽取
#!/bin/bash
### Author: team 2418
### Date: 2025/2/20
### Purpose: full extraction from Oracle into Hive (ODS/DW) via sqoop,
### row-count verification, then creation of the DW landing table.
j="jdbc:oracle:thin:@192.168.1.57:1521/ORCL"
u="scott"
# NOTE(review): plaintext password in the script; prefer --password-file or -P.
p="123456"
l1="RDBMS_CUST_INFO"
l2="RDBMS_ORG_INFO"
l3="RDBMS_CODE_INFO"
db_1="ods18"
db_2="dw18"

# import_table <oracle_table> <hive_db> <label>
# One sqoop full import. FIX: the original only echoed $? and kept going,
# so a failed import silently cascaded into the later steps; abort instead.
import_table() {
    tbl="$1"
    db="$2"
    label="$3"
    echo "------------开始抽取${label}的数据------------"
    sqoop import \
        --hive-import \
        --connect "${j}" \
        --username "${u}" \
        --password "${p}" \
        --table "${tbl}" \
        --hive-database "${db}" \
        --fields-terminated-by ',' -m 1
    rc=$?
    echo ${rc}
    if [ ${rc} -ne 0 ]; then
        echo "抽取${tbl}失败，退出" >&2
        exit ${rc}
    fi
}

import_table "${l1}" "${db_1}" "客户基表"
import_table "${l2}" "${db_2}" "机构表"
# FIX: original message had a typo ("机业务码值表" -> "业务码值表").
import_table "${l3}" "${db_2}" "业务码值表"

echo "开始数据的验证"
# $(...) instead of deprecated backticks; counts are reported for eyeballing.
c=$(hive -e "set hive.exec.mode.local.auto=true;select count(1) from ${db_1}.${l1}")
c1=$(hive -e "set hive.exec.mode.local.auto=true;select count(1) from ${db_2}.${l2}")
c2=$(hive -e "set hive.exec.mode.local.auto=true;select count(1) from ${db_2}.${l3}")
echo "抽数完成，基表一共抽取：${c}条数据,机构表一共抽取：${c1}条数据,码表一共抽取:${c2}条数据"

echo "---开始创建dw层客户基表的落地表---------------"
# DW landing table: Oracle numerics map to double, dates to string, plus an
# etl_time audit column filled by the cleansing job.
hive -e "create table if not exists ${db_2}.${l1}(
cust_id   double,  
cust_name   string,    
org_id     double,      
code_id    double,      
amt   double,       
dt    string,       
phone   string,       
acctno    double,       
yg_no   double,
etl_time string
)
row format delimited fields terminated by ',';"
echo $?

--开始清洗ods层的数据
清洗规则：保留客户姓名中的中文和英文，保留手机号的数字
最后把hql封装在shell中
#!/bin/sh
### Cleanse the ODS customer base table into the DW layer.
### Rules: keep only Chinese/English letters in cust_name, digits in phone.
echo "开始清洗客户基表，抽取到dw层"
# FIX: the original character classes were '[^一-龥|a-z|A-Z]+' and
# '[^0-9]+' — inside [...] the '|' is a LITERAL character, so pipe
# characters leaked through the name cleansing. Drop the pipes.
hive -v -e "set hive.exec.mode.local.auto=true; \
insert overwrite table dw18.rdbms_cust_info
select 
    cust_id,  
    regexp_replace(cust_name,'[^一-龥a-zA-Z]+','') cust_name,    
    org_id,      
    code_id,      
    amt,       
    dt,       
    regexp_replace(phone,'[^0-9]+','') phone,       
    acctno,       
    yg_no,
    current_timestamp    etl_time
    from ods18.rdbms_cust_info;"
echo $?
echo "验证数据**************************"
# Cleansing is strictly 1:1, so DW and ODS row counts must match.
c=$(hive -e "set hive.exec.mode.local.auto=true;select count(1) from dw18.rdbms_cust_info")
echo "dw层的数据量为:${c}条"
c1=$(hive -e "set hive.exec.mode.local.auto=true;select count(1) from ods18.rdbms_cust_info")
echo "ods层的数据量为:${c1}条"
# FIX: '==' is a bashism and this script declares #!/bin/sh — use POSIX '='.
# FIX: exit non-zero on mismatch so a scheduler can detect the failure.
if [ "$c" = "$c1" ]; then
    echo "数据一致，清洗成功"
else
    echo "请检查数据"
    exit 1
fi

--DM层指标设计
mapping见附件

#!/bin/sh
### Create the DM-layer summary table dm18.cust_hz (per-branch totals per
### business type). Idempotent via "if not exists".
# Column naming (from the mapping sheet): sum_qk=取款, sum_aqk=ATM取款,
# sum_ack=ATM存款, sum_ck=存款, sum_dgzz=对公转账, sum_dszz=对私转账,
# sum_dzz=大额转账, sum_lc=理财, sum_dck=大额存款.
sql="create table if not exists dm18.cust_hz(
org_name	string,				
sum_qk		decimal(22,6),				
sum_aqk		decimal(22,6),				
sum_ack		decimal(22,6),				
sum_ck		decimal(22,6),				
sum_dgzz	decimal(22,6),			
sum_dszz	decimal(22,6),				
sum_dzz		decimal(22,6),				
sum_lc		decimal(22,6),				
sum_dck		decimal(22,6),
etl_time string	
)row format delimited fields terminated by ',';"
hive -e "${sql}"
rc=$?
echo ${rc}
# FIX: the original only echoed $? and carried on; abort on DDL failure so
# the metric computation does not run against a missing table.
if [ ${rc} -ne 0 ]; then
    echo "创建dm18.cust_hz失败" >&2
    exit ${rc}
fi

--进行指标的计算
先编写hql，然后封装在shell中
#!/bin/bash
### Compute per-branch (org_name) amount totals by business type and load
### them into dm18.cust_hz.
### - Customers whose org_id has no match in the org table fall into the
###   '其他渠道' ("other channel") bucket via the if() in the inner query.
### - Each sum_* column sums amt over exactly one code_type value.
hive -v -e "set hive.exec.mode.local.auto=true; \
insert overwrite table dm18.cust_hz
select org_name,
        cast(sum(if(code_type='取款',amt,0)) as decimal(22,6)) sum_qk,
        cast(sum(if(code_type='ATM取款',amt,0)) as decimal(22,6)) sum_aqk,
        cast(sum(if(code_type='ATM存款',amt,0)) as decimal(22,6)) sum_ack,
        cast(sum(if(code_type='存款',amt,0)) as decimal(22,6)) sum_ck,
        cast(sum(if(code_type='对公转账',amt,0)) as decimal(22,6)) sum_dgzz,
        cast(sum(if(code_type='对私转账',amt,0)) as decimal(22,6)) sum_dszz,
        cast(sum(if(code_type='大额转账',amt,0)) as decimal(22,6)) sum_dzz,
        cast(sum(if(code_type='理财',amt,0)) as decimal(22,6)) sum_lc,
        cast(sum(if(code_type='大额存款',amt,0)) as decimal(22,6)) sum_dck,
        current_timestamp etl_time
        from
(select
        a.*,
        if(b.org_name is not null,b.org_name,'其他渠道') org_name,
        c.code_type
        from dw18.rdbms_cust_info a left join 
        dw18.RDBMS_ORG_INFO b on a.org_id=b.org_id
        left join 
        dw18.rdbms_code_info c on a.code_id=c.code_id) a
        group by org_name;"
# FIX(consistency): every other step reports its exit status; this one
# silently ignored failures. Report and propagate the hive exit code.
rc=$?
echo ${rc}
exit ${rc}

--编写sqoop语句进行数据的导出，给报表组的同事提供数据进行报表的开发
-- Oracle target table for the sqoop export of dm18.cust_hz.
-- Column order must match the field order in the Hive export directory.
create table cust_hz (
    org_name  varchar2(50),  -- branch name, or '其他渠道' for unmatched orgs
    sum_qk    number,        -- total 取款 (withdrawals)
    sum_aqk   number,        -- total ATM取款 (ATM withdrawals)
    sum_ack   number,        -- total ATM存款 (ATM deposits)
    sum_ck    number,        -- total 存款 (deposits)
    sum_dgzz  number,        -- total 对公转账 (corporate transfers)
    sum_dszz  number,        -- total 对私转账 (personal transfers)
    sum_dzz   number,        -- total 大额转账 (large transfers)
    sum_lc    number,        -- total 理财 (wealth management)
    sum_dck   number,        -- total 大额存款 (large deposits)
    etl_time  varchar2(50)   -- load timestamp carried over from the DM layer
);

--编写shell脚本，封装sqoop导出的语句
#!/bin/sh
### Export the DM summary table from HDFS back to Oracle (CUST_HZ) so the
### reporting team can build reports on it.
j="jdbc:oracle:thin:@192.168.1.57:1521/ORCL"
u="scott"
# NOTE(review): plaintext password in the script; prefer --password-file or -P.
p="123456"
t="CUST_HZ"
# Default Hive warehouse path of dm18.cust_hz.
dir="/user/hive/warehouse/dm18.db/cust_hz"
# '\N' is Hive's default NULL marker; map it back to SQL NULL on export.
sqoop export \
--connect "${j}" \
--username "${u}" \
--password "${p}" \
--table "${t}" \
--export-dir "${dir}" \
--fields-terminated-by ',' \
--lines-terminated-by '\n' \
--input-null-string '\\N' \
--input-null-non-string '\\N'
# FIX(consistency): the other steps report their exit status; surface and
# propagate the sqoop exit code here too.
rc=$?
echo ${rc}
exit ${rc}