#!/bin/bash
# Import AML source tables from Oracle into partitioned Hive ODS tables via Sqoop.
# Usage: ./import.sh [etl_date]
#   etl_date — partition date in YYYY-MM-DD form; defaults to yesterday.
set -euo pipefail

# Source table names to import; each lands in Hive table ods.ods_<TABLE>.
tables=("AML_CUSTOMER_MASTER" "AML_ACCOUNT_MASTER" "AML_TRANSACTION_DETAIL" "AML_UBO_INFO" "AML_WATCHLIST_MASTER" "AML_WATCHLIST_DETAIL" "AML_SCREENING_RESULT" "AML_ALERT" "AML_SUSPICIOUS_TXN_REPORT" "AML_MONITORING_RULE")

# Partition date: use the first CLI argument if given (the original comment
# promised this but never read $1), otherwise default to yesterday.
etl_date=${1:-$(date -d "yesterday" +%Y-%m-%d)}

# Import each table in turn; set -e aborts the run on the first failed import.
echo "开始"
for table in "${tables[@]}"; do
    # NOTE(review): credentials are hardcoded; prefer --password-file or
    # an options file so the password does not leak via `ps` / shell history.
    sqoop import \
        --connect jdbc:oracle:thin:@192.168.4.89:1521:orcl \
        --username fxq \
        --password 123456 \
        --table "$table" \
        --hcatalog-database ods \
        --hcatalog-table "ods_${table}" \
        --hive-partition-key etl_date \
        --hive-partition-value "$etl_date" \
        -m 1
done
echo "完成"


# ===== 导出脚本 (export script) =====
# NOTE(review): the lines below are a second, standalone script (it has its own
# shebang). Split it into its own file before running; this bare label would be
# executed as a command if the combined file were run as-is.
#!/bin/bash
# Export comma-delimited result data from HDFS into Oracle tables via Sqoop.
set -euo pipefail

# Target Oracle table names to export into.
arr=(khfx jyfx jghg yjgz)

# HDFS to Oracle.
# NOTE(review): every iteration exports the SAME file (dc_ms.txt) into a
# different table — confirm this is intended rather than per-table paths.
echo "开始执行 HDFS to Oracle 导出..."
for i in "${arr[@]}"; do
    # Original had an unclosed quote on --connect ("jdbc:... with no closing "),
    # which swallowed the rest of the command line; fixed here.
    sqoop export \
        --connect "jdbc:oracle:thin:@192.168.4.89:1521:orcl" \
        --username scott \
        --password scott \
        --table "$i" \
        --export-dir /user/hive/warehouse/ads.db/dc_ms.txt \
        --input-fields-terminated-by ',' \
        -m 1
done
echo "导完了"

# Hive to Oracle.
# Exports the Hive-warehouse file into the same Oracle tables.
# NOTE(review): unlike the loop above, this one specifies no
# --input-fields-terminated-by (Sqoop then expects its default delimiter) and
# no -m; if the source file is the same comma-delimited dc_ms.txt, this export
# will likely misparse rows — confirm the intended delimiter with the owner.
echo "开始执行 Hive to Oracle 导出..."
for i in "${arr[@]}"; do
    # Original had the same unclosed quote on --connect; fixed here.
    sqoop export \
        --connect "jdbc:oracle:thin:@192.168.4.89:1521:orcl" \
        --username scott \
        --password scott \
        --table "$i" \
        --export-dir /user/hive/warehouse/ads.db/dc_ms.txt
done
echo "导完了"