#!/bin/bash
# Import script: load the AML source tables from Oracle into the Hive ODS
# layer via Sqoop, one partition per ETL date.
#
# Usage: ./import.sh [etl_date]
#   etl_date - partition date (YYYY-mm-dd); defaults to yesterday.
set -euo pipefail

# Tables to import.
tables=("AML_CUSTOMER_MASTER" "AML_ACCOUNT_MASTER" "AML_TRANSACTION_DETAIL" "AML_UBO_INFO" "AML_WATCHLIST_MASTER" "AML_WATCHLIST_DETAIL" "AML_SCREENING_RESULT" "AML_ALERT" "AML_SUSPICIOUS_TXN_REPORT" "AML_MONITORING_RULE")

# Partition date: take $1 when supplied, otherwise default to yesterday.
# (The original comment promised an optional argument but never read it.)
etl_date=${1:-$(date -d "yesterday" +%Y-%m-%d)}

echo "导入数据ing"
for table in "${tables[@]}"; do
    # NOTE(review): plain-text --password is visible in `ps` output;
    # prefer --password-file in production.
    # Long options must be "--opt" with no space after the dashes;
    # the originals ("-- connect", "- m") were rejected by sqoop.
    sqoop import \
        --connect jdbc:oracle:thin:@192.168.4.79:1521:orcl \
        --username JY_USER \
        --password 123456 \
        --table "$table" \
        --hcatalog-database ods \
        --hcatalog-table "ods_$table" \
        --hive-partition-key "etl_date" \
        --hive-partition-value "$etl_date" \
        -m 1
done
echo "数据导入完成"


# ===== 导出脚本 (export script below) =====
#!/bin/bash
# Export script: push ADS-layer result data from HDFS into Oracle via Sqoop.
set -euo pipefail

# Target Oracle tables to export into.
arr=(khfx jyfx jghg yjgz)

# HDFS to Oracle
echo "开始执行 HDFS to Oracle 导出..."
for i in "${arr[@]}"; do
    # The original left the --connect URL's opening quote unclosed, which
    # swallowed the rest of the command; the quote is now balanced.
    # NOTE(review): every table exports from the same --export-dir path —
    # confirm each table's data really lives in dc_zjy.txt.
    sqoop export \
        --connect "jdbc:oracle:thin:@192.168.4.79:1521:orcl" \
        --username "scott" \
        --password "scott" \
        --table "$i" \
        --export-dir /user/hive/warehouse/ads.db/dc_zjy.txt \
        --input-fields-terminated-by ',' \
        -m 1
done
echo "数据导出完成"

# Hive to Oracle
echo "开始执行 Hive to Oracle 导出..."
for i in "${arr[@]}"; do
    # Fixed: the --connect URL's opening quote was never closed, breaking
    # the command. Also made consistent with the first export loop by
    # stating --input-fields-terminated-by ',' (sqoop's default, so this
    # is behavior-preserving) and an explicit single mapper.
    sqoop export \
        --connect "jdbc:oracle:thin:@192.168.4.79:1521:orcl" \
        --username "scott" \
        --password "scott" \
        --table "$i" \
        --export-dir /user/hive/warehouse/ads.db/dc_zjy.txt \
        --input-fields-terminated-by ',' \
        -m 1
done
echo "数据导出完成"