#!/bin/bash
###
# @Author: apophis
# @File: sqoop.sh
# @Time: 2025/1/22 17:29
# @Description: 工程描述
###
now=$(date +"%Y-%m-%d %H:%M:%S")
# yesterday=$(date -d "-1 day ${now}" "+%Y-%m-%d")
cd "$(dirname "$0")" || exit
filepath=$(pwd)
echo "当前时间: ${now} *** 进入 ${filepath} 目录"
# here put the import lib
echo "开始上传文件"
hdfs dfs -mkdir -p /data/input/
hdfs dfs -rm -r /data/input/*
hdfs dfs -put account.txt /data/input/
hdfs dfs -put borrow.txt /data/input/
hdfs dfs -put dormitory.txt /data/input/
hdfs dfs -put dormitory_account.txt /data/input/
hdfs dfs -put pay.txt /data/input/
hdfs dfs -put repair.txt /data/input/
hdfs dfs -put role.txt /data/input/
hdfs dfs -put tower.txt /data/input/
hdfs dfs -ls /data/input/
echo "上传文件结束"


echo "开始进行数据清洗"
hadoop jar mapreduce-job-jar-with-dependencies.jar /data/input/account.txt /data/output/account/
hadoop jar mapreduce-job-jar-with-dependencies.jar /data/input/borrow.txt /data/output/borrow/
hadoop jar mapreduce-job-jar-with-dependencies.jar /data/input/dormitory.txt /data/output/dormitory/
hadoop jar mapreduce-job-jar-with-dependencies.jar /data/input/dormitory_account.txt /data/output/dormitory_account/
hadoop jar mapreduce-job-jar-with-dependencies.jar /data/input/pay.txt /data/output/pay/
hadoop jar mapreduce-job-jar-with-dependencies.jar /data/input/repair.txt /data/output/repair/
hadoop jar mapreduce-job-jar-with-dependencies.jar /data/input/role.txt /data/output/role/
hadoop jar mapreduce-job-jar-with-dependencies.jar /data/input/tower.txt /data/output/tower/
echo "数据清洗结束"
