#! /bin/bash
# Sqoop full/incremental import helper for the gmall MySQL database.
# Usage: script <table_flag> [business_date]
#   $2 - business date (YYYY-MM-DD); defaults to yesterday when omitted.

APP=gmall
sqoop=/opt/bdp/sqoop/bin/sqoop

# Pick the business date: explicit second argument wins, otherwise
# default to yesterday (the usual T+1 batch date).
if [ -n "$2" ]; then
	do_date=$2
else
	# $(...) is preferred over legacy backticks (nests cleanly, readable)
	do_date=$(date -d '-1 day' +%F)
fi

# Import one MySQL table into HDFS via Sqoop, then build LZO indexes.
#   $1 - table name (used only to build the HDFS target directory)
#   $2 - SQL query selecting the rows to import; the mandatory
#        "$CONDITIONS" placeholder is appended here.
# Globals read: APP, sqoop, do_date
import_data(){
$sqoop import \
--connect "jdbc:mysql://hadoop101:3306/$APP" \
--username root \
--password 123456 \
--target-dir "/origin_data/$APP/db/$1/$do_date" \
--delete-target-dir \
--query "$2 and \$CONDITIONS" \
--num-mappers 1 \
--fields-terminated-by '\t' \
--compress \
--compression-codec lzop \
--null-string '\\N' \
--null-non-string '\\N'

# Index the freshly written .lzo files so MapReduce can split them.
# NOTE: the trailing backslash below is required — without it the jar
# runs with no main class and the indexer line executes on its own.
hadoop jar /opt/bdp/hadoop-3.1.3/share/hadoop/common/hadoop-lzo-0.4.20.jar \
com.hadoop.compression.lzo.DistributedLzoIndexer "/origin_data/$APP/db/$1/$do_date"
}



#! /bin/bash
# Start/stop the log-collection Flume agents (file -> Kafka)
# on hadoop101 and hadoop102.
# Usage: script {start|stop}

case $1 in
"start"){
	for i in hadoop101 hadoop102
	do
		echo "-------------启动 $i 采集flume--------------"
		# The whole remote command must stay on ONE line inside the
		# quotes: a newline inside the string splits the remote command.
		ssh $i "nohup /opt/bdp/flume/bin/flume-ng agent --conf-file /opt/bdp/flume/conf/file-flume-kafka.conf --name a1 -Dflume.root.logger=INFO,LOGFILE >/opt/bdp/flume/log1.txt 2>&1 &"
	done
};;

"stop"){
	for i in hadoop101 hadoop102
	do
		echo " ----------停止$i 采集flume--------------"
		# \$2 is escaped so awk's field (the PID) is expanded on the
		# remote host, not locally. xargs -n1 kills each PID separately.
		ssh $i "ps -ef | grep file-flume-kafka | grep -v grep | awk '{print \$2}' | xargs -n1 kill -9"
	done
};;

esac



#! /bin/bash
# Start/stop the log-consumer Flume agent (Kafka -> HDFS) on hadoop103.
# Usage: script {start|stop}

case $1 in
"start"){
		for i in hadoop103
		do
			echo "---------------启动 $i 消费flume-------------"
			# Keep the remote command on one line; also fixed the log
			# level typo (INTO -> INFO) so Flume logging works.
			ssh $i "nohup /opt/bdp/flume/bin/flume-ng agent --conf-file /opt/bdp/flume/conf/kafka-flume-hdfs.conf --name a1 -Dflume.root.logger=INFO,LOGFILE >/opt/bdp/flume/log2.txt 2>&1 &"
		done
};;
"stop"){
		for i in hadoop103
		do
				echo "---------停止$i消费flume---------------"
				# xargs -n1 (not the invalid -nl) kills each PID in turn.
				ssh $i "ps -ef | grep kafka-flume-hdfs | grep -v grep | awk '{print \$2}' | xargs -n1 kill"
		done
};;
esac



































