#!/usr/bin/env bash

# config

# Directory of list files recording which html files were generated
# (one list per parser run, named by epoch timestamp).
list_dir="/data/webapps/parser.cms.duowan.com/generate-file-log/"
# Local directory holding the generated html files.
local_base_dir=/data2/www/cms.duowan.com/
# Local sync root that rsync pushes from (see nginx_fail_handler).
local_sync_dir=/data1/www/cms.duowan.com/
# Log file path.
log_path=./sync_checker.log
# Which game-zone subdomains to sync.
# NOTE(review): nds.duowan.com is listed twice; a few entries carry
# trailing whitespace — harmless, since the list is word-split on use.
sync_subdomains="
qqxl.duowan.com
nba2k.duowan.com
dt2.duowan.com
d2.duowan.com
c9.duowan.com
rift.duowan.com
wot.duowan.com
pc.duowan.com
an.duowan.com
apple.duowan.com
mt.duowan.com
iphone.duowan.com
ipad.duowan.com
coc.duowan.com
nds.duowan.com
psp.duowan.com
newgame.duowan.com
aion.duowan.com
dnf.duowan.com	
df.duowan.com
wulin2.duowan.com
xunxian.duowan.com
jxsj.duowan.com
tx2.duowan.com 
tx3.duowan.com
dn.duowan.com
jf.duowan.com
xyq.duowan.com
xa.duowan.com
jx3.duowan.com
zx.duowan.com
zt2.duowan.com
bns.duowan.com
ssf4.duowan.com
tv.duowan.com
mhp3.duowan.com
mhp4.duowan.com
much.duowan.com
nds.duowan.com
3ds.duowan.com
psv.duowan.com
"


# NOTE(review): appears unused in this file — candidate for removal.
check_paths="221.204.223.154:/data/webapps/:nginx_fail_handler"

# Servers to check, one per line.
# Format: ip:base_dir:fail_handler
# NOTE(review): appears unused in this file; the loop below iterates
# sync_servers instead — confirm before removing.
paths="221.204.223.154:/data/webapps/:nginx_fail_handler
113.108.228.198:/data/webapps/:nginx_fail_handler
183.61.6.23:/data/webapps/:nginx_fail_handler
119.188.90.76:/data/webapps/:nginx_fail_handler
115.238.171.143:/data/webapps/:nginx_fail_handler
218.60.98.93:/data/webapps/:nginx_fail_handler
115.238.171.183:/data/webapps/:nginx_fail_handler
"

# Sync target servers, one ip per line (rsync targets).
sync_servers="221.204.223.154
113.108.228.198
183.61.6.23
119.188.90.76
115.238.171.143
218.60.98.93
115.238.171.183
183.61.143.112
"

 
# Squid cache fronts to PURGE after a re-sync, one ip per line.
squids="58.215.175.226
182.118.1.98
221.204.223.155
221.204.223.152
121.9.240.35
113.108.228.199
"

# code

# Holds the previous function's "return value"; functions without a
# result must not modify it.
last_result=''
# System time used for daily log rotation; only write_log may modify it.
last_date=`date +%s`

# 从类似qqxl/index.html的文件名中得到对应url的Host。比如
# qqxl/index.html对应的Host是qqxl.duowan.com
# Derive the url Host from a generated-file path. e.g. for
# "qqxl/index.html" the Host is "qqxl.duowan.com".
# Argument: $1 - filename relative to the webroot
# Result is returned via the global $last_result.
function get_subdomain(){
	# 'local' added: the original leaked $filename into global scope.
	local filename=$1
	last_result=${filename%%/*}.duowan.com
}

# 从类似qqxl/index.html的文件名中得到对应url的path。比如
# qqxl/index.html对应的path是/index.html
# Derive the url path from a generated-file path. e.g. for
# "qqxl/index.html" the path is "/index.html".
# Argument: $1 - filename relative to the webroot
# Result is returned via the global $last_result (empty string when the
# argument has no '/').
function get_path(){
	# 'local' added: the original leaked $left/$num/$path into global scope.
	local filename=$1
	local prefix=${filename%%/*}
	# Everything after the first path component, keeping the leading slash.
	last_result=${filename:${#prefix}}
}

# Nginx机器检查失败后的处理函数
# 参数：$1 ip
#	$2 url (lol.duowan.com/index.html)
# Handler run when a Nginx server failed the check: re-push the file.
# Arguments: $1 - url path relative to $local_sync_dir (e.g. lol.duowan.com/index.html)
#            $2 - target server ip
# NOTE(review): the original header comment listed the arguments in the
# opposite order ($1 ip, $2 url); the code — and the caller check_file —
# pass url first, so the comment was wrong.
function nginx_fail_handler(){
	local url=$1
	local ip=$2
	local work_dir
	work_dir=`pwd`

	# Run rsync from the sync root so -R preserves the relative path.
	cd "$local_sync_dir" || { write_log "nginx_fail_handler: cannot cd to $local_sync_dir"; return 1; }
	nohup rsync -vrz -R "$url" "release@$ip::release_code/" --password-file=/etc/rsyncd_users
	cd "$work_dir" || return 1
	# Log the command actually executed (the original log line claimed a
	# backgrounded, redirected invocation that never happened).
	write_log "rsync -vrz -R $url release@$ip::release_code/ --password-file=/etc/rsyncd_users"
}

# 刷新全部squid机器
# 参数：$1 url (http://lol.duowan.com/index.html)
# Purge a url from every configured squid front.
# Argument: $1 - full url, e.g. http://lol.duowan.com/index.html
function flush_squid(){
	local target_url=$1
	local squid_ip
	for squid_ip in $squids
	do
		squid_flush_handler "$squid_ip" "$target_url"
	done
}


# 刷新Squid的处理函数
# 参数：$1 ip
#	$2 url (http://lol.duowan.com/index.html)
# Purge one url from one squid instance via squidclient.
# Arguments: $1 - squid ip
#            $2 - full url (http://lol.duowan.com/index.html)
function squid_flush_handler(){
	local ip=$1
	local url=$2

	# FIX: the message previously named "squid_fail_handler", a function
	# that does not exist here — corrected to the actual function name.
	echo "squid_flush_handler: $ip $url"
	local command="squidclient -m PURGE -h $ip -p 80 $url"
	# Intentional unquoted expansion: word-split the command string.
	$command
	local result=$?
	write_log "execute: $command result: $result"
}

# 会自动在行首加入时间
# Append a line to $log_path, prefixed with the current timestamp.
# When the calendar day changes relative to $last_date the log is
# truncated (rotation by emptying; archiving via mv is deliberately
# disabled below). Mutates the global $last_date on rotation.
function write_log(){
	# 'local' added: the original set the global $line, a clobber hazard
	# for any caller using the same variable name.
	local line=$1
	local current_day last_day
	current_day=`date +%d`
	last_day=`date +%d -d "@$last_date"`

	if [ "$current_day" -ne "$last_day" ]
	then
		#last_day_string=`date +"%Y-%m-%d" -d "@$last_date"`
		#mv $log_path $log_path.$last_day_string
		echo "" > "$log_path"
		last_date=`date +%s`
	fi

	local date_string
	date_string=`date +"%Y-%m-%d %H:%M:%S"`
	echo -e "$date_string: $line" >> "$log_path"
}

# 检查远程服务器上文件是否存在
# 参数：$1 full_path_of_file
#       $2 ip
# Check whether a file exists on a remote host (via the expect wrapper).
# Arguments: $1 - full path of the file on the remote host
#            $2 - host/ip
# Sets $last_result to 'yes' or 'no'. Best effort: any ls output not
# containing "No such file or directory" counts as existing.
function remote_file_exist(){
	local filename=$1
	local host=$2
	last_result='yes'

	# FIX: the original shadowed the conventional $pwd name with a
	# variable; use a scoped name and quote all expansions.
	local identity
	identity="`pwd`/Identity"
	local output
	output=`./remote_execute.expect "$identity" "$host" ls "$filename" 2>/dev/null`
	if echo -e "$output" | grep -q "No such file or directory"
	then
		last_result='no'
	fi
}

# 计算远端文件的md5值
# 参数：$1 full_path_of_file
#       $2 ip
# Compute the md5 digest of a file on a remote host.
# Arguments: $1 - full path of the file on the remote host
#            $2 - host/ip
# Sets $last_result to the md5 hex digest.
function remote_file_md5sum(){
	local filename=$1
	local host=$2
	local identity
	identity="`pwd`/Identity"

	# BUG FIX: the original passed the undefined $full_filename to
	# md5sum, so the intended path was never hashed; use $filename.
	local md5sum
	md5sum=`./remote_execute.expect "$identity" "$host" md5sum "$filename"`
	# Last output line is "<md5>  <path>"; keep the digest field only.
	# (the original's "tail -1 | head -1" was redundant)
	md5sum=`echo -e "$md5sum" | tail -1 | awk '{print $1}'`
	last_result=$md5sum
}

# 检查文件的同步情况，通过md5sum判断文件是否一致。
# 参数：$1 filename 比如：qqxl/index.html
# Push a generated file to every configured sync server.
# Argument: $1 - filename relative to the webroot, e.g. qqxl/index.html
# NOTE(review): the original comment claimed an md5sum comparison, but
# the function unconditionally re-pushes the file to all $sync_servers.
function check_file(){
	local filename=$1
	get_subdomain "$filename"
	local subdomain=$last_result
	get_path "$filename"
	local path=$last_result

	local ip
	for ip in $sync_servers
	do
		nginx_fail_handler "$subdomain$path" "$ip"
	done
}

# 检查列表文件中记录的文件的同步情况
# Walk a generated-file list and re-sync every entry whose subdomain is
# in the $sync_subdomains whitelist.
# Argument: $1 - path of the list file (one filename per line)
function check_list(){
	local list_filename=$1
	local line s subdomain
	# FIX: replaced the `for line in \`cat\`` antipattern with a proper
	# line-oriented read (also handles a missing trailing newline);
	# dropped the dead $unchecked flag and break out after a match.
	while IFS= read -r line || [ -n "$line" ]
	do
		[ -n "$line" ] || continue
		get_subdomain "$line"
		subdomain=$last_result

		for s in $sync_subdomains
		do
			if [ "$s" == "$subdomain" ]
			then
				check_file "$line"
				break
			fi
		done
	done < "$list_filename"
}

# Main loop: poll $list_dir forever; any list file generated within the
# last 120 seconds is (re)processed. List filenames are epoch
# timestamps, so name > (now - 120) selects recent files.
# NOTE(review): the original comment said "one minute" but the code
# window is 120 seconds — the code value is kept.
while :
do
	# FIX: iterate a glob instead of parsing `ls` output.
	for list_path in "$list_dir"*
	do
		[ -e "$list_path" ] || continue
		generate_date=${list_path##*/}

		cutoff=$(( `date +%s` - 120 ))
		if [ "$generate_date" -gt "$cutoff" ]
		then
			check_list "$list_path"
		fi
	done
	# FIX: brief pause — the original busy-looped at full CPU between polls.
	sleep 1
done

