#!/bin/bash
#
# Sync cache service: listens for gearman jobs, regenerates the static
# HTML cache for beta.thethao.vnexpress.net and pushes it to the front
# web servers over rsync.
# @auth: tinhnv1@fpt.net

# Front servers that receive the generated cache (rsync destinations).
servers=(
	"synchtml@180.148.142.80:/data/www/public_html/beta.thethao.vnexpress.net/public"
	"synchtml@180.148.141.130:/data/www/public_html/beta.thethao.vnexpress.net/public"
)

# Cache / filesystem layout.
doc_root="/data/www/public_html/beta.thethao.vnexpress.net"
public_url="http://beta.thethao.vnexpress.net"
public_dir="$doc_root/public"
cache_dir="$public_dir/tin-tuc/"
base_dir="$doc_root/server/Job"
list_folders="$base_dir/logs/vnett_folders.txt"
list_files="$base_dir/logs/vnett_files.txt"
file_limit=500

# Gearman worker configuration.
gearman_client=/build/gearmand/bin/gearman
gearman_host=180.148.142.86
gearman_port=4730
gearman_func=vnett_sync_cache

# Main worker loop: block on a gearman job, parse its payload, update the
# tracked-url list, regenerate the static cache, and sync to front servers.
while : ; do
	# Block until the gearman worker hands us one job payload.
	input=$("$gearman_client" -w -h "$gearman_host" -p "$gearman_port" -N -c 1 -f "$gearman_func")
	# Payload arrives wrapped in double quotes; keep the text between them.
	data=$(echo "$input" | awk -F'"' '{print $2}')
	if [ -n "$data" ]; then
		# Payload format: <type>:vnett:<url>:vnett:<path>
		separator=":vnett:"
		type=$(echo "$data" | awk -F"$separator" '{print $1}')
		url=$(echo "$data" | awk -F"$separator" '{print $2}')
		path=$(echo "$data" | awk -F"$separator" '{print $3}')
		# Remove any existing occurrence of this url from the list, plus
		# blank lines. Fixed-string whole-line match (-Fx): the old
		# sed -i 's|$url||g' used single quotes, so $url was never
		# expanded and the dedupe silently did nothing; it would also
		# have broken on urls containing sed metacharacters.
		grep -vFx -- "$url" "$list_files" | grep -v '^$' > "$list_files.tmp"
		mv "$list_files.tmp" "$list_files"
		if [ "$type" = "add" ]; then
			echo "$url" >> "$list_files"
			# Keep only the newest $file_limit urls.
			tail -n "$file_limit" "$list_files" > "$base_dir/logs/list_files_new.txt"
			mv "$base_dir/logs/list_files_new.txt" "$list_files"
			echo "add line OK"
		else
			echo "delete line OK"
		fi
		# Empty the cached pages. ${cache_dir:?} aborts if the variable
		# is ever unset/empty, so this can never expand to "rm -rf /*".
		rm -Rf "${cache_dir:?}"*
		rm -f "$public_dir/index.html"

		# Warm the popular folder pages; "?pc=1" asks the app to export
		# a static copy under $public_dir (presumably — behavior of the
		# remote app is not visible here).
		while IFS= read -r folder; do
			[ -n "$folder" ] || continue
			curl -s "$public_url$folder/?pc=1" > /dev/null
			d_path="$public_dir$folder"
			if [ -d "$d_path" ]; then
				echo "Folder exported : $d_path"
			else
				echo "Folder export fail: $d_path"
			fi
		done < "$list_folders"

		# Warm each tracked article url and verify its static file landed.
		while IFS= read -r file; do
			[ -n "$file" ] || continue
			curl -s "$file?pc=1" > /dev/null
			# Map public url -> local path under the docroot.
			f_path=${file//$public_url/$public_dir}
			if [ -f "$f_path" ]; then
				echo "File exported: $file"
			else
				echo "File export fail: $file"
			fi
		done < "$list_files"

		# Export the homepage itself.
		curl -s "$public_url?pc=1" > /dev/null

		if [ -f "$path" ]; then
			echo "File exported: $public_url"
			# Push the regenerated cache to every front server.
			for server in "${servers[@]}"; do
				echo "$server"
				rsync -avuzr --delete --ignore-errors -e "ssh -i /home/synchtml/.ssh/id_rsa" "$cache_dir" "$server/tin-tuc"
				if [ -f "$public_dir/index.html" ]; then
					rsync -avuzr --delete --ignore-errors -e "ssh -i /home/synchtml/.ssh/id_rsa" "$public_dir/index.html" "$server"
				fi
			done
		fi
	else
		echo "Waiting job..."
		sleep 10
	fi
done