#!/bin/bash
#set -x


#############################################################
## Runs once a minute (from cron). If the online static index
## has advanced, re-render ALL static list pages.
#############################################################

# Dynamic list-page URL; '?v=iamnotstatic' forces the dynamic (non-static)
# rendering path so extra query params can be appended with '&'.
urlPrefix="http://trydev.test.flykobe.com/try/?v=iamnotstatic"

# Staging directory: pages are fetched here first, then copied online.
dir="static_data"
# Live directory served to users.
onlineDir="../static"
flagFile='./static_flag.txt' # records the last published static index (singleton-run marker too)

# Singleton lock via a symlink to our /proc entry.
# Fixes two defects in the original:
#  1) a dangling lock (crashed previous run) made '-e' false, but the
#     subsequent 'ln -s' still failed silently, so the script kept running
#     WITHOUT any lock;
#  2) the result of 'ln -s' was never checked.
function LockFile() {
	local lock="/dev/shm/${0##*/}"
	# If the link resolves, its /proc/<pid> target is alive => already running.
	[ -e "$lock" ] && errorexit "Self already Run!"
	# A dangling link is a stale lock from a crashed run: clear it so ln can succeed.
	[ -L "$lock" ] && unlink "$lock"
	ln -s "/proc/$$" "$lock" || errorexit "Can't take lock $lock"
	# Release the lock on normal exit and on common signals.
	trap "Exit" 0 1 2 3 15 22 24
}
# Release the singleton lock and terminate successfully.
# Errors from unlink (e.g. lock already gone) are deliberately ignored.
function Exit(){
	local lock="/dev/shm/${0##*/}"
	unlink "$lock" >/dev/null 2>&1
	exit 0
}


# Build the staging-file path for sort type $1 and page $2 (both may be empty).
# Reads the global $dir.
function get_filename(){
	printf '%s/try_%s_%s.htm\n' "$dir" "$1" "$2"
}

# Write a timestamped message to stderr (stdout stays free for data).
# Fixes: the original clobbered the global 'time' variable, and used echo,
# which mangles messages beginning with '-n'/'-e' or containing backslashes.
function log(){
	local now
	now=$(date "+%Y-%m-%d %H:%M:%S")
	printf '[%s]%s\n' "$now" "$*" >&2
}

# Log a message at error level (delegates to log).
function error(){
	log "[error]$*"
}

# Log a message at notice level (delegates to log).
function notice(){
	log "[notice]$*"
}

# Log an error, then abort the whole script with status 1.
# (The EXIT trap installed by LockFile still fires and releases the lock.)
function errorexit(){
	error "$@"
	exit 1
}

# Download $2 (url) into $1 (file). Returns non-zero on any failure.
# Fixes in this version:
#  - file/url are local (the main loop uses globals of the same names);
#  - expansions are quoted (paths/urls with special chars);
#  - curl -f: an HTTP 4xx/5xx now fails instead of silently saving the
#    server's error page as static content;
#  - -sS instead of 2>/dev/null: progress bar is muted but real curl
#    errors remain visible on stderr.
function cCurl(){
	local file="$1"
	local url="$2"
	if [[ -z "$file" || -z "$url" ]];then
		notice "Need file and url!"
		return 1
	fi
	notice "curl $url to $file"
	curl -fsS -o "$file" "$url"
}

# simple singleton: abort if another instance is running
LockFile

# Fetch the first page; it embeds the metadata we need
# (static_must_need_index / _total_page / _sort_type).
firstfile=$(get_filename)
cCurl "$firstfile" "$urlPrefix"
if [ $? -ne 0 ];then
	errorexit "Get first page fail: $urlPrefix"
fi

# Extract the static_must_need_index value from a downloaded page.
function get_index(){
	perl -n -e 'if(m/var\s+static_must_need_index\s*=\s*"(\d+)"/){print $1;}' "$1"
}

# Check if we need to re-static: only when online's index is bigger than local.
onlineIndex=$(get_index "$firstfile")
if [ -z "$onlineIndex" ];then
	errorexit "Can't find static_must_need_index!"
fi
# Flag file may not exist on the first run; treat that as index 0.
localIndex=$(cat "$flagFile" 2>/dev/null)
if [ -z "$localIndex" ];then
	localIndex=0
fi
if [ "$onlineIndex" -le "$localIndex" ];then
	# needn't re-static; 'exit' fires the EXIT trap, which releases the lock
	exit
fi

# Calculate how many pages we need.
totalPage=$(perl -n -e 'if(m/var\s+static_must_need_total_page\s*=\s*"(\d+)"/){print $1;}' "$firstfile")
# Guard against an empty match too, which would break the numeric test.
if [ -z "$totalPage" ] || [ "$totalPage" -lt 1 ];then
	errorexit "Total Page too small: $totalPage"
fi

# Calculate which sort types we need (whitespace-separated list).
sortTypes=$(perl -n -e 'if(m/var\s+static_must_need_sort_type\s*=\s*"([\w\d\s]+)"/){print $1;}' "$firstfile")
if [ -z "$sortTypes" ];then
	errorexit "Can't get sort types!"
fi


# Spider every sort-type / page combination.
# sortTypes is intentionally unquoted: word-splitting iterates the list.
for s in $sortTypes;do
	for (( i=1; i<=totalPage; i++ ));do
		url="$urlPrefix&s=$s&page=$i"
		if [ "$i" -eq 1 ];then
			# Page 1 is saved twice: once as the "no page number" default file.
			file=$(get_filename "$s" "")

			cCurl "$file" "$url"
			if [ $? -ne 0 ];then
				errorexit "Get $url fail"
			fi
			# BUGFIX: verify the page just downloaded. The original re-parsed
			# $firstfile here, so curIndex always equaled onlineIndex and this
			# consistency check could never fire.
			curIndex=$(get_index "$file")
			# String compare: also trips when the page lacks the index entirely.
			if [ "$curIndex" != "$onlineIndex" ];then
				errorexit "Modify occurred during static run, quit"
			fi
		fi
		file=$(get_filename "$s" "$i")

		cCurl "$file" "$url"
		if [ $? -ne 0 ];then
			errorexit "Get $url fail"
		fi
		# Abort if the site content changed mid-run (index mismatch).
		curIndex=$(get_index "$file")
		if [ "$curIndex" != "$onlineIndex" ];then
			errorexit "Modify occurred during static run, quit"
		fi
	done
done

# Move finished static pages online.
cp "$dir"/* "$onlineDir"

# Persist the index we just published.
echo "$onlineIndex" > "$flagFile"

notice "Finish!"

Exit
