#!/bin/bash
# Poll a fixed set of collector servers roughly every exec_interval seconds,
# download the newest .gz log from each, and post-process it with awk scripts
# (get_last_gz_url.awk, filter_fbuffer.awk, fbuffer_realtime_proc.awk).

# Source servers to poll; 1-based indices, kept in step with ip_timestamp.
# NOTE(review): entries 1 and 2 are both 192.168.111.141 — confirm whether
# one of them should be a different host (e.g. 192.168.111.142).
source_ip_list[1]=192.168.111.141
source_ip_list[2]=192.168.111.141
source_ip_list[3]=192.168.111.146
source_ip_list[4]=192.168.111.147
source_ip_list[5]=192.168.219.127
source_ip_list[6]=192.168.219.128
source_ip_list[7]=192.168.219.129

# Timestamp of the last .gz already fetched from each server (0 = none yet).
ip_timestamp[1]=0
ip_timestamp[2]=0
ip_timestamp[3]=0
ip_timestamp[4]=0
ip_timestamp[5]=0
ip_timestamp[6]=0
ip_timestamp[7]=0

# Today's date (YYYYMMDD) — used as the remote directory name on each server.
datum=$(date +%Y%m%d)
echo "$datum"

# Target duration of one polling cycle, in seconds.
exec_interval=300

# Safety cap on the number of polling cycles before the script exits.
MAX_LOOP=1000

# Main polling loop: one iteration per exec_interval window, MAX_LOOP at most.
for (( k = 0; k < MAX_LOOP; k++ ))
do

	# Highest .gz timestamp seen across all servers this cycle.
	# NOTE(review): despite the name, the '-lt' comparison below keeps the
	# *latest* timestamp, not the earliest — confirm intent, since only
	# results/${earlier_timestamp}.fbuffer is processed at the end of the
	# cycle and fbuffer files for other timestamps are left behind.
	earlier_timestamp=0
	loop_gzip_delay=0
	loop_awk_delay=0
	loop_net_delay=0

	exec_begin_time=$(date +%s)

	# Number of servers that produced a new file this cycle.
	exec_server_sum=0

	for i in "${!source_ip_list[@]}"; do

		printf "%s\t%s\n" "$i" "${source_ip_list[$i]}"

		exec_each_server_begin_time=$(date +%s)

		source_ip=${source_ip_list[$i]}
		echo "${source_ip}:8088/${datum}"

		# Fetch the remote directory listing for today's date.
		curl -L "http://${source_ip}:8088/${datum}/" > "${source_ip}_list.log"

		# get_last_gz_url.awk is expected to emit 'last_gz_timestamp=<ts>'.
		# NOTE(review): eval of awk output is a shell-injection risk if the
		# listing is untrusted — consider capturing the value directly.
		eval $(gawk -f get_last_gz_url.awk "${source_ip}_list.log")

		# Track the newest timestamp seen so far. Default to 0 so the test
		# never sees an empty operand when the awk script emitted nothing.
		if [ "${earlier_timestamp}" -lt "${last_gz_timestamp:-0}" ]; then
			earlier_timestamp=${last_gz_timestamp}
		fi

		# If this server has a file newer than the one already fetched,
		# download, decompress, and filter it.

		echo "ip_timestamp[" "${i}" "]=" "${ip_timestamp[$i]}"
		echo "last_gz_timestamp=" "${last_gz_timestamp}"

		if [ "${ip_timestamp[$i]}" -lt "${last_gz_timestamp:-0}" ]; then

			exec_net_begin_time=$(date +%s)

			# -c resume, 30 tries, 30s timeout, 5s wait between tries.
			wget -P data/ -c -t30 -T30 -w5 "http://${source_ip}:8088/${datum}/${last_gz_timestamp}.gz"

			exec_net_end_time=$(date +%s)
			exec_net_duration=$((exec_net_end_time - exec_net_begin_time))
			loop_net_delay=$((loop_net_delay + exec_net_duration))

			# Remember what we fetched even if the download failed, so we
			# do not retry the same timestamp forever (original behavior).
			ip_timestamp[$i]=${last_gz_timestamp}

			exec_server_sum=$((exec_server_sum + 1))

			if [ -f "data/${last_gz_timestamp}.gz" ]; then

				exec_gzip_begin_time=$(date +%s)
				gzip -f -d "data/${last_gz_timestamp}.gz"

				exec_gzip_end_time=$(date +%s)
				exec_gzip_duration=$((exec_gzip_end_time - exec_gzip_begin_time))

				loop_gzip_delay=$((loop_gzip_delay + exec_gzip_duration))

				echo "gzip spent ${exec_gzip_duration} seconds"

				if [ -f "data/${last_gz_timestamp}" ]; then

					exec_filter_fbuffer_begin_time=$(date +%s)
					# Append this server's filtered records, keyed by timestamp.
					mawk -f filter_fbuffer.awk "data/${last_gz_timestamp}" >> "results/${last_gz_timestamp}.fbuffer"

					exec_filter_fbuffer_end_time=$(date +%s)
					exec_filter_fbuffer_duration=$((exec_filter_fbuffer_end_time - exec_filter_fbuffer_begin_time))

					loop_awk_delay=$((loop_awk_delay + exec_filter_fbuffer_duration))
					echo "exec filter fbuffer spent ${exec_filter_fbuffer_duration} seconds"

				fi
				rm -f "data/${last_gz_timestamp}"

			fi
			rm -f "data/${last_gz_timestamp}.gz"
		fi

		exec_each_server_end_time=$(date +%s)
		exec_each_server_duration=$((exec_each_server_end_time - exec_each_server_begin_time))

		echo "Run for server[${i}] ${source_ip} spent ${exec_each_server_duration} seconds"

	done

	# (Mis-encoded fullwidth comma in the original message replaced with ','.)
	echo "early_timestamp=${earlier_timestamp},last_timestamp=${last_gz_timestamp}"

	exec_fbuffer_proc_begin_time=$(date +%s)
	# Process the merged fbuffer for the newest timestamp seen this cycle.
	mawk -f fbuffer_realtime_proc.awk "results/${earlier_timestamp}.fbuffer" >> results/fbuffer_output
	exec_fbuffer_proc_end_time=$(date +%s)
	exec_fbuffer_proc_duration=$((exec_fbuffer_proc_end_time - exec_fbuffer_proc_begin_time))

	rm -f "results/${earlier_timestamp}.fbuffer"

	loop_awk_delay=$((loop_awk_delay + exec_fbuffer_proc_duration))
	echo "exec fbuffer_realtime_proc spent ${exec_fbuffer_proc_duration} seconds"

	exec_end_time=$(date +%s)
	exec_duration=$((exec_end_time - exec_begin_time))

	echo "each loop spent ${exec_duration} seconds for ${exec_server_sum} servers"

	echo "net:${loop_net_delay} seconds; gzip:${loop_gzip_delay} seconds; awk:${loop_awk_delay} seconds"

	# Sleep out the remainder of the polling window.
	# Fix: use exec_interval (declared at the top) instead of hard-coded 300.
	sleep_delay=$((exec_interval - exec_duration))

	echo "sleep_delay=${sleep_delay}"

	if [ 0 -lt "$sleep_delay" ]; then
		echo "sleep ${sleep_delay} seconds"
		sleep "$sleep_delay"
	fi

done
