#This poorly written script is offered to you by HorstWepper
#Copyright (C) 2007  HorstWepper

#CREDIT SECTION
#Bugfix pointed out by ShiningSun in the getGallPagesURL.awk script
#Bugfix pointed out by Tequilamb, Paul, belterone (_blank)
#Added an experimental resume function suggested by Tequilamb

#Name : Fap Grabr
#Version : 0.3.4
#This script uses the following GNU tools ported to win32
#(output of the ls command in the ./bin dir)
#mkdir.exe     sed.exe       wc.exe       gawk.exe
#ls.exe        rm.exe        sh.exe       wget.exe

#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; either version 2
#of the License, or any later version.

#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#GNU General Public License for more details.

#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA.

######################
#Environment Variable#
######################
#Restrict PATH to the bundled ./bin directory so the win32 ports of the
#GNU tools listed in the header (sed, gawk, wget, ...) are the ones used.
PATH="bin"

###########
#Constants#
###########
#RegExp
#splits a ciphered image string into "<everything else>" + "<final digit>";
#the digit is the decode offset (see if_set_variable)
readonly V_IF_EXPR_IMG_ADDR="\(.*\)\([0-9]\)"
#matches the age-verification check-page URL embedded in a pic page
readonly V_IF_EXPR_CHECK_PAGE="\(.*\)\(image\.php?id=[0-9]*&check=[0-9A-Za-z]*\)\(.*\)"

###############
#CONFIGURATION#
###############

#Proxy configuration file
readonly F_WGET_CONF="config/wget.conf"

#Fap Grabr configuration files
readonly F_IS_CONF="config/is.conf"

#Maximum number of passes per gallery
#(may be overridden from $F_IS_CONF by read_is_conf)
MAX_PASS=3

#Maximum number of try per pic, 0=unlimited
#(written to $F_IS_CONF by write_is_conf, but the read-back is commented out)
MAX_TRY=5

#Max string length of the pic names
#NOTE(review): get_image actually pads names to MAX_STR_LENGTH+1 digits
MAX_STR_LENGTH=5

#Minimum file size the pics have to be, to be considered as valid (in bytes)
readonly V_MFS=500

#Wget command line configuration, wait 15 secs between the 5 retrievals
readonly V_WGET_CMD="--quiet --load-cookie=cookie.txt --waitretry=15 --tries=$MAX_TRY"
#--referer=http://www.imagfap.com --user-agent='Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)'

#Script Files
#finds and outputs address of the age verification page
readonly getAgeVerifPage="./script/getAgeVerifPage.awk"

#finds and outputs address of the pic pages
readonly getPicPagesURL="./script/getPicPagesURL.awk"

#finds and outputs address of the next gallery page
readonly getGallPagesURL="./script/getGallPagesURL.awk"

#finds and outputs the encoded URL of the picture
readonly getEncURL="./script/getEncURL.awk"

#is the file big enough to be a pic?
readonly getSizeOfFile="./script/getSizeOfFile.awk"

#where to store galleries address
F_GALLERIES="galleries.txt"

#set to 1 while retrieve_gall is re-entered from resume()
RESUME=0
 
###########
#FUNCTIONS#
###########
#gets id of the gallery (e.g 45133) in order to create directories and files
#arg1: gallery URL containing an "id=<digits>" part
#Stores the digits in the global V_GALL_ID (if nothing matches, sed passes
#the input through unchanged, same as before).
get_gall_id(){
	V_GALL_ID="$(print "$1" | sed 's/.*id=\([0-9]*\).*/\1/')"
}

#Extracts name of the gallery and makes it legal
#arg1: gallery URL
#Sets V_GALL_NAME and appends it to V_GALL_ID ("<id>.<name>") so the
#download directory carries a human-readable title.
#NOTE(review): scrapes the page for the line containing
#"url(img/win-fff.gif)" and takes the text after "<b>" as the title --
#fragile if the site markup changes. 'legalate' is not defined in this
#file; presumably it strips characters that are illegal in file names --
#verify where it is provided.
get_gall_name(){
	V_GALL_NAME=$(wget --quiet -O - "$1" | sed -n "/url(img\/win-fff.gif)/p" | sed -n "s/\(.*<b>\)\(.*\)/\2/p")
	V_GALL_NAME=$(legalate "$V_GALL_NAME") # | sed -n "s///p")
	V_GALL_ID="$V_GALL_ID.$V_GALL_NAME"
}

#sets various directories and files locations
#Everything lives under the per-gallery root directory named $V_GALL_ID,
#which must already be set (see get_gall_id / get_gall_name).
set_dir_file_var(){
	#directory layout
	D_TEXT_STORE="$V_GALL_ID/text"
	D_HTML_STORE="$V_GALL_ID/html"
	D_IMAGE_STORE="$V_GALL_ID/images"

	#address of the cookie validation page
	F_CHECK="$D_TEXT_STORE/check.txt"
	#pics that failed to download ("<number><url>" per line, see get_image)
	F_NOT_DOWNLOADED="$D_TEXT_STORE/NotDL.txt"
	#scratch dumps of the gallery page / pic page (overwritten as we go)
	F_GALL_PAGE="$D_HTML_STORE/gallPage.html"
	F_TMP="$D_HTML_STORE/image.htm"
	#pic page addresses of the whole gallery
	F_PIC_PAGE_ADDR="$D_TEXT_STORE/imagepageaddr.txt"
	#number of the next pic when resuming a broken gallery
	F_PIC_START="$D_TEXT_STORE/start_number.txt"
	#address of the gallery itself (for redownloading purpose)
	F_GALL_ADDR="$V_GALL_ID/gall_address.txt"
}

#loads proxy settings
#Reads $F_WGET_CONF word by word (the unquoted $(<...) splits on
#whitespace) and applies the recognized entries:
#  http_proxy=...     -> exported into the environment, picked up by wget
#  --proxy-user=...   -> appended to the wget command line ($V_WGET_CMD)
#  --proxy-passwd=... -> appended to the wget command line ($V_WGET_CMD)
#Anything else in the file is silently ignored.
read_proxy_setting(){
for line in $(<$F_WGET_CONF)
	do
		case $line in
			http_proxy*)
				export $line
				print $line
				;;

			--proxy-user*)
				V_WGET_CMD="$V_WGET_CMD $line"
				print $line
				;;
			--proxy-passwd*)
				V_WGET_CMD="$V_WGET_CMD $line"
				print $line
				;;

			*)
			;;
		esac
	done
}

#gets proxy settings
#Interactively asks for the proxy server (and optional credentials) and
#writes them to $F_WGET_CONF in the format read_proxy_setting expects.
get_proxy_setting(){
	print "\t\t:::::::::::::::::PROXY SETTINGS:::::::::::::::::"
	print ""
	print "Please enter your proxy settings, in this form:"
	print "http://proxy.address.com:port_number"
	read server?"Proxy server address & port>"
	print "http_proxy=$server" > $F_WGET_CONF

	read USR?"Proxy User Name (press enter if you don't have one)>"
	read PASS?"Proxy Password (press enter if you don't have one)>"

	#only store credentials when a user name was actually given
	if [[ -n $USR ]]
	then
		print "--proxy-user=\"$USR\"" >> $F_WGET_CONF
		print "--proxy-passwd=\"$PASS\"" >> $F_WGET_CONF
	fi
	print "config saved in $F_WGET_CONF"
}

#called when proxy is selected from the menu
#Shows a different banner depending on whether a (non-empty) config file
#already exists, then asks for and loads the settings.
proxy(){
	if [[ -s $F_WGET_CONF ]]
	then
		print "\t\tChanging proxy settings"
	else
		print "\t\tSetting up a proxy server"
	fi
	print ""
	get_proxy_setting
	read_proxy_setting
}

#read Fap Grabr configuration file
#Loads MAX_PASS from $F_IS_CONF (word-split, same scheme as
#read_proxy_setting). NOTE(review): the MAX_TRY entry is written by
#write_is_conf but its read-back below is commented out, so MAX_TRY keeps
#the default from the CONFIGURATION section.
read_is_conf(){
	if [[ ! -f "$F_IS_CONF" ]]
	then
		print "No configuration file found !"
	else
		for line in $(<$F_IS_CONF)
		do
			case $line in
				MAX_PASS*)
					export $(print $line)
					;;
				#MAX_TRY*)
				#	export $(print $line)
				#	;;
				*)
				;;
			esac
		done
	fi
}

#Asks the user for the Fap Grabr settings and writes them to $F_IS_CONF
#(MAX_PASS then MAX_TRY, one KEY=value per line), reporting whether the
#file was written.
write_is_conf(){
	print "\t\t:::::::::::::::FAP GRABR SETTINGS:::::::::::::::"
	print ""
	read ans?"How many tries for retrieving missing files?"
	print "MAX_PASS=$ans" > "$F_IS_CONF"
	read ans?"Enter the maximum number of try per picture:"
	print "MAX_TRY=$ans" >> "$F_IS_CONF"
	#BUGFIX: was '[[ -f "$F_IS_CONF" || -z "$F_IS_CONF" ]]' -- the -z arm
	#tested the variable (a constant, never empty), not the file, so it was
	#dead. -s expresses the intent: the file exists AND is non-empty after
	#the two writes above.
	if [[ -s "$F_IS_CONF" ]]
	then
		print "Configuration file written successfully"
		print ""
	else
		print "Something went wrong, configuration file not written"
		print ""
	fi
}

#called from the menu, when the user wants to edit Fap Grabr configuration file
#Prints a create-or-change banner, then writes and re-reads the config.
is_conf(){
	typeset banner="Setting up Fap Grabr Configuration"
	if [[ -f "$F_IS_CONF" ]]
	then
		banner="Changing Fap Grabr Configuration"
	fi
	print "\t\t$banner"
	print ""
	write_is_conf
	read_is_conf
}


#Displays the menu and controls the scripts
#Re-enters itself after every action, so the menu is shown until 4) Quit.
menu(){
	#Autoresume: a leftover, non-empty gallery list means a previous run
	#was interrupted before downloader() could delete it -- finish it first.
	#BUGFIX: was '[[ -f $F_GALLERIES && ! -z $F_GALLERIES ]]'; '! -z' was
	#applied to the variable (a constant, never empty), not to the file's
	#contents. -s is what was intended: the file exists and is non-empty.
	if [[ -s $F_GALLERIES ]]
	then
		downloader
	fi

	print
	print
	print
	print
	print
	print "\t\t::::::::::::::::::::::MENU::::::::::::::::::::::"
	print ""
	print "\t\t1) Fap Grabr Settings"
	print "\t\t2) Proxy Settings"
	print "\t\t3) Download Gallery(ies)"
	print "\t\t4) Quit"

	print
	read choice?"            Please enter a number:"

	case $choice in
		1)
		is_conf
		;;

		2)
		proxy
		;;

		3)
		ask_loop
		downloader
		;;

		4)
		exit
		;;

		*)
		print "\t\tSorry ! Not a valid entry. Please try again."
		;;
	esac
	menu
}

#called from the menu, when the user wants to download
#Iterates over the addresses stored in $F_GALLERIES. The unquoted
#$(<...) word-splits the file, which also skips the blank first line
#that ask_loop writes. For each address: derive the gallery id/name,
#set up the per-gallery paths, and let check() decide -- it returns 0
#for a fresh gallery (download it here) and 1 when it handled the
#gallery itself (resume / refetch missing / nothing to do).
#The list is removed afterwards so menu()'s autoresume does not loop.
downloader(){
	
	for line in $(<$F_GALLERIES)
	do
		get_gall_id "$line"
		get_gall_name "$line"
		set_dir_file_var
		check
		if (( $? == 0 ))
		then
			retrieve_gall "$line"
		fi
	done
	rm $F_GALLERIES
}	

#used by downloader: prompts the user for the gallery addresses to grab,
#one per line, until 'go' is entered. Addresses are collected in
#$F_GALLERIES (which is reset to a single blank line first).
ask_loop(){
	print "" > $F_GALLERIES
	print ""
	print "\t\t:::::::::::::DOWNLOAD  GALLERY(IES):::::::::::::"
	print ""

	until [[ "$addGall" == "go" ]]
	do
		print ""
		print "\tPaste address gallery via click right -> paste and press enter"
		print "\tExample: http://www.imagefap.com/gallery.php?gid=1458823"
		print "\tEnter 'go' on a single line to start the downloader"
		read addGall
		if [[ $addGall == "go" ]]
		then
			print "\tStarting Downloader"
		else
			print $addGall >> $F_GALLERIES
		fi
		print "\t*******************************************"
		print
		print
	done
}

#Gets the file extension of an image URL (it varies: .jpeg, .jpg, .gif...);
#needed because the downloaded files are renamed to keep them ordered.
#arg1: deciphered image URL; sets V_FILE_EXT to ".<ext>" (dot included).
#On no match the whole input passes through unchanged, as before.
#BUGFIX: the old pattern "\(.*\)\(\.[a-z]\)" matched a dot plus a SINGLE
#lowercase letter with no end anchor, so an uppercase extension (pic.JPG)
#matched nothing and V_FILE_EXT ended up holding the entire URL. The
#pattern is now anchored at the end of the line and accepts upper case
#and digits; printf replaces the unquoted 'print $1'.
get_file_extension(){
	V_FILE_EXT=$(printf '%s\n' "$1" | sed 's/.*\(\.[A-Za-z0-9]*\)$/\1/')
}

#sets variables that are needed by the 'dec' program to decipher pictures URL.
#In order to do it, we need:
#	+the ciphered string without the last character
#	+this last character
#
#...this last character being the offset by which we must decrement each character of the string
#to get it deciphered.
#
#arg1: the ciphered string scraped from the pic page
#Sets V_IF_STR_UNDEC (everything but the trailing digit) and V_IF_OFFSET
#(that digit), via $V_IF_EXPR_IMG_ADDR which splits "<anything><digit>".
#Both stay empty if the string does not end in a digit (sed -n ... p).
if_set_variable(){
	V_IF_STR_UNDEC=$(print "$1" | sed -n "s/$V_IF_EXPR_IMG_ADDR/\1/p")
	V_IF_OFFSET=$(print "$1" | sed -n "s/$V_IF_EXPR_IMG_ADDR/\2/p")
}

#retrieve a page of the gallery and dump each enclosed pic page addresses
#to $F_PIC_PAGE_ADDR
#arg1: URL of the gallery page to fetch
#The raw HTML is written to $F_GALL_PAGE (overwritten on every call; the
#latest dump is what get_next_gall_page_addr inspects for a next-page
#link), and the pic page addresses found in it are APPENDED to
#$F_PIC_PAGE_ADDR so they accumulate across gallery pages.
get_gall_page(){
	print "\tRetrieving page(s) of gallery..."
	wget $V_WGET_CMD -O "$F_GALL_PAGE" "$1" 

	print "\tDone."
	
	print ""
	print "\tGet image page addresses contained in the current page"
	gawk -f "$getPicPagesURL" "$F_GALL_PAGE" >> "$F_PIC_PAGE_ADDR"

	print "\tDone."
}

#try to find if another page in the gallery is available. Return codes are for loop purpose
#Scans the latest gallery dump ($F_GALL_PAGE) for a next-page link.
#return: 0 when a next page was found (and fetched), 1 when there is none.
get_next_gall_page_addr(){
	nextPageAddr=$(gawk -f "$getGallPagesURL" "$F_GALL_PAGE")
	if [[ -z $nextPageAddr ]]
	then
		print "\tNo more Pages..."
		return 1
	fi
	print "\tGetting page number $V_PAGE_COUNT"
	get_gall_page "$nextPageAddr"
	return 0
}

#gets the cookie allowing us to retrieve (adult) pictures
#Fetches the first pic page, lets the getAgeVerifPage awk script extract
#the age-verification URL from it, then hits that URL once with
#--save-cookie so the later wget calls (which pass
#--load-cookie=cookie.txt via $V_WGET_CMD) are treated as age-verified.
#Relies on the global picPageAddr array filled by retrieve_gall.
get_cookie(){
	print "\tGetting Age Verification Page..."
	v_check=$(wget --quiet "${picPageAddr[0]}" -O - | gawk -f "$getAgeVerifPage")
	print "\tDone"
	print ""
	print "\tGetting Age Verification cookie..."
	wget --quiet --save-cookie=cookie.txt "$v_check"

	print "\tI Am 18 Now"
}

#verifies the size of the downloaded pics
#arg1: path to file + file name of file to check
#arg2: minimum size the file must be to be considered as a pic one (in bytes)
#return:set a non zero string if the file is not considered as valid, a zero string if it is
#NOTE(review): arg2 is accepted but never used -- the threshold actually
#comes from the global $V_MFS. The verdict is whatever the getSizeOfFile
#awk script prints, captured in V_FILE_SIZE (empty = file looks valid);
#callers also inspect $?, which is the ls|gawk pipeline's exit status.
verify_file_size(){
	V_FILE_SIZE=$(ls -la "$1" | gawk -f "$getSizeOfFile" MIN_FILE_SIZE=$V_MFS)
}

#gets the so coveted picture
#We have to build the pic address from the ciphered string. We use 
#'dec' which is fast but maybe unreliable!
#arg1: ciphered image URL as extracted by the getEncURL awk script
#Deciphers it with the external 'dec' helper (not defined in this file --
#presumably a bundled binary; verify), downloads the image under a
#zero-padded sequence number plus its original extension, and records
#failures in $F_NOT_DOWNLOADED for the retry passes.
get_image(){
	print ""
	print "\tBuilding raw image address..."
	if_set_variable "$1"
	line=$(dec "$V_IF_STR_UNDEC" "$V_IF_OFFSET")

	get_file_extension "$line"

	print "\t\tRetrieving image number $V_COUNT_IMAGE at address:"
	print "$line"

	#"Spawn" a wget process in a separate shell and background it
	#in order to speed up things a little bit
	#(
	#pad the name with zeros

	#wc -c counts the trailing newline too, hence the decrement below
	str_length=$(print "$V_COUNT_IMAGE" | wc -c)
	let str_length--

	typeset image_name=$V_COUNT_IMAGE
	#NOTE(review): '<=' pads up to MAX_STR_LENGTH+1 characters (pic 1
	#becomes "000001" with MAX_STR_LENGTH=5); kept as-is since already
	#downloaded galleries use this width.
	while (( $str_length <= $MAX_STR_LENGTH ))
	do
		image_name="0${image_name}"
		let str_length++
	done

	wget $V_WGET_CMD -O "$D_IMAGE_STORE/${image_name}${V_FILE_EXT}" "$line"

	#if retrieve fails, save file number and file URL (in this order, on one line)
	#(no separator is needed: the name is all digits and the URL starts
	#with "http", which is how get_missing_file splits them apart again)

	verify_file_size "$D_IMAGE_STORE/${image_name}${V_FILE_EXT}" "$V_MFS"
	if [[ $? -ne 0 || ! -z $V_FILE_SIZE ]]
	then
		print "${image_name}${line}" >> "$F_NOT_DOWNLOADED"
	fi

	#)&

}


#creates directories as needed
#Creates the gallery root plus its html/text/images subdirectories,
#echoing each path as it goes (same order as before: root, html, text,
#images, so the subdirectories find their parent already in place).
create_dir(){
	typeset dir
	for dir in "$V_GALL_ID" "$D_HTML_STORE" "$D_TEXT_STORE" "$D_IMAGE_STORE"
	do
		print "\t$dir"
		mkdir "$dir"
	done
}

#Re-downloads every pic recorded in $F_NOT_DOWNLOADED.
#Each entry is "<zero-padded number><URL>" with no separator (written by
#get_image); the leading digits are the number, the part from "http://"
#on is the address. The file is loaded, deleted, and re-created only for
#pics that fail again -- so the repeated passes driven by retrieve_gall
#shrink it until it is empty or MAX_PASS is reached.
get_missing_file(){
	if [[ -f $F_NOT_DOWNLOADED ]]
	then
		print "Retrieving missing file..."

		#load missing pics address in a variable
		lines=$(<$F_NOT_DOWNLOADED)

		#delete the file holding those addresses
		rm $F_NOT_DOWNLOADED
		for line in $lines
		do
			number=$(print $line | sed -n "s/\([0-9]*\)\(.*\)/\1/p")
			img_addr=$(print $line | sed -n "s/\(.*\)\(http:\/\/.*\)/\2/p")
			get_file_extension $img_addr
			missing_file="$D_IMAGE_STORE/$number$V_FILE_EXT"

			print "Getting image number $number at address:"
			print "\t$img_addr"
			wget $V_WGET_CMD -O "$missing_file" "$img_addr"
			verify_file_size "$missing_file" "$V_MFS"

			#if an error happens during the new retrieval, rebuild the missing
			#pic files
			if [[ $? -ne 0 || ! -z $V_FILE_SIZE ]]
			then
				print "$number$img_addr" >> "$F_NOT_DOWNLOADED"
			fi

		done
	else
		print
		print "\t\tNothing to retrieve..."
	fi
}

#Resumes an interrupted gallery: re-enters retrieve_gall with the pic
#counter that dump_left_pages saved in $F_PIC_START, setting RESUME=1 so
#the page-scraping phase is skipped (the saved address list is reused).
resume(){
	typeset start_at
	RESUME=1
	start_at=$(< $F_PIC_START)
	retrieve_gall "$V_GALL_ID" "$start_at"
	RESUME=0
}

#Decides what to do with the gallery named by $V_GALL_ID.
#return: 0 when its directory does not exist yet (caller should download
#it from scratch); 1 when it exists and was handled here, in priority
#order: resume an interrupted run, refetch missing pics, or nothing.
check(){
	#no gallery directory yet: tell the caller to do a full download
	if [[ ! -d "$V_GALL_ID" ]]
	then
		return 0
	fi

	if [[ -f $F_PIC_START && -f $F_PIC_PAGE_ADDR ]]
	then
		#a saved start number plus leftover page list: interrupted run
		print "\t************************************************************"
		print "\tResuming $V_GALL_ID "
		print "\t************************************************************"
		resume
	elif [[ -f $F_NOT_DOWNLOADED ]]
	then
		#only a list of failed pics: refetch just those
		print "\t************************************************************"
		print "\tGetting missing file of $V_GALL_ID "
		print "\t************************************************************"
		get_missing_file "$V_GALL_ID"
	else
		print "\t\t****NOTHING TO DO for $V_GALL_ID****"
	fi
	return 1
}

#SIGINT (CTRL-C) trap handler installed by retrieve_gall.
#Saves the current pic counter to $F_PIC_START and writes the not-yet
#processed pic page addresses back to $F_PIC_PAGE_ADDR, so check()/
#resume() can pick the gallery up later; then cleans up and exits 1.
dump_left_pages(){
	print "***********************"
	print "SIGINT (CTRL-C) caught!"
	print "***********************"
	print $V_COUNT_IMAGE > $F_PIC_START
	print
	#BUGFIX: the old guard was [[ ! -z ${#picPageAddr[*]} ]] -- an array
	#element count is never an empty string, so the branch always ran.
	#Test the count numerically, and default the loop bounds in case the
	#interrupt arrived before retrieve_gall initialised them.
	if (( ${#picPageAddr[*]} > 0 ))
	then
		print "\tStarting to dump remaining pic page addresses"
		left_lines=${left_lines:-0}
		max_idx=${max_idx:-${#picPageAddr[*]}}
		while (( $left_lines < $max_idx ))
		do
			print "${picPageAddr[left_lines++]}" >> "$F_PIC_PAGE_ADDR"
			print "\t.: Dumping! :."
		done
		print "\tDone."
		clean
	fi
	exit 1
}

#retrieves a whole gallery
#arg1: gallery URL (or the gallery id when called from resume)
#arg2: pic number to restart counting from (only meaningful when RESUME=1)
#When RESUME=0 all gallery pages are scraped first to build the pic page
#address list; when RESUME=1 the list saved by dump_left_pages is reused.
retrieve_gall(){
	V_COUNT_IMAGE=$2
	if (( $RESUME == 0))
	then
		print "******************************************************************************"
		print "RETRIEVING GALLERY $1"
		print "******************************************************************************"
		print "Getting gallery's ID"
		get_gall_id "$1"
		get_gall_name "$1"

		print "Creating directories"
		create_dir

		#save the address of the current gallery (for redownloading purpose)
		print "$1" > "$F_GALL_ADDR"

		#get the first gallery's page
		V_PAGE_COUNT=1
		V_COUNT_IMAGE=1
		get_gall_page "$1"

		#get the next ones (if any)
		while (( $? == 0 ))
		do
			let V_PAGE_COUNT++
			get_next_gall_page_addr
		done
	fi

	#load an array with pic page addresses
	set -A picPageAddr $(<"$F_PIC_PAGE_ADDR")
	rm "$F_PIC_PAGE_ADDR"

	#set trap, in order to catch a SIG-INT (CTRL-C) signal
	trap dump_left_pages INT

	#get the age verification cookie
	get_cookie

	#get pics

	#set current position in the array
	current_pos=0
	max_idx=${#picPageAddr[*]}

	while (( $current_pos < $max_idx ))
	do
		#_patch from belterone (fixed):
		#sometimes whatever makes the list puts a _blank entry in there;
		#obviously it's going nowhere, so skip it.
		#BUGFIX: the old check tested $line (a stale value left over from
		#get_image) instead of the current list entry, and 'continue'd
		#WITHOUT advancing current_pos -- an infinite loop whenever it
		#fired. Check the entry itself, before fetching it, and step on.
		if [[ "${picPageAddr[current_pos]}" == "http://www.imagefap.com/_blank" ]]
		then
			let left_lines=++current_pos
			continue
		fi

		_img_addr=$(wget $V_WGET_CMD -O - "${picPageAddr[current_pos]}" | gawk -f "$getEncURL")

		#Work Around: Be sure that we have something in $_img_addr
		#as sometimes we can't get the encoded URL during the first download pass
		#(ImageFap bug?)
		while [[ -z "$_img_addr" ]]
		do
			wget $V_WGET_CMD -O $F_TMP "${picPageAddr[current_pos]}"
			_img_addr=$(gawk -f "$getEncURL" $F_TMP)
		done

		get_image "$_img_addr"

		#remember how far we got (read by the SIGINT dump) and step on
		let left_lines=++current_pos

		let V_COUNT_IMAGE++

	done

	#try to redownload if there are missing files, try $MAX_PASS times
	pass=1
	while [[ $pass -le $MAX_PASS && -f $F_NOT_DOWNLOADED ]]
	do
		print "*******************************************************************************"
		print "Trying to get missing files...PASS $pass"
		print "*******************************************************************************"
		get_missing_file
		let pass++
	done

	#Cleaning up
	clean
}

#Removes per-gallery scratch data: the html dump directory, plus the
#stray image.php* downloads and the age-verification cookie left in the
#working directory.
clean(){
	print "Cleaning up garbage..."
	if [[ -d "$D_HTML_STORE" ]]
	then
		rm -r "$D_HTML_STORE"
	fi
	rm -f image.php* cookie.txt
	print "Done."
}
#Written using VIM, the ultimate code editor, see http://www.vim.org/
