#!/bin/bash -x
#
# Last updated 20120625.1
#
####################################################################
#
# Copyright (C) 2012 MapAction UK Charity No. 1075977
#
# www.mapaction.org
# 
# This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version.
# 
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# 
# You should have received a copy of the GNU General Public License along with this program; if not, see <http://www.gnu.org/licenses>.
# 
# ###################################################################

# Location of the pavuk web-mirroring binary
g_pavuk=/usr/local/bin/pavuk

#
# Remote paths
#
# The hostname of the remote (joomla) site
g_hostname=www.mapaction.org
# Path beneath the root to the part of the site which we wish to mirror
g_remotepath=wiki

#
# Credentials for remote site
#
g_username=mapactionsearch
# Read the password from the root-only credentials file.  Errors from cat
# are deliberately suppressed; the emptiness check below handles failure.
g_password=$(cat /root/mapactionsearch.credentials 2>/dev/null)
# Fall back to a non-empty placeholder if the file was missing or empty.
# NB: the old code assigned "fail" *before* the command substitution, which
# overwrote it with an empty string on failure - the fallback never worked.
[ -n "$g_password" ] || g_password=fail

#
# Local paths
#
# directory where the output will be stored
g_localoutputpath=/mnt/reference-files/Wiki_offline
# temp dir to use for this script
g_temp=/var/wikimirror
# lockfile for this script
g_lockfile=${g_temp}/update-offline-wiki.pid
# name of tempfile for saving login page
g_temploginpagename=loginpage.html
# path to the session cookies file
g_pathtocookies=${g_temp}/cookies.txt

#
# Other parameters
#
# URL patterns that must NOT be mirrored from the mediawiki part of the
# site: old revisions, edit/watch/history actions, user and help
# namespaces, feeds, printable views and most Special: pages.  The
# patterns are collected in an array and comma-joined for pavuk's
# -skip_url_pattern switch.  (Patterns previously considered but left
# disabled: "*Special:" and "*=Special:*".)
_mw_reject_patterns=(
    '*oldid=*'
    '*action=edit*'
    '*action=watch*'
    '*action=unwatch*'
    '*action=history*'
    '*redirect=no*'
    '*diff=*'
    '*limit=*'
    '*[/=]User:*'
    '*[/=]User_talk:*'
    '*Search:*'
    '*Help:*'
    '*feed=atom'
    '*feed=rss'
    '*printable=yes*'
    '*opensearch_desc.php'
    '*Special:[^AC]*'
    '*Special:Contributions*'
    '*Special:ChangePassword*'
    '*index.php?title=File*'
)
# Join with commas ("[*]" uses the first character of IFS as separator).
g_mediawikirejectlist=$(IFS=','; printf '%s' "${_mw_reject_patterns[*]}")
unset _mw_reject_patterns


# URL patterns to exclude from the non-wiki (Joomla) side of the site.
# As above, collected in an array and comma-joined for -skip_url_pattern.
_joomla_reject_patterns=(
    # avoid certain Joomla functions
    '*task=logout*'
    'opensearch_desc.php'
    'option=com_mapcat'
    # avoid certain directories
    "http://${g_hostname}/about/*"
    "http://${g_hostname}/component/mapcat/*"
    "http://${g_hostname}/component/user/*"
    "http://${g_hostname}/deployments/*"
    "http://${g_hostname}/map-catalogue/*"
    "http://${g_hostname}/more-news/*"
    "http://${g_hostname}/phpBB3/*"
    "http://${g_hostname}/resources/*"
    "http://${g_hostname}/support/*"
    # avoid certain files
    "http://${g_hostname}/calendar*"
    "http://${g_hostname}/contacts*"
    "http://${g_hostname}/deployments*"
    "http://${g_hostname}/index.php"
    "http://${g_hostname}/map-catalogue*"
    "http://${g_hostname}/more-news*"
    # NOTE(review): "patistan" looks like a typo for "pakistan" - kept
    # verbatim to preserve behaviour; confirm against the live site.
    "http://${g_hostname}/patistan-resources*"
    "http://${g_hostname}/resources*"
    "http://${g_hostname}/support*"
    "http://${g_hostname}/team-launchpad*"
)
# Join with commas ("[*]" uses the first character of IFS as separator).
g_otherrejectlist=$(IFS=','; printf '%s' "${_joomla_reject_patterns[*]}")
unset _joomla_reject_patterns



function dologin {
    # Log in to the remote Joomla site and store the authenticated session
    # cookies in a file so that a later mirroring run can reuse them.
    #
    # Arguments:
    #   $1 - hostname of the remote (joomla) site
    #   $2 - path of the session cookies file to create/update
    #   $3 - login username
    #   $4 - login password
    #   $5 - temporary working directory
    #   $6 - filename (within $5) used to save the fetched login page
    local l_hostname=$1
    local l_pathtocookies=$2
    local l_username=$3
    local l_password=$4
    local l_temp=$5
    local l_temploginpagename=$6
    local l_logindebugpagename=logindebug.html
    local l_token

    # Joomla produces a session ID (or token) as a hidden field within the
    # login form.  This token must be posted together with the username,
    # password and relevant session cookie for Joomla to allow a user in.
    # Therefore we have to do two requests: one to fetch and parse the login
    # page to obtain the token, and a second to actually do the login.  The
    # session cookies have to be updated each time.  Later (in a different
    # function) a third request, reusing the session cookies, performs the
    # actual retrieval.
    #
    # With pavuk there doesn't seem to be a way to output a single file to
    # stdout, hence we have to write it to a temporary location and read it
    # back - yuck!

    ############################################################################
    #
    # First request (get login page)
    #
    ############################################################################

    # First delete any temporary files that might be left over from previous
    # runs.  "-f" stops rm emitting spurious errors on a clean run, where
    # none of these files exist yet.
    echo "${l_pathtocookies}"
    rm -f "${l_pathtocookies}"
    rm -f "${l_temp}/${l_temploginpagename}"
    rm -f "${l_temp}/${l_logindebugpagename}"

    # Use pavuk to read the login page (single page only with no recursion)
    # and write it to the temporary location.
    "${g_pavuk}" \
        -cookie_file "${l_pathtocookies}" \
        -cookie_send \
        -cookie_recv \
        -cookie_update \
        -singlepage \
        -dont_leave_site \
        -noread_css \
        -nthreads 1 \
        -cdir "${l_temp}" \
        -store_name "${l_temploginpagename}" \
        "http://${l_hostname}/index.php"
        # "http://${l_hostname}/index.php?option=com_user&view=login"

    # The session token is the 32-character alphanumeric string that appears
    # double-quoted in the html page.  The lookaround assertions replace the
    # old two-stage grep; NB the old pattern used the range "a-zA-z", which
    # also matched the punctuation characters between 'Z' and 'a'.  "head"
    # keeps a single candidate should the page ever contain more than one.
    l_token=$(grep -Po '(?<=")[a-zA-Z0-9]{32}(?=")' \
        "${l_temp}/${l_temploginpagename}" | head -n 1)

    # Now we have the session token we can delete the temporary file(s)
    rm -f "${l_temp}/${l_temploginpagename}"
    rm -r -f "${l_temp}/http/"*

    ############################################################################
    #
    # Second request (do login)
    #
    ############################################################################

    # We now post the username, password and token, with the session cookie,
    # to do the login.  The updated cookies are saved.  Output from this
    # command is sent to /dev/null.
    "${g_pavuk}" \
        -cookie_file "${l_pathtocookies}" \
        -cookie_send \
        -cookie_recv \
        -cookie_update \
        -singlepage \
        -dont_leave_site \
        -nthreads 1 \
        -noread_css \
        -cdir /dev/null \
        -request "URL:http://${l_hostname}/index.php METHOD:POST FIELD:username=${l_username} FIELD:passwd=${l_password} FIELD:task=login FIELD:${l_token}=1 FIELD:option=com_user" \
        "http://${l_hostname}/index.php"

}


function getwiki {
    # Mirror the wiki area of the remote site into a local directory using
    # the session cookies previously created by dologin.
    #
    # Arguments:
    #   $1 - hostname of the remote site
    #   $2 - path of the session cookies file
    #   $3 - path beneath the site root to mirror (e.g. "wiki")
    #   $4 - local directory under which the mirror is stored
    #   $5 - comma-separated list of URL patterns to skip
    local l_hostname=$1
    local l_pathtocookies=$2
    local l_remotepath=$3
    local l_localpath=$4
    local l_urlrejectlist=$5
    local l_pavukinfodir=pavukinfodir

    ############################################################################
    #
    # Third request (main download)
    #
    ############################################################################

    # First we remove any cached copy from previous runs.  The ":?"
    # expansions abort if either variable is unset or empty, so a missing
    # argument can never collapse this into "rm -r -f /*".
    rm -r -f -- "${l_localpath:?}/${l_remotepath:?}"/*

    # The three "-tr_str_str" switches rename filenames (in the URL) prefixed
    # with "Image:" to "Image_" on disk.  Similarly those prefixed with
    # "File:" become "File_", and any other instance of ":" "\" "!" "&" "="
    # "?" is replaced with the underscore "_" character.
    #
    # The various fnrules have a similar function, controlling the location
    # and local name by which each downloaded file is saved.
    #
    # The first four control where css files are saved, putting them out of
    # the way into their own special "css" directory:
    # -fnrules F "*/index.php?title=MediaWiki:Common.css*" "%d/css/%n%s.css" \
    # -fnrules F "*/index.php?title=MediaWiki:Print.css*" "%d/css/%n%s.css" \
    # -fnrules F "*/index.php?title=MediaWiki:Monobook.css*" "%d/css/%n%s.css" \
    # -fnrules F '*/index.php?title???action?raw?maxage?18000?smaxage?0?ts*gen?css' "%d/css/generic.css" \
    #
    # The next two ensure that any of the files in the "index.php" directory
    # have an "html" extension.  This is important, since mediawiki creates
    # a metadata page about every binary file (including images) which is
    # uploaded.  The metadata page has the same name (and same extension) as
    # the image file it describes.  Whilst a browser will recognise this
    # correctly based on its mime type, for a local file the OS will assume
    # that the metadata html page is actually the image file itself.
    # Forcing an "html" extension onto every file in the index.php dir
    # solves this.  The *real* image files are saved in a separate dir
    # named "images".
    # -fnrules F "*/index.php/*:*" "%d/%n%s.html" \
    # -fnrules F "*/index.php[/?]*" "%d/%b%s.html" \
    #
    # Everything else just gets its default name:
    # -fnrules F "*" "%d/%n%s" \

    "${g_pavuk}" \
        -cookie_file "${l_pathtocookies}" \
        -cookie_send \
        -cookie_recv \
        -cookie_update \
        -nthreads 1 \
        -mode mirror \
        -cdir "${l_localpath}" \
        -store_name chunky-index.html \
        -sel_to_local \
        -dont_leave_site \
        -dont_leave_dir \
        -use_http11 \
        -store_info \
        -info_dir "${l_localpath}/${l_pavukinfodir}" \
        -read_css \
        -maxrate 512 \
        -skip_url_pattern "${l_urlrejectlist}" \
        -tr_str_str "Image:" "Image_" \
        -tr_str_str "File:" "File_" \
        -tr_chr_chr ":\\!&=?" "_" \
        -fnrules F "*/index.php?title=MediaWiki:Common.css*" "%d/css/%n%s.css" \
        -fnrules F "*/index.php?title=MediaWiki:Print.css*" "%d/css/%n%s.css" \
        -fnrules F "*/index.php?title=MediaWiki:Monobook.css*" "%d/css/%n%s.css" \
        -fnrules F '*/index.php?title???action?raw?maxage?18000?smaxage?0?ts*gen?css' "%d/css/generic.css" \
        -fnrules F "*/index.php/*:*" "%d/%n%s.html" \
        -fnrules F "*/index.php[/?]*" "%d/%b%s.html" \
        -fnrules F "*" "%d/%n%s" \
        "http://${l_hostname}/${l_remotepath}/" \
        "http://${l_hostname}/favicon.ico"

}


### Actually do stuff
#
# Take the lock (noclobber makes the redirect fail if the pidfile already
# exists, so only one instance can run), then mirror and publish the wiki.
if ( set -o noclobber; echo "$$" > "$g_lockfile") 2> /dev/null;
then
    # Always remove the lockfile on exit.  NB: the old single trap ran
    # 'exit $?' after 'rm -f', which exited with rm's status and so masked
    # the real exit code (an interrupted run exited 0); splitting the traps
    # preserves a non-zero status on INT/TERM.
    trap 'rm -f "$g_lockfile"' EXIT
    trap 'exit 1' INT TERM

    # critical-section

    # Every argument is quoted so a password containing whitespace (read
    # from the credentials file) stays a single parameter.
    dologin "${g_hostname}" "${g_pathtocookies}" "${g_username}" "${g_password}" "${g_temp}" "${g_temploginpagename}"

    # Copy the wiki down to a local temporary location
    getwiki "${g_hostname}" "${g_pathtocookies}" "${g_remotepath}" "${g_temp}" "${g_mediawikirejectlist},${g_otherrejectlist}"

    # then rsync across to the visible location in the reference-files directory
    rsync --stats -h -a --delete-after "${g_temp}/${g_remotepath}/" "${g_localoutputpath}/${g_remotepath}/"
    # finally force the dir owner and permissions to be something sensible
    chown --recursive 'ma_ref_editor:domain users' "${g_localoutputpath}/${g_remotepath}/"
    chmod --recursive u=rwX,go=rX "${g_localoutputpath}/${g_remotepath}/"

    rm -f "$g_lockfile"
    trap - INT TERM EXIT
else
    echo "Failed to acquire lockfile: $g_lockfile."
    echo "Held by $(cat "$g_lockfile")"
    echo "This may legitately happen if there has been a large change at http://${g_hostname}/${g_remotepath}/"
    echo "If this message occurs repeatably, please investigate."
fi













