#!/usr/bin/env python

################################################################################
#   Copyright 2010 Jason Hoover
#
#
#   This file is part of AutoCache.
#
#   AutoCache is free software: you can redistribute it and/or modify
#   it under the terms of the GNU Lesser General Public License as published by
#   the Free Software Foundation, either version 3 of the License, or
#   (at your option) any later version.
#
#   AutoCache is distributed in the hope that it will be useful,
#   but WITHOUT ANY WARRANTY; without even the implied warranty of
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#   GNU General Public License for more details.

#   You should have received a copy of the GNU Lesser General Public License
#   along with AutoCache.  If not, see <http://www.gnu.org/licenses/>.
################################################################################

import threading

from ac_common import *
from ac_cache import *

################################################################################
# ac_concurrentrequest - A function which allows ac to support concurrency for
# squid 2.5 and above. Concurrent requests look different than normal requests,
# and come in the format: "ID URL ip/fqdn ident method key-pairs"
#
# ID         - A unique ID for the request.
# URL        - The incoming URL.
# ip         - Request source IP.
# fqdn       - The request fqdn (hostname).
# ident      - The user ident, if any.
# method     - The HTTP method.
# key-pairs  - Unknown.
################################################################################

def ac_concurrentrequest(request):
    """Handle one concurrent-protocol rewrite request.

    ``request`` is the already-split input line, which squid sends in the
    format "ID URL ip/fqdn ident method key-pairs" (so request[0] is the
    ID, request[1] the URL and request[4] the HTTP method).

    Every reply line written to stdout must begin with the request ID so
    squid can pair answers with outstanding requests; we therefore try to
    answer even on malformed input, otherwise squid waits forever on the
    unanswered ID.
    """
    try:
        ac_log.debug("Got request " + str(request))

        # Only GET/HEAD requests are cacheable; answer anything else with
        # an empty rewrite immediately.
        if request[4] not in ("GET", "HEAD"):
            sys.stdout.write(request[0] + " \n")
            sys.stdout.flush()
            return

        # Check the URL against the configured target regexps.
        for regexp in targets:
            if regexp.match(request[1]):

                ac_log.debug("Got a match: " + request[1])

                # ac_checkup returns the cached target, or a false value
                # when nothing usable is cached yet.
                cached_target = ac_checkup(request[1])
                if cached_target:
                    # Cache hit: answer with the rewritten target.
                    sys.stdout.write(request[0] + " " + cached_target + "\n")
                    sys.stdout.flush()
                else:
                    # Cache miss: answer with an empty rewrite, then kick
                    # off a refresh of the object.
                    sys.stdout.write(request[0] + " \n")
                    sys.stdout.flush()
                    ac_update(request[1])
                # Only the first matching regexp may answer.
                return

        # No regexp matched: empty rewrite.
        ac_log.debug("Returning: " + request[0])
        sys.stdout.write(request[0] + " \n")
        sys.stdout.flush()

    except IndexError:
        ac_log.debug("There were values missing from: " + str(request))
        # Still answer squid if we at least received an ID, so the
        # request is not left hanging (fix: original wrote nothing here).
        if request:
            sys.stdout.write(request[0] + " \n")
            sys.stdout.flush()

    except Exception:
        # Narrowed from a bare except so SystemExit/KeyboardInterrupt
        # are not silently swallowed.
        ac_log.exception("SUBMIT BUG REPORT - An unusual error occured processing: " +
                         str(request))


################################################################################
# takerequest - This is the request processor, which is defined differently
# depending on whether concurrency is enabled. 
################################################################################

if ac_settings['concurrency']:
    def takerequest(request):
        """Dispatch one split request line to a worker thread.

        With concurrency enabled each request carries its own ID, so
        requests can be handled out of order in parallel threads.
        """
        try:

            # main() already drops the empty (EOF) line before splitting,
            # but a whitespace-only line still splits to [] - skip those
            # too instead of spawning a thread that can only fail.
            # (Fix: the original compared the list against '', which is
            # never true once main() has split the line.)
            if not request:
                ac_log.debug("STDIN was Null. Closing.")
                return

            # Don't be afraid to spawn more threads as necessary. It's up
            # to squid.conf to not feed us too many at once.
            threading.Thread(target=ac_concurrentrequest,
                             args=(request,)).start()

        except Exception:
            # Narrowed from a bare except so interpreter shutdown
            # exceptions are not swallowed.
            ac_log.exception("SUBMIT BUG REPORT - Some other exception! Terminating.")
            return

else:
    def takerequest(request):
        try:
    
            # Must be a GET or HEAD to matter.
            if request[3] not in ("GET","HEAD"):
                print

            ac_log.debug("Got request: "+str(request))

            # Check the request against the regexps.
            for regexp in targets:
                if regexp.match(request[0]):

                    ac_log.debug("Got a match: " + request[0])

                    # Store the cached target to see if there's 
                    # anything. ac_checkup returns either the cached
                    # target, or nothing. Unfortunately, python prints
                    # "False" as "False" and not a null. 
                    cached_target = ac_checkup(request[0])
                    if cached_target:
                
                        # Print the cached version if we won.
                        sys.stdout.write(cached_target)
                        sys.stdout.flush()

                    else:

                        # Or we want to start a new download if we've lost.
                        sys.stdout.write(" \n")
                        sys.stdout.flush()
                        threading.Thread( target=ac_update, 
                                          args=(request[0], )).start()

        except IndexError:    
            print
            ac_log.warning("Some data was missing from the request: " + 
                         str(request))

        except:
            print
            ac_log.exception("SUBMIT BUG REPORT - Something went very -very- wrong processing the request:" +
                             str(request))

################################################################################
# main - Here we define the main loop. Note that there are two different
# versions of takerequest: one which supports concurrency using threads of the
# ac_concurrentrequest function, and one which inlines the same logic, but with
# slightly different field offsets.
################################################################################

def main():
    """Read rewrite requests from stdin until EOF, one line each.

    Each line is stripped, whitespace-split, and handed to whichever
    takerequest() implementation was selected at import time.
    """
    try:
        while True:
            line = sys.stdin.readline()

            # readline() returns '' only at EOF; an empty request line
            # still contains its newline.
            if line == '':
                ac_log.debug("STDIN was Null. Closing.")
                return

            # Split the request into its whitespace-separated fields.
            # According to the squid wiki these come in the format:
            # "URL ip/fqdn ident method key-pairs"
            takerequest(line.rstrip().split())

    except (EOFError, KeyboardInterrupt):
        ac_log.debug("Got EOF or CTRL-C, all done!")
        return

    except:
        ac_log.exception("Some other horrible, miserable error occured during normal operation. SUBMIT BUG REPORT.")
        return

# Module entry point: compile the target regexps once, then run the
# request loop.  This executes at import time - the file is itself the
# squid helper program.
try:
    ac_log.debug("Starting AC.")
    # `targets` is the module-global list of compiled URL regexps read
    # by takerequest()/ac_concurrentrequest().
    targets=ac_compiletargets()
    ac_log.info("ac Started.")
    main()

except:
    ac_log.exception("Some other horrible, miserable error occured during startup. Check configs.")
