#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Based on pysquila.py 19 2005-07-21 15:05:47Z miguelzinho $

# pylog.py -    gathers squid log data and populate database using sqlalchemy

# Copyright (C) 2011 Brivaldo Alves da Silva Junior <condector@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
#

import getopt
import sys
import os
import time
import re
import locale
import gettext
from urlparse import urlparse
from stat import *


#class proxy_it():
#    """Funcao que vai armazenar os acessos de um IP enquanto for valido com ultima acesso menor que navigate_window"""
#    def __init__(dtime, duration, bytes, domain):
#        self.dtime = dtime
#        self.duration = duration
#        self.bytes = bytes
#        self.domain = domain

#    def update(upduration, bytes):
#        self.duration += upduration
#        self.bytes += upbytes
        
#class ip_access():
#    "Cada IP que acessa a Internet"
#    def __init__(client_ip):
#        self.client_ip = client_ip 
#        self.url = []

#    def add_url(dtime, duration, bytes, domain):
        #TODO: adiciona uma nova URL ou atualiza uma pre-existente
#        for i in self.url:
            #TODO: falta verificar se o dtime e menor que navigate_window
#            if i.domain == domain:
                #Atualize a entrada
#                i.update(duration, bytes)
#        else:
#            self.url.append(proxy_it(dtime, duration, client_ip, bytes, domain))

def gather_data():
    # Ultima horario coletado
    ultima = '0' 

    # Janela de tempo apos um acesso deixa de ser continuo
    navigate_window = 5

    # Contador de entradas coletadas
    counter = 0    

    print _('Reading log to dump data...')
    for file in args:
        print "Parsing: %s " % file[1] 
        print "Modified: %s " % time.strftime('%Y-%m-%d %H:%M:%S', \
                                        time.localtime(float(file[0])))

        for line in open(file[1], 'r'):
            if line.strip() == '':
                continue
            log_data = re.split('\s+', line)

            # Hora da entrada 
            dtime = log_data[0]

	    # Ignora as entradas anteriores a ultima leitura
            if dtime <= ultima:
                continue

            # Parser para extrair as infomacoes de acesso
            duration = int(log_data[1])
            client_ip = log_data[2]
            cache_cod_res, http_cod_res = log_data[3].split('/')
            bytes = int(log_data[4])
            metodo = log_data[5]
            url = log_data[6]
            
            # Separa a URL para ficar mais facil encontrar o dominio dos sites
            domain, path = urlparse(url)[1:3]
            ident = log_data[7]
            hi_code = log_data[8]
            mime = log_data[9]
            if '/' not in mime:
                mime = 'none/none'
	    try:
	            mtype, msubtype = mime.split('/')
	    except:
		pass
            
            #TODO: logica de temporizacao de entradas (usuario por site onde o ultimo acesso e inferior a navigate_window)
           

            # Tupla de Informacoes
            #(None, dtime, duration, client_ip, cache_cod_res, \
            values = (None, dtime, client_ip, cache_cod_res, \
                     #  http_cod_res, bytes, metodo, domain, path, ident, \
                       http_cod_res, bytes, domain)

            print values
            counter = counter + 1

    return counter

def purge_data():
    """Delete stale entries from the database (not implemented yet)."""
    pass
   

# MAIN part of the script. Decides where it's running.
_ = gettext.gettext
args = sys.argv[1:]

# order the log files by last motification date, first the older, last the newer
result = []
for f in args:
    try:
            open(f)
    except IOError, (strerror):
            print strerror
            sys.exit(2)
    result.append([os.stat(f)[ST_MTIME],f])

    result.sort()
    args = result

    count = gather_data()
    
    #Limpa dados antigos
    #purge_res = purge_data()

    sys.exit()

