# -*- encoding: utf-8 -*-
# jupiter 2010 parser (NOTE: header previously said 2003; code logs 'jupiter 2010-parser')
############################################################################
#    Copyright (C) 2007 by Håvard Gulldahl                                 #
#    havard@gulldahl.no                                                    #
#                                                                          #
#    This program is free software; you can redistribute it and/or modify  #
#    it under the terms of the GNU General Public License as published by  #
#    the Free Software Foundation; either version 2 of the License, or     #
#    (at your option) any later version.                                   #
#                                                                          #
#    This program is distributed in the hope that it will be useful,       #
#    but WITHOUT ANY WARRANTY; without even the implied warranty of        #
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the         #
#    GNU General Public License for more details.                          #
#                                                                          #
#    You should have received a copy of the GNU General Public License     #
#    along with this program; if not, write to the                         #
#    Free Software Foundation, Inc.,                                       #
#    59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.             #
############################################################################

import logging, re, urllib
from datetime import datetime
from time import strptime
from cgi import parse_qs, parse_qsl
import BeautifulSoup
import jeje, jeje.parser

class parser(jeje.parser.ejeje_parser):
    """Screen-scraping parser for 'Jupiter'-style public journal HTML pages.

    Extends ``jeje.parser.ejeje_parser`` and relies on helpers from that base
    class (``sett_info``, ``legg_til_post``, ``trekk_sammen_streng``,
    ``absoluttifiser``, ``suppeslurper``) plus BeautifulSoup 3 for HTML
    traversal.  Python 2 code (old ``except E, (e)`` syntax).
    """

    def parse_enkeltjournal(self, suppe, uri):
        """Parse one journal page — and all its follow-up pages — into posts.

        Arguments:
          suppe -- BeautifulSoup 3 document ("soup") of the journal page
          uri   -- the page's own URI, used to absolutize relative links

        Returns ``self.journalposter``, the list of posts accumulated via
        ``self.legg_til_post()``.  Re-raises any error hit while extracting
        the case-handler ("saksbehandler") sub-table; swallows errors on the
        attachment lookup and on pagination (pagination errors end the loop).
        """
        logging.info('starter jupiter 2010-parser')
        logging.debug(uri)
        # The page date follows a fixed prefix in the <h1> heading,
        # e.g. "Offentlig journal for 01.01.2010" -> "01.01.2010".
        dato = suppe.h1.string[len('Offentlig journal for '):]
        logging.debug('fant dato %s', dato)
        self.sett_info(dato)
        # Walk paginated result pages; the loop breaks when no usable
        # 'next page' link is found (see the except at the bottom).
        while True:
            logging.debug('finner tittelposter i suppa')
            _journaltabell = suppe.find('table', 'mx')
            # Direct children of the journal table: first row is the column
            # header, the rest are one row per journal entry.
            _tittelposter = _journaltabell.findChildren(recursive=False)[1:]
            logging.debug('fant %d titler', len(_tittelposter))
            for i, tp in enumerate(_tittelposter):

                # Column layout of each row, as seen in the source HTML
                # (dated / type / description / sender-recipient / case
                # handler / case no. / serial no. / document / attachments):
                #<th class="mxheader" valign="bottom" nowrap id="publisering1">Datert</th> 
                #<th class="mxheader" valign="bottom" nowrap id="publisering2">Type</th> 
                #<th class="mxheader" valign="bottom" nowrap id="publisering4">Beskrivelse</th> 
                #<th class="mxheader" valign="bottom" nowrap id="publisering5">Avsender/Mottaker</th> 
                #<th class="mxheader" valign="bottom" nowrap id="publisering7">Saksbehandler</th> 
                #<th class="mxheader" valign="bottom" nowrap id="publisering8">Saksnr</th> 
                #<th class="mxheader" valign="bottom" nowrap id="publisering9">Løpenr</th> 
                #<th class="mxheader" valign="bottom" nowrap id="publisering10">Dokument</th> 
                #<th class="mxheader" valign="bottom" nowrap id="publisering11">Vedlegg</th> 

                # Unpack the nine <td class="mxcell"> cells positionally;
                # each _x is the cell's .contents list (mixed Tags/strings).
                _dokumentdato, _type, _tittel, _adressat, _saksbehandler, _arkivsaksnr, \
                _refnr, _uri, _vedlegg = [b.contents for b in tp.findAll('td', 'mxcell')]
                # Keep only the plain-text parts of the title cell (drops
                # any markup Tags mixed into the cell contents).
                tittel = ''.join([ s for s in _tittel if isinstance(s, BeautifulSoup.NavigableString)])
                logging.debug('arbeider med post #%d: %s', i+1, tittel)

                post = self.legg_til_post()
                post.sett_tittel(tittel)
                # Case-number cell is "arkivsaksnr-dokumentnr"; split on '-'.
                arkivsaksnr, dokumentnr = self.trekk_sammen_streng(_arkivsaksnr).split('-')
                #logging.debug('arkivsaksnr: %s, refnr: %s', arkivsaksnr, _refnr)
                post.sett_arkivsaksnr(arkivsaksnr)
                post.sett_dokumentnr(dokumentnr)
                post.sett_refnr(self.trekk_sammen_streng(_refnr))
                adressat = ''.join([ s for s in _adressat if isinstance(s, BeautifulSoup.NavigableString)])
                if adressat: 
                    post.sett_adressat(adressat)
                try:
                    # The case-handler cell embeds a label/value <table>;
                    # odd-indexed <td>s are the values: name, phone, title.
                    # NOTE(review): in Python 2 this comprehension's 'i'
                    # leaks and shadows the outer loop index — harmless here
                    # since enumerate() rebinds it each iteration, but fragile.
                    saksbehinfo = _saksbehandler[1].table
                    navn, telefon, stilling = [z.string for i, z in enumerate(saksbehinfo.findChildren('td')) if i % 2 != 0]
                    person = post.sett_saksbehandler(navn)
                    person.telefon = telefon
                    person.stilling = stilling
                except:
                    # NOTE(review): bare 'except: raise' is a no-op wrapper —
                    # any failure here still propagates to the caller.
                    raise 
                #logging.debug('dokumentdato: %s', self.trekk_sammen_streng(_dokumentdato))
                # Document date is dd.mm.yyyy; convert via strptime tuple.
                post.sett_dokumentdato(datetime(*strptime(self.trekk_sammen_streng(_dokumentdato), '%d.%m.%Y')[0:6]))
                # First character of the type cell — presumably the journal
                # direction code (e.g. incoming/outgoing); confirm upstream.
                post.sett_retning(_type[0][0])
                try:
                    # If the document cell holds a <span>, the document is
                    # exempt with a legal-basis reference and not public.
                    post.sett_lovreferanse(self.trekk_sammen_streng(_uri[1].span))
                    post.sett_offentlig(False)
                except TypeError:
                    # No <span>: if it is a Tag instead, it is a link to the
                    # document itself — store an absolutized, trimmed URI.
                    if isinstance(_uri[1], BeautifulSoup.Tag):
                        _u, _q = self.absoluttifiser(unicode(_uri[1]['href']), uri).split('?')
                        # stripping unnecessary query parts, to trim the uri
                        _qparts = ( (k,v) for k,v in parse_qsl(_q) if k != 'description' )
                        _uuri = "%s?%s" % (_u, urllib.urlencode(tuple(_qparts)))
                        post.sett_dok_uri(_uuri)
                except IndexError:
                    # Document cell has no second element: nothing to record.
                    pass
                try:
                    # Second Tag in the attachment cell wraps the <a> link.
                    v = [_v for _v in _vedlegg if isinstance(_v, BeautifulSoup.Tag)][1]
                    vv = v.find('a')
                    logging.debug("Fant vedlegg: %s (%s)", vv.string, vv['href'])
                    # TODO: fetch the attachment and store it
                except Exception, (e):
                    # Best-effort: rows without attachments are expected.
                    pass
#                    logging.exception(e)
                post.kontroller_felter()
            # fetch the next page's soup
            try: 
                neste_uri = self.absoluttifiser(suppe.find('a', 'next_page')['href'], uri)
                logging.debug("slurper inn neste uri: %s", neste_uri)
                suppe = self.suppeslurper(neste_uri)
#                suppe = BeautifulSoup.BeautifulSoup(urllib.urlopen(neste_uri).read(),
#                                                    smartQuotesTo=None,
#                                                    convertEntities='html')
                #jeje.suppe = suppe
            except Exception, (e):
                # No (or broken) 'next page' link: pagination is done.
                logging.exception(e)
                break
            


        return self.journalposter
