import os
import gzip
import re
import sys
import htmldata
from BeautifulSoup import *

# Global Variables
# Separator string marking the start of each crawled document inside a
# concatenated crawl file; ParseDoc splits on it.  NOTE: "SEPERATOR" is a
# historical misspelling kept for compatibility with existing callers.
__SEPERATOR__ = '==P=>>>>=i===<<<<=T===>=A===<=!Junghoo!==>'
# Content-Type values accepted by downstream processing.
validContentTypes = ('text/html','text','text/plain','application/xhtml+xml')

def ParseDoc(site_content , DB):
    '''
    Parse the document headers of site_content into a list and return it.

    Each element is [start position, end position, header dictionary],
    where the positions index into site_content.  A record's end position
    is filled in when the next separator is found; the final record keeps
    an end position of 0.

    Feature:
      - No filter any type of document

    Side effect: every 'URL' header value seen is inserted into DB via
    insert_url().
    '''
    doc_list = []
    for m in re.finditer(__SEPERATOR__, site_content):  # one match per document
        # Too little content left after the separator to hold a header.
        if len(site_content) - m.end() < 100:
            break

        # Headers are newline-separated "Key: value" lines within the
        # first 900 characters after the separator.
        headers = {}
        for line in site_content[m.end():m.end() + 900].split('\n'):
            if line == '':
                continue
            space = line.find(' ')
            key = line[:space][:-1]          # drop the trailing ':' from the key
            value = line[space + 1:].strip()
            headers[key] = value

            # 'Content-Type' is the last header we parse.
            if key == 'Content-Type':
                break
            # Record the document's own URL in the URL database.
            if key == 'URL':
                insert_url(value, DB)

        if headers:
            # Close the previous record: its end is this record's start.
            if doc_list:
                doc_list[-1][1] = m.start()
            doc_list.append([m.start(), 0, headers])
    return doc_list

def ParseURL(content, hostname, DB):
    '''
    Parse URL form webpage to List
    >>> ParseURL( <string of content> , <string hostname> , <bsddb object,None> )

    Returns the list of URLs extracted from content (resolved against
    hostname).  When DB is given, each URL is also inserted into DB via
    insert_url().  Returns [] when content contains no 'href' at all.
    '''
    # Cheap pre-check: skip the expensive HTML parse when the content
    # cannot possibly contain links.
    if not re.search('href', content, re.IGNORECASE):
        return []
    links = htmldata.urlextract(htmldecode(content), hostname)
    if DB is not None:
        return [insert_url(u.url, DB) for u in links]
    return [u.url for u in links]

def insert_url(url,DB):
    '''
    Insert unique url into database
    >>> insert_url( <string url> , <bsddb object> )

    DB maps url -> str(insertion index).  The url is added only when not
    already present, and the database is flushed to disk after a new
    insert.  Returns the url unchanged so callers can use this inside
    list comprehensions.
    '''
    # `in` works on both Python 2 and 3; dict.has_key() is Python-2 only.
    if url not in DB:
        DB[url] = str(len(DB))
        # Flush only when something actually changed.
        DB.sync()
    return url

def htmldecode(string):
    '''
    Decode HTML character entities in string via BeautifulStoneSoup.

    Best-effort: returns the input unchanged when parsing fails, rather
    than crashing the crawl.
    '''
    try:
        return str( BeautifulStoneSoup(string,convertEntities=BeautifulStoneSoup.HTML_ENTITIES) )
    except Exception:
        # Narrowed from a bare except: still best-effort, but no longer
        # swallows KeyboardInterrupt / SystemExit.
        return string