import urllib
import urllib2
import sys
import time
import datetime

from BeautifulSoup import BeautifulSoup
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

import structures

# Login information -- bugzilla.cfg is an XML-ish file parsed with BeautifulSoup.
f = open('bugzilla.cfg')
soup = BeautifulSoup(f.read())
f.close()
URL = soup.url.string
USERNAME = soup.username.string
PASSWORD = soup.password.string
CHECKED = soup.checked.string
try:
    TIMEOUT = int(soup.timeout.string)
except (ValueError, TypeError, AttributeError):
    # A missing <timeout> tag yields None (AttributeError/TypeError), a
    # non-numeric one a ValueError; in every case fall back to 10 seconds.
    TIMEOUT = 10

# sql alchemy initialization
if soup.db_handler.string == 'sqlite':
    enginestr = '{handler}:///{address}'.format(
        handler=soup.db_handler.string,
        address=soup.db_address.string)
elif soup.db_handler.string == 'mysql+pymysql':
    enginestr = '{handler}://{username}:{password}@{address}'.format(
        handler=soup.db_handler.string,
        username=soup.db_username.string,
        password=soup.db_password.string,
        address=soup.db_address.string)
else:
    # Fail fast with a clear message instead of an unhelpful NameError on
    # enginestr at create_engine() below.
    raise ValueError('Unsupported db_handler in bugzilla.cfg: {!r}'.format(
        soup.db_handler.string))
engine = create_engine(enginestr, echo=False)
Base = declarative_base()
Session = sessionmaker(bind=engine)
session = Session()

# urllib2 initialization: a cookie-aware opener so the Bugzilla session
# cookie from login() is reused by every later request.
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor())
urllib2.install_opener(opener)
del f, soup

class Logger:
    """Minimal stdout proxy.

    Currently forwards every write to the real stdout unchanged, so it
    behaves just like a normal print() statement; kept as a hook for
    future redirection/filtering.
    """

    def __init__(self):
        # Remember the real stdout at construction time.
        self.old_stdout = sys.stdout

    def write(self, s):
        target = self.old_stdout
        target.write(s)

def login():
    """Authenticate against the Bugzilla instance with the configured credentials.

    Posts the login form to URL; the HTTPCookieProcessor installed at module
    level retains the resulting session cookie for subsequent requests.

    Returns:
        True on success, False on any failure (best-effort: the error is
        printed, never raised).
    """
    params = urllib.urlencode(dict(Bugzilla_login=USERNAME,
        Bugzilla_password=PASSWORD, Bugzilla_restrictlogin=CHECKED))
    print('Logging in...')
    f = None
    try:
        f = opener.open(URL, params, timeout=TIMEOUT)
        f.read()  # drain the response so the login round-trip completes
        print('...done.')
        return True
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate; any network/HTTP error is reported as failure.
        print('...error: {}'.format(sys.exc_info()[0]))
        return False
    finally:
        # Close the response even if read() raised (the original leaked it).
        if f is not None:
            f.close()

def string_to_date(s):
    """Parse a 'YYYY-MM-DD...' style string into a datetime.date.

    Uses fixed character positions, so any single-character separators are
    accepted and trailing text (e.g. a time component) is ignored.
    """
    year = int(s[0:4])
    month = int(s[5:7])
    day = int(s[8:10])
    return datetime.date(year, month, day)

def download_products():
    """Scrape the product list from the bug-entry page and persist any new ones."""
    known = [product.name for product in all_products()]
    print('Downloading product list...')
    handle = opener.open(URL + 'show_bug.cgi?id=1', timeout=TIMEOUT)
    page = handle.read()
    handle.close()
    markup = BeautifulSoup(page)
    # NOTE(review): markup.select is the page's first <select> element (old
    # BeautifulSoup tag-attribute access), not a CSS selector call -- its
    # children minus bare newlines are assumed to be the product names;
    # confirm against the Bugzilla page markup.
    names = [child.string for child in markup.select.contents
             if child.string != u'\n']
    for name in names:
        if name not in known:
            session.add(structures.Product(name))
    session.commit()
    print('Products downloaded!')

def all_products():
    """Return every Product row stored in the database."""
    query = session.query(structures.Product)
    return query.all()

def all_bugs():
    """Return every Bug row stored in the database."""
    query = session.query(structures.Bug)
    return query.all()

def all_changes():
    """Return every Change row stored in the database."""
    query = session.query(structures.Change)
    return query.all()

def bug_ids():
    """Return the ids of all stored bugs in ascending order."""
    ids = [bug.bugid for bug in all_bugs()]
    ids.sort()
    return ids

def missing_bugs():
    """Return the set of bug ids absent locally, from 1 up to the highest stored id.

    Returns an empty set when no bugs are stored at all (the original
    raised ValueError from max() on an empty sequence).
    """
    ids = [int(bug.bugid) for bug in all_bugs()]
    if not ids:
        return set()
    return set(range(1, max(ids))).difference(ids)

def open_bug_count(product, date):
    """Count how many bugs of *product* were open on *date*."""
    query = session.query(structures.Bug).filter(
        structures.Bug.product == product)
    # Keep .count(True) rather than a truthiness sum so only values equal
    # to True are counted, exactly as before.
    flags = [bug.was_open(date) for bug in query.all()]
    return flags.count(True)

def timeline(product, resolution=7):
    """Return open-bug counts for *product*, sampled every *resolution* days.

    Samples run from the earliest bug.opened to the latest bug.died
    (inclusive). Returns an empty list when the product has no bugs
    (the original raised ValueError from min()/max() on empty sequences).

    Args:
        product: product name to filter bugs by.
        resolution: sampling step in days (default 7).
    """
    step = datetime.timedelta(resolution)
    # Fetch the bug list once instead of running the identical query twice.
    bugs = session.query(structures.Bug).filter(
        structures.Bug.product == product).all()
    if not bugs:
        return []
    beginning = min(bug.opened for bug in bugs)
    # NOTE(review): assumes bug.died is set (not None) for every bug;
    # confirm against structures.Bug for still-open bugs.
    ending = max(bug.died for bug in bugs)
    opencount = []
    day = beginning
    while day <= ending:
        opencount.append(open_bug_count(product, day))
        day += step
    return opencount

