# -*- coding: utf-8 -*-
import datetime
import urllib
import urllib2
import logging
import signal
from bs4 import BeautifulSoup

from model import (
    User, UserDetail,
    Recipe, session)

logging.getLogger().setLevel(logging.DEBUG)


def handler(signum, frame):
    """Raise on SIGALRM so a hung network call in curl() can be aborted."""
    raise Exception("timeout")
# Install the watchdog handler once at import time.
signal.signal(signal.SIGALRM, handler)


def curl(url, data=None, retry=5, timeout=12):
    while(retry):
        signal.alarm(timeout)
        try:
            logging.debug('curling %s' % url)
            content = ""
            if data:
                encoded_data = urllib.urlencode(data)
                f = urllib2.urlopen(url, encoded_data)
            else:
                f = urllib2.urlopen(url)
            content = f.read()
            f.close()
            # cancel timer
            signal.alarm(0)
            break
        except Exception, e:
            logging.warning('curl exception: %s' % e)
            retry = retry - 1
    if retry:
        return content
    else:
        raise Exception('network problem')


def handle_recipe(url):
    """Scrape one recipe page and persist it as a Recipe row (flushed)."""
    soup = BeautifulSoup(curl(url))

    def text_of(tag, cls):
        # Unicode dump of the first matching element, '' when absent.
        hits = soup.find_all(tag, class_=cls)
        return unicode(hits[0]) if hits else ''

    def img_src(cls):
        # src of the <img> inside the first matching div, '' when absent.
        hits = soup.find_all("div", class_=cls)
        return unicode(hits[0].img['src']) if hits else ''

    # Author id is the second path segment of the profile link.
    author_href = soup.find_all("a", class_="g-recipe-user-ga")[0]['href']
    user_id = int(author_href.split('/')[2])

    # Normal-size picture first, big variant as fallback.
    pic_url = img_src("g-recipe-img") or img_src("g-recipe-img-big")

    title = unicode(soup.find_all("h1", class_="g-page-title")[0].string)
    intro = text_of("div", "g-recipe-intro")
    # Ingredients table and steps are mandatory -- let IndexError surface
    # if the page layout changed.
    material = unicode(soup.find_all("table", class_="g-recipe-ing")[0])
    detail = unicode(soup.find_all("div", class_="g-recipe-steps")[0])
    tips = text_of("div", "g-recipe-tips")

    session.add(Recipe(
        user_id=user_id, pic_url=pic_url,
        title=title, intro=intro,
        material=material, detail=detail,
        tips=tips, ctime=datetime.datetime.now(), dish_counter=0))
    session.flush()


def handle_user(url):
    """Scrape a cook's profile page and persist User + UserDetail rows."""
    soup = BeautifulSoup(curl(url))

    # Second-to-last path segment of the profile URL is the user id
    # (kept as a string, matching the original behavior).
    user_id = url.split('/')[-2]
    # Page title carries the nickname plus a fixed 3-char suffix we strip.
    title_tag = soup.find_all('h1', class_='g-page-title')[0]
    nickname = unicode(title_tag.string[:-3])
    session.add(User(
        id=user_id, email=url, nickname=nickname,
        password='', ctime=datetime.datetime.now()))

    profile = soup.find_all('div', class_='u-mini-profile')
    intro = unicode(profile[0]) if profile else ''
    avatar = soup.find_all('div', class_='u-myinfo-avatar')[0]
    session.add(UserDetail(
        id=user_id, intro=intro, gender='', birthday='',
        hometown='', city='', job='',
        pic_url=unicode(avatar.img['src'])))
    session.flush()


def coldboot():
    """Seed the database from the homepage's headline recipe list."""
    base_url = 'http://www.xiachufang.com'
    front_page = BeautifulSoup(curl(base_url + '/'))
    # The headline recipes live in the first topic-list box.
    topic_box = front_page.find_all("ul", class_="h-topic-list-box")[0]
    for item in topic_box.find_all('li'):
        handle_recipe(base_url + item.a['href'])


def link_user():
    """Fetch profile data for every distinct recipe author not yet stored."""
    base_url = 'http://www.xiachufang.com'
    author_ids = [row[0] for row in
                  session.query(Recipe.user_id.distinct()).all()]
    for user_id in author_ids:
        # Skip authors we already imported.
        # NOTE(review): the filter casts to int while handle_user stores the
        # id as a str -- whether these match depends on the model column
        # type; verify against model.py.
        already_there = session.query(User).filter(
            User.id == int(user_id)).first()
        if already_there:
            continue
        handle_user(base_url + '/cook/%s/' % user_id)
