import config
import logging
import sys
import requests
# from meta_redir import meta_redir_url
from time import sleep
from random import uniform
from bs4 import BeautifulSoup


def parse_desc(soup, meme=None):
    """Extract the description text from a parsed meme photo page.

    The description is expected in the first ``<div>`` of the first
    ``.msg`` element inside the ``#MPhotoContent`` container.

    Parameters
    ----------
    soup : BeautifulSoup
        Parsed HTML of the photo page.
    meme : optional
        Database record; when given, the extracted text is persisted via
        ``meme.update(info=...)`` / ``meme.save()``.  (The original code
        referenced an undefined ``meme`` name here, which raised a
        NameError that the bare ``except`` silently swallowed — so the
        function always returned None.)

    Returns
    -------
    str or None
        The description text, or None when the expected markup is absent.
    """
    try:
        info = soup.find(id="MPhotoContent").find_all(
            class_="msg")[0].find_all('div')[0].text
    except (AttributeError, IndexError):
        # find() returned None, or the expected children were missing.
        logging.error("No info found.")
        return None

    if meme is not None:
        meme.update(info=info)
        meme.save()
        logging.info('Meme '+meme.fbid +
                     ' updated in db with following info: '+info)

    return info


def parse_meme_page(meme, html):
    """Parse a saved meme photo page and extract its metadata.

    Parameters
    ----------
    meme : database record with ``fbid``, ``update`` and ``save``.
    html : str
        Raw HTML previously fetched for this meme.

    Returns
    -------
    False
        When the page title is not "Photo" (the meme is marked as a
        dead link and persisted).
    tuple
        ``(info, example_memes_fbids)`` otherwise.
    """
    soup = BeautifulSoup(html, 'html.parser')

    # soup.title is None when the document has no <title>; treat that the
    # same as a wrong title instead of raising AttributeError.
    title = soup.title.text if soup.title is not None else None
    if title != "Photo":
        logging.error(
            "Problem w/ info request for %s, title received is %s", meme.fbid, title)
        meme.update(deadlink=True)
        meme.save()
        return False

    info = parse_desc(soup)
    example_memes_fbids = parse_example_memes(meme, html)
    return info, example_memes_fbids


def parse_example_memes(meme, html):
    # TODO: stub — example-meme extraction is not implemented yet.
    # Always returns None, so callers currently receive None for
    # example_memes_fbids (see parse_meme_page).
    return


def get_info(meme):
    """Fetch, cache and parse the info page for *meme*.

    Flow: skip known dead links; short-circuit if info is already in the
    db; otherwise download the page HTML (once, with one retry on network
    failure), cache it to disk, and parse the cached file.

    Parameters
    ----------
    meme : database record with ``fbid``, ``deadlink``, ``info``,
        ``html_path``, ``update`` and ``save``.

    Returns
    -------
    None
        Dead link, or the HTTP request did not return 200.
    True
        Info already present, or the cached page was parsed.
    """
    if meme.deadlink:
        return None
    # TODO: Check if html_parsed
    if meme.info is not None:
        return True

    if meme.html_path is None:
        url = config.info_url_prefix + meme.fbid
        logging.debug('Getting info at ' + url)

        try:
            r = requests.get(url, headers=config.headers,
                             cookies=config.cookies)
        except requests.RequestException:
            # Transient network failure: back off once, then retry and
            # let a second failure propagate.
            logging.error('Error when requesting, trying to sleep 10s.')
            sleep(10)
            logging.error('Retrying...')
            r = requests.get(url, headers=config.headers,
                             cookies=config.cookies)

        # Randomized politeness delay around the configured sleep time,
        # to avoid a detectable fixed request cadence.
        t = uniform(config.sleeptime / 2, 1.5 * config.sleeptime)
        logging.debug('Just made a request, sleeping…')
        sleep(t)

        if r.status_code != 200:
            # NOTE: the original called r.raise_for_status() just before
            # this check, which made this branch unreachable for 4xx/5xx;
            # log-and-return-None is the clearly intended behaviour, so
            # the raise was dropped.
            logging.debug('Failed getting ' + meme.fbid + ' info: HTTP error')
            return None

        path = "data/db/files/html/" + meme.fbid + ".html"
        logging.debug("trying to save html file")
        with open(path, "w", encoding="utf-8") as f:
            f.write(r.text)
        logging.info('HTML saved at ' + path)
        meme.update(html_path=path, deadlink=False)
        meme.save()

    if meme.html_path is not None:
        with open(meme.html_path, "r", encoding="utf-8") as f:
            html = f.read()
        info, example_memes_fbids = parse_meme_page(meme, html)
        return True

    # Download failed and nothing is cached (original fell off the end
    # implicitly); make the None return explicit.
    return None
