from dbtools.mongo import get_mongodb_facebook_connection
from dbtools.mysql import get_connection
from multiprocessing.dummy import Pool as ThreadPool
from selenium import webdriver
import requests
import time
import re

from etc.logger import get_logger
from facepy import GraphAPI

'''
get token from facebook
'''
# NOTE(review): hardcoded Graph API access token committed to source — a security
# risk, and tokens expire; move to env/config. Same applies to the cookies below.
graph = GraphAPI('EAAKD3LXIZCaEBADQE7cNTYNovlDM18N71fM9g1glcKgEgGf55lePsSD7d6LAYEvoZAcZCtvgqOFsk0IWILlrLwG8ezAEb5rMdjbf3njUf3ZCpgaGN5ZAGu3DQm4rzN7PIfORT2vvwi8IfkoQIu8v7Tp2EIyXyUkEZD',timeout=10)
logger = get_logger('facebook_logger')
connection = get_connection()  # MySQL connection, used by main() for organizationsummary
mongodb_account_connection = get_mongodb_facebook_connection('uuid_facebook_id')
successful_list =[]  # uuids already resolved; populated by main() from MongoDB
# NOTE(review): the dict key is the literal string 'cookies', not a cookie name —
# requests' `cookies=` expects {cookie_name: value}; verify this is intentional.
cookies ={'cookies':'datr=JkbAVpibhFqSA-yJ0RKrptjU; a11y=%7B%22sr%22%3A0%2C%22sr-ts%22%3A1457576123347%2C%22jk%22%3A0%2C%22jk-ts%22%3A1457576123347%2C%22kb%22%3A1%2C%22kb-ts%22%3A1457576123347%2C%22hcm%22%3A0%2C%22hcm-ts%22%3A1457576123347%7D; lu=gAucchk2KkTUdAn_5DjaUSJQ; c_user=100007352672406; fr=0UsuSHhyzDlF4OJLM.AWW3ECJNR90kpQS0IR9_YkoPZcw.BWwEY0.Gh.FdO.0.0.AWVgcd3B; xs=132%3A3PuTfyd7NUCtjA%3A2%3A1463718264%3A17736; csm=2; s=Aa4a4q7soglamzSL.BXT6lW; sb=Q5E-V0vnroGvIB4UGnwjxpe7; act=1464841012224%2F4; p=-2; presence=EDvF3EtimeF1464841044EuserFA21B07352672406A2EstateFDutF1464841044592Et2F_5b_5dElm2FnullEuct2F1464831113BEtrFA2loadA2EtwF3772844546EatF1464841042995CEchFDp_5f1B07352672406F1CC'}


def browser_get_id(basecurl):
    """Resolve a facebook URL to (facebook_id, facebook_name) by fetching the
    profile page with the module-level session cookies and scraping the
    ``fb://profile/...`` meta tag from the HTML.

    basecurl: a facebook URL whose 4th '/'-separated segment is the page name.
    Returns (facebook_id, facebook_name), or (None, None) on any failure
    (network error, unexpected page content).
    """
    name = basecurl.split('/')[3]
    url = 'https://www.facebook.com/%s' % name
    try:
        # The request is inside the try so network/timeout errors also honor
        # the (None, None) failure contract instead of propagating.
        req = requests.get(url, cookies=cookies, timeout=10)
        facebook_name = req.url.split('/')[-1]
        matches = re.findall(r'content="fb://profile/(\w*?)"', req.text)
        # findall may return an empty list or an empty capture; both mean the
        # page did not contain a usable profile id.
        if not matches or not matches[0] or not facebook_name:
            print(req.text)
            raise ValueError('could not extract facebook id/name from %s' % url)
        return matches[0], facebook_name
    except Exception as e:
        print(basecurl)
        logger.exception(e)
        return None, None


def crawl_html(entry):
    """Resolve the facebook id/name for one organization row and store it in
    MongoDB.

    entry: mapping with 'uuid' and 'facebook_url' keys (a MySQL row dict).
    Any failure is logged and swallowed so a batch run can continue.
    """
    facebook_url = entry['facebook_url']
    # NOTE: the original wrapped this in `while True`, but every path broke out
    # on the first iteration — the loop was dead scaffolding and is removed.
    try:
        res = graph.get(
            path='',
            id=facebook_url
        )
        if 'og_object' in res:
            # URL resolved to a shared-link object rather than a page/profile;
            # record it and skip.
            logger.error(facebook_url)
            return
        if 'id' in res and 'name' not in res:
            # Graph API returned an id without a name; fall back to scraping.
            facebook_id, facebook_name = browser_get_id(facebook_url)
        elif 'id' not in res:
            print(res)
            raise ValueError('graph response has no id for %s' % facebook_url)
        else:
            facebook_id, facebook_name = res['id'], res['name']
        print(res)

        if facebook_id and facebook_name:
            mongodb_account_connection.insert_one({'uuid': entry['uuid'], 'facebook_id': facebook_id, 'facebook_name': facebook_name})
            print('%s : %s'%(facebook_url, res['id']))
    except Exception as e:
        logger.exception(e)
        print(facebook_url)



def main():
    """Crawl facebook ids for every organization whose uuid is not already in
    MongoDB, storing each result via crawl_html().
    """
    # Populate the module-level list of already-resolved uuids from MongoDB,
    # and keep a set alongside it: set membership is O(1) versus the original
    # O(n) list scan per row.
    for each in mongodb_account_connection.find({}):
        successful_list.append(each['uuid'])
    done = set(successful_list)

    with connection.cursor() as cursor:
        sql = "SELECT `uuid`,`facebook_url` FROM `organizationsummary` WHERE `facebook_url` IS NOT NULL"
        cursor.execute(sql)
        for row in cursor.fetchall():
            if row['uuid'] in done:
                continue
            crawl_html(row)
    # Removed: unused `work_list`, unused `tread_num`, and the commented-out
    # PhantomJS/ThreadPool scaffolding that was never executed.


if __name__ == '__main__':
    main()