import os
import threading
import time
import traceback
from datetime import datetime
from http.client import RemoteDisconnected
from threading import Thread
from urllib.error import URLError

import logzero
from logzero import logger
from selenium.common.exceptions import NoSuchElementException, StaleElementReferenceException
from tqdm import tqdm

from config import REPORT_ERROR_COLLECTION_NAME, COLLECTION_FEED, LOG_FORMAT
from utils import open_driver, track_alert, get_mongo_collection, record_run_status

# Module name (filename without extension); doubles as the log-file name and
# as the MongoDB collection name for scraped data.
current_module = os.path.splitext(os.path.basename(__file__))[0]
# Rotating log file: 30 MB per file, 3 backups.
logzero.logfile(f"/tmp/{current_module}.log", maxBytes=30_000_000, backupCount=3,
                encoding='utf-8')
logzero.formatter(LOG_FORMAT)

COLLECTION_NAME = current_module
# Collection holding the scraped feed items (written by store_data).
db = get_mongo_collection(COLLECTION_NAME)
# Collection recording per-user scrape failures (written by handle_exception,
# cleared by delete_error).
error_db = get_mongo_collection(REPORT_ERROR_COLLECTION_NAME)
# Number of documents inserted during this run; incremented in store_data and
# returned by main.
insert_count = 0


def spider(feeds):
    """Crawl the Toutiao feed list and detail pages for each (uid, user_name).

    Known scraping/network errors are recorded via handle_exception; after a
    fully successful pass any previously recorded error for that user is
    cleared via delete_error.
    """
    recoverable = (NoSuchElementException, StaleElementReferenceException,
                   RemoteDisconnected, ConnectionRefusedError, URLError)
    with open_driver(width=800, height=900, cookie_domain='.toutiao.com') as driver, \
            track_alert(driver):
        for uid, user_name in feeds:
            try:
                entries = get_toutiao_list(driver, uid, user_name)
                handle_detail_imgs(driver, entries)
            except recoverable as e:
                handle_exception(e, uid, user_name)
            else:
                delete_error(uid, user_name)
        logger.info('完成')


def get_toutiao_list(driver, uid, user_name):
    """Open a user's mobile Toutiao feed and collect its list entries.

    Scrolls the page 30 times to trigger lazy loading, then returns a list of
    dicts {'uid', 'user_name', 'title', 'href'} — one per feed entry that
    shows at least one image link (entries without image links are skipped).
    """
    url = f'https://m.toutiao.com/m{uid}/'
    logger.info(f'请求 {url}')
    start = time.time()
    driver.get(url)
    end = time.time()
    logger.info(f'用时 {end-start} 秒')
    for turn in range(30):
        logger.info(f'滑动 {turn+1} 次')
        driver.scroll_buttom()
        time.sleep(1.5)
    entries = driver.find_elements_by_css_selector('div.mediaFeed>ul>li')
    logger.info(f'找到{len(entries)}个')
    data_list = []
    for entry in entries:
        link = entry.find_element_by_css_selector('div > div > div > a')
        title = link.text
        href = link.get_attribute('href')
        # Image anchors are only used as a filter here; the actual image URLs
        # are fetched later from the detail pages (handle_detail_imgs).
        img_links = [a.get_attribute('href') for a in entry.find_elements_by_css_selector(
            'div > div > div > div.img-list.y-box > a')]
        if not img_links:
            continue
        data = {'uid': uid, 'user_name': user_name, 'title': title, 'href': href}
        logger.info(data)
        data_list.append(data)
    return data_list


def handle_detail_imgs(driver, data_list):
    """Visit each entry's detail page and attach its image URLs.

    Stops visiting further entries for this user once store_data reports an
    already-stored href. Entries whose detail page yields no images are
    skipped with an error log. Returns data_list (mutated in place).
    """
    hit_known = False
    for item in data_list:
        if hit_known:
            logger.info('该用户下的其他详情不再爬取')
            break
        detail_url = item['href']
        logger.info(f'查看详情 {detail_url}')
        driver.get(detail_url)
        # Try the picture-gallery layout first; if it yields nothing, fall
        # back to the article (zixun) layout.
        imgs = get_detail_imgs_from_pics(driver) or get_detail_imgs_from_zixun(driver)
        if not imgs:
            logger.error('爬取到的图片为空')
            continue
        item['imgs'] = imgs
        item['update_time'] = datetime.now()
        logger.info(item)
        hit_known = store_data(item)
    return data_list


def get_detail_imgs_from_pics(driver):
    """Collect image URLs from a picture-gallery detail page.

    Waits 5 seconds for the page to render, then reads the lazy-load
    'data-src' attribute of each gallery image. Returns a (possibly empty)
    list of URLs.
    """
    logger.debug('从照片页获取图片')
    time.sleep(5)
    elements = driver.find_elements_by_css_selector(
        'div.imageList>ul.image-list>li>div>img')
    logger.info(f'获得图片{len(elements)}张')
    return [ele.get_attribute('data-src') for ele in elements]


def get_detail_imgs_from_zixun(driver):
    """Collect image URLs from an article (zixun) detail page.

    Waits 5 seconds for the page to render, then reads the 'src' attribute of
    each in-article image. Returns a (possibly empty) list of URLs.
    """
    logger.debug('从资讯页获取图片')
    time.sleep(5)
    elements = driver.find_elements_by_css_selector('div.article-content>div>p>img')
    logger.info(f'获得图片{len(elements)}张')
    return [ele.get_attribute("src") for ele in elements]


def store_data(data, _lock=threading.Lock()):
    """Insert *data* into the collection unless its href is already stored.

    :param data: document to insert; must contain an 'href' key.
    :param _lock: internal — a lock shared across all calls (the mutable
        default is deliberate here: it creates one module-wide singleton).
        It guards insert_count, which is updated concurrently from the two
        spider threads started in main; a bare `insert_count += 1` is a
        non-atomic read-modify-write and can lose increments.
    :return: True if the href already existed (caller should stop crawling
        this user), False if a new document was inserted.
    """
    global insert_count
    href = data['href']
    if db.find_one({'href': href}):
        logger.info(f'{href} 已经存储，不再插入')
        return True
    logger.info(f'{href}不存在，现在插入')
    logger.info(data)
    db.insert_one(data)
    with _lock:
        insert_count += 1
    return False


def handle_exception(e, uid, user_name):
    """Log a scraping error and record it in the error collection (at most once).

    :param e: the exception that aborted crawling this user
    :param uid: Toutiao user id being crawled
    :param user_name: display name of the user
    """
    # Capture the traceback once and reuse it for both the log and the record.
    tb = traceback.format_exc()
    logger.error(e)
    logger.error(tb)
    condition = {'desp': COLLECTION_NAME, 'uid': uid, 'user_name': user_name}
    if not error_db.find_one(condition):
        # str(e), not e: Exception instances are not BSON-serializable, so
        # inserting the raw object would make pymongo raise InvalidDocument
        # and the error record would never be written.
        error_db.insert_one(
            {'desp': COLLECTION_NAME, 'uid': uid, 'user_name': user_name, 'e': str(e),
             'e_traceback': tb, 'update_time': datetime.now()})


def delete_error(uid, user_name):
    """Remove the recorded scrape error for (uid, user_name), if any exists."""
    condition = {'desp': COLLECTION_NAME, 'uid': uid, 'user_name': user_name}
    existing = error_db.find_one(condition)
    if existing:
        error_db.delete_one(condition)
        logger.info('删掉异常记录')


def get_feeds():
    """Return [(uid, user_name), ...] for all feeds marked '头条图片'."""
    feed_collection = get_mongo_collection(COLLECTION_FEED)
    return [(doc['uid'], doc['user_name'])
            for doc in feed_collection.find({'purpose': '头条图片'})]


@record_run_status(current_module)
def main():
    """Crawl all feeds on two threads and return the number of inserts.

    The feed list is split in half; the first thread crawls the first half in
    order, the second crawls the second half in reverse order.
    """
    feeds = get_feeds()
    half = len(feeds) // 2
    workers = [
        Thread(target=spider, args=(feeds[:half],)),
        Thread(target=spider, args=(feeds[half:][::-1],)),
    ]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()
    return insert_count


# Script entry point: run the full crawl when executed directly.
if __name__ == '__main__':
    main()
