from selenium import webdriver
from utils.animator import TimeSleepAnimator
from utils.constants import CrawlField
from utils.cms_generator import CMSGenerator
from utils.configs import Config
from retry import retry
import time

class BingCrawler(object):
    """Scrape Bing (English results) via a headless Chrome Selenium session.

    Uses the Selenium 3 ``find_element_by_*`` API, consistent with the
    ``executable_path`` constructor argument used below.
    """

    def __init__(self, browser_path, crawl_depth=3, time_interval=30):
        """Open a headless Chrome on Bing and switch the UI to English.

        :param browser_path: filesystem path to the chromedriver executable.
        :param crawl_depth: maximum number of result pages to walk per keyword.
        :param time_interval: seconds the animator pauses between keywords.
        """
        self._url = "https://cn.bing.com/?ensearch=1"
        self._crawl_depth = crawl_depth
        option = webdriver.ChromeOptions()
        option.add_argument('headless')
        self._browser = webdriver.Chrome(executable_path=browser_path, options=option)
        self._browser.maximize_window()
        self._browser.get(self._url)
        self._browser.implicitly_wait(5)  # wait for the redirect to settle
        en_button = self._browser.find_element_by_id("est_en")
        en_button.click()
        self._browser.implicitly_wait(5)
        self._animator = TimeSleepAnimator(time_interval=time_interval)

    def _crawl(self):
        """Walk up to ``self._crawl_depth`` pages of the current result set.

        :return: list of ``CrawlField(url, title, brief_content)`` records.
        """
        all_search_res = []
        for page_index in range(1, self._crawl_depth + 1):
            print("[bing_crawler] current page: {}".format(page_index))

            search_res = self._browser.find_elements_by_css_selector("#b_results li.b_algo")
            for res in search_res:
                a_tag = res.find_element_by_css_selector("h2 a")
                url = a_tag.get_attribute("href")
                print(url)
                title = a_tag.text
                try:
                    brief_content = res.find_element_by_css_selector("div.b_caption > p").text
                except Exception:
                    # Some results wrap the snippet in extra markup; fall back
                    # to the whole caption text with newlines flattened.
                    brief_content = " ".join(res.find_element_by_css_selector("div.b_caption").text.split("\n"))
                    print("[bing_crawler-WARNING] Title: {} 含特殊标签！\n[bing_crawler-WARNING] brief_content: {}"
                          .format(title, brief_content))

                fields = CrawlField(url, title, brief_content)
                all_search_res.append(fields)

            # Follow the "Next page" link; when it is absent we are on the
            # last available page and stop early.
            try:
                next_page_url = self._browser.find_element_by_css_selector(
                    "#b_results > li.b_pag > nav > ul  a[title='Next page']") \
                    .get_attribute("href")
                self._browser.get(next_page_url)
                self._browser.implicitly_wait(5)  # wait for the redirect
                time.sleep(2)
            except Exception:
                print("[bing_crawler] total page num: {}, exit the crawler.".format(page_index))
                break

        return all_search_res

    @retry(tries=3, delay=10)
    def crawl(self, key_word):
        """Search ``key_word`` on Bing and return the scraped result fields.

        Retries up to 3 times (10 s apart) on any exception via ``@retry``.
        """
        # Type the keyword into the search box.
        input_bar = self._browser.find_element_by_id("sb_form_q")
        input_bar.clear()
        input_bar.send_keys(key_word)
        self._browser.implicitly_wait(5)

        # Click the search button and wait for the results page.
        search_button = self._browser.find_element_by_id("sb_form_go")
        search_button.click()
        self._browser.implicitly_wait(5)  # wait for the redirect
        time.sleep(2)

        # Collect the results.
        ret = self._crawl()
        return ret

    def crawl_many(self, key_word_list):
        """Crawl every keyword in ``key_word_list``.

        :param key_word_list: list of search keywords.
        :return: dict mapping each keyword to
                 ``{"amount": <result count>, "data": <list of CrawlField>}``;
                 keywords that fail twice map to ``{"amount": 0, "data": []}``.
        """
        assert isinstance(key_word_list, list), "please provide a list object."
        crawl_data = {}
        for key_word in key_word_list:
            print("[bing_crawler] >>>>>>>> crawling key word: {} <<<<<<<<".format(key_word))
            try:
                crawl_res = self.crawl(key_word)
                crawl_data[key_word] = dict(amount=len(crawl_res), data=crawl_res)
                print("[bing_crawler] crawl amount: ", len(crawl_res))
                self._animator.draw()
            except Exception:
                # The session may have lost the results page; reload the start
                # page once and retry the whole keyword.
                try:
                    print("[bing_crawler-crawl_many] lost page.. reloading >>>>>>")
                    self._browser.get(self._url)
                    crawl_res = self.crawl(key_word)
                    crawl_data[key_word] = dict(amount=len(crawl_res), data=crawl_res)
                    print("[bing_crawler] crawl amount: ", len(crawl_res))
                    self._animator.draw()
                except Exception as e2:
                    print("[bing_crawler] crawling key word: {} FAILED, failed reason:{}".format(key_word, e2))
                    # BUG FIX: record the failure for THIS keyword only; the
                    # original replaced the whole accumulated dict, discarding
                    # every previously crawled keyword's results.
                    crawl_data[key_word] = dict(amount=0, data=[])

        return crawl_data

    def quit(self):
        """Shut down the browser session and release its resources."""
        self._browser.quit()


if __name__ == "__main__":
    # Load crawler settings from the YAML config next to the script.
    config_file_path = r".\configs\config.yml"
    config_obj = Config(config_file_path)
    keyword_list = config_obj.key_word_list
    chrome_driver_path = config_obj.chrome_driver_path
    crawl_depth = config_obj.crawl_depth
    crawl_interval = config_obj.crawl_interval
    bing_crawler = BingCrawler(chrome_driver_path, crawl_depth, crawl_interval)
    try:
        ret = bing_crawler.crawl_many(keyword_list)
        cms_generator = CMSGenerator()
        cms_generator.generate(ret)
    finally:
        # Always release the browser, even if crawling or generation fails.
        bing_crawler.quit()
