import os
import random
import re
import time
import urllib.parse

from bs4 import BeautifulSoup
from selenium.webdriver.chrome import webdriver
from selenium.webdriver.chrome.options import Options

from dbbase import DBBase
from until import Until, catch_exception



until = Until()
dbbase = DBBase()

class WeiXinRequest():
    """Scrape WeChat article search results from Sogou's weixin search.

    Usage: call ``init_browser`` and ``getCodeurl`` to display a QR-code
    login, poll ``checkislogin`` until the user has logged in, grab the
    session with ``get_cookies``, then crawl with ``hand_source``.
    """

    def __init__(self, keyword):
        # Keyword to search for; adminname_id is a fixed owner id stored
        # alongside every scraped record.
        self.keyword = keyword
        self.adminname_id = '123123'

    def url_encode(self, word):
        """Percent-encode *word* for use as a URL query value.

        Equivalent to the original ``urlencode({'': word})`` + regex round
        trip: ``quote_plus`` performs the same encoding directly (spaces
        become ``+``, multi-byte characters are percent-encoded).
        """
        return urllib.parse.quote_plus(str(word))

    def hand_source(self, cookies):
        """Crawl up to 100 result pages for ``self.keyword``.

        For every listed article: fetch it, follow the "read full text"
        link when present, extract title/date/author/body and store the
        record via ``dbbase.insert_weixin``.  Page- and article-level
        failures are recorded via ``dbbase.insert_pagecount_state``.
        """
        x = until.request_add_cookies(cookies)
        # The encoded keyword never changes; compute it once instead of
        # re-encoding it on every page.
        keyword_url = self.url_encode(self.keyword)
        max_pages = 100
        for page_count in range(1, max_pages + 1):
            print('#########这是第' + str(page_count) + '页数据#####')
            login_url = 'http://weixin.sogou.com/weixin?query={}&_sug_type_=&sut=1571&lkt=5%2C1497837680560%2C1497837681243&' \
                        's_from=input&_sug_=y&type=2&sst0=1497837681355&page={}' \
                        '&ie=utf8&w=01019900&dr=1'.format(keyword_url, str(page_count))

            data_html = x.get(login_url, headers=until.useragent())
            data_html.encoding = 'utf-8'
            soup = BeautifulSoup(data_html.text, 'lxml')
            try:
                content_ul = soup.find('ul', {'class': 'news-list'})
                content_li = content_ul.find_all('li')
            except Exception as e:
                # No result list on the page: either Sogou served a captcha
                # page or something else went wrong.
                title_tag = soup.find('title')
                if title_tag is not None and title_tag.text == '搜狗搜索':
                    print('出现验证码')
                    # Rebuild the session and fetch this page again.
                    x = until.request_add_cookies(cookies)
                    data_html = x.get(login_url, headers=until.useragent())
                    data_html.encoding = 'utf-8'
                    soup = BeautifulSoup(data_html.text, 'lxml')
                    content_ul = soup.find('ul', {'class': 'news-list'})
                    if content_ul is None:
                        # Still blocked after the retry: record the failure
                        # and skip the page instead of crashing with an
                        # unhandled AttributeError.
                        dbbase.insert_pagecount_state(keyword=self.keyword, page=str(page_count), pagecount='0', state='3', excepritoncontent=str(e), type='2')
                        continue
                    content_li = content_ul.find_all('li')
                else:
                    print(e)
                    dbbase.insert_pagecount_state(keyword=self.keyword, page=str(page_count), pagecount='0', state='3', excepritoncontent=str(e), type='2')
                    # BUG FIX: content_li was unbound on this path and the
                    # loop below raised NameError; skip to the next page.
                    continue

            for i, li in enumerate(content_li):
                print('#########这是第' + str(i + 1) + '条数据#####')
                url_soup = BeautifulSoup(str(li), 'xml')
                url_s = url_soup.find('a', id='sogou_vr_11002601_title_' + str(i)).get('href')
                # Sogou HTML-escapes '&' in hrefs; undo the escaping.
                url_z = url_s.replace('amp;', '')
                page_content = until.requestGet(cookies='0', url=url_z)
                page_content_soup = BeautifulSoup(page_content, 'lxml')
                # If the page only shows a teaser, follow the
                # "read full text" link and parse that page instead.
                check_result = self.check_all_content(page_content_soup)
                if check_result:
                    page_content_text = until.requestGet(cookies='0', url=check_result)
                    page_content_soup = BeautifulSoup(page_content_text, 'lxml')

                try:
                    title = until.remove_space(page_content_soup.find('h2', id='activity-name').text)
                    created_data = until.remove_space(page_content_soup.find('em', id='post-date').text)
                    created_user = until.remove_space(page_content_soup.find('a', id='post-user').text)
                    content = until.remove_space(page_content_soup.find('div', id='js_content').text)
                    # Persist the scraped article.
                    dbbase.insert_weixin(title=title, content=content,
                                         contentcreated=created_data,
                                         created_content_username=created_user,
                                         keyword=self.keyword,
                                         adminname_id=self.adminname_id,
                                         )
                except Exception as e:
                    # A missing element on the article page; record which
                    # article failed and carry on with the rest.
                    print('抓取单个页面，出现异常')
                    dbbase.insert_pagecount_state(keyword=self.keyword, page=str(page_count), pagecount=str(i), state='3', excepritoncontent=str(e), type='2')

            # Stop when there is no "next page" link.
            if soup.find('a', {'id': 'sogou_next'}) is None:
                print('没有下一页，采集结束')
                return

    def check_all_content(self, page_content_soup):
        """Return the href of the "read full text" link, or ``False``.

        ``find`` returns ``None`` (TypeError on subscript) when the link is
        absent; subscripting raises KeyError when the tag has no ``href``.
        """
        try:
            return page_content_soup.find('a', {'id': 'js_share_source'})['href']
        except (TypeError, KeyError):
            return False

    def init_browser(self):
        """Start a Chrome instance pointed at weixin.sogou.com.

        NOTE(review): chromedriver and Chrome binary paths are hard-coded
        for the original author's machine — adjust before deploying.
        """
        chrome_driver_file = os.path.abspath(r"C:\Program Files\tool\chormDriver\chromedriver.exe")
        os.environ["webdriver.chrome.driver"] = chrome_driver_file
        option = Options()
        option.binary_location = 'D:\ChromePortable\ChromePortable.exe'
        # Randomise the user agent for every browser session.
        user_agent = until.random_useragent()
        option.add_argument('--user-agent=' + user_agent)
        self.browser = webdriver.WebDriver(executable_path=chrome_driver_file, chrome_options=option)
        self.browser.get('http://weixin.sogou.com/')

    @catch_exception
    def getCodeurl(self):
        """Open the login dialog and return the QR-code image URL.

        The QR code lives in an iframe served from open.weixin.qq.com;
        its relative ``src`` is joined with that host.
        """
        self.browser.get('http://weixin.sogou.com/')
        time.sleep(2)
        self.browser.find_element_by_xpath('//*[@id="loginBtn"]').click()
        time.sleep(3)
        self.browser.switch_to.frame(0)
        time.sleep(2)
        iframe_soup = BeautifulSoup(self.browser.page_source, 'xml')
        codeurl_src = iframe_soup.find('img', {'class': 'qrcode lightBorder'})['src']
        codeurl = 'https://open.weixin.qq.com' + codeurl_src
        print(codeurl)
        return codeurl

    def checkislogin(self):
        """Return True while login is still required, False once logged in.

        (The name is historical: a True result means the caller should
        show the QR code again.)
        """
        time.sleep(3)
        try:
            logintext = self.browser.find_element_by_xpath('//*[@id="login_yes"]/a').text
            print(logintext)
            if logintext == '':
                return True
            print('登陆了')
            return False
        except Exception:
            # The logged-in element does not exist yet.
            print('没有登陆')
            return True

    def get_cookies(self):
        """Return the browser session cookies as a list of dicts."""
        return self.browser.get_cookies()

if __name__ == '__main__':
    # Manual entry point: log in through the browser QR code, harvest the
    # session cookies, then crawl the search results for the keyword.
    wxs = WeiXinRequest('C#')

    wxs.init_browser()
    wxs.getCodeurl()
    # Poll up to three times for a completed login, refreshing the QR code
    # each time the user has not logged in yet.
    attempts = 0
    while attempts < 3:
        time.sleep(30)
        if not wxs.checkislogin():
            break
        print('请求登陆')
        wxs.getCodeurl()
        attempts += 1
    cookies = wxs.get_cookies()
    wxs.browser.close()
    print(cookies)

    wxs.hand_source(cookies)