import os
import re
import time
import pickle
import string
import requests
from scrapy import Selector
from selenium import webdriver
from Spider.util.getJsonPort import GetPort
from Spider.util.UserAgentPool import MyUserAgent
from Spider.util.MyFuncClass import Item, handle_deal, HttpRequest
from Spider.settings import GET_CITY_URL, STATUS_CODE, USER, PASSWORD, STATIC_FILE_PATH, CHROMEDRIVER_PATH


class Parser(object):
    """Meituan food-listing spider: discovers cities, pages through each city's
    JSON shop API, and refreshes login cookies via an attached Chrome session."""

    def __init__(self):
        # Cookie dict ({name: value}) populated by get_cookies(); attached to
        # every HttpRequest this parser emits.
        self.cookies = None

    def parse_page(self):
        """Fetch the city index page and yield one HttpRequest per city (page 1).

        Yields:
            HttpRequest: first listing-page request per city, carrying
            'city', 'city_english' and 'page' in meta for parse_detail.
        """
        response = requests.get(GET_CITY_URL,
                                headers={"User-Agent": MyUserAgent.random()},
                                allow_redirects=False)
        if response.status_code in STATUS_CODE:
            select = Selector(response=response)
            for letter in string.ascii_uppercase:
                citys = select.css(f'#city-{letter} span.cities a::text').extract()
                city_urls = select.css(f'#city-{letter} span.cities a::attr(href)').extract()
                # zip pairs each city name with its href and tolerates
                # mismatched list lengths (range(len(...)) would IndexError).
                for city, city_url in zip(citys, city_urls):
                    # Pull the subdomain out of a protocol-relative href,
                    # e.g. '//bj.meituan.com/...' -> 'bj'.  Raw string: '\.'
                    # is an invalid escape in a plain literal.
                    match = re.match(r'//(.*?)\.', city_url)
                    if match is None:
                        # Malformed href — skip rather than crash on .group(1).
                        continue
                    city_english = match.group(1)
                    page = 1
                    yield HttpRequest(url=GetPort(city=city, city_english=city_english, page=page).get_url,
                                      callback=self.parse_detail, cookies=self.cookies,
                                      meta={'city': city, 'city_english': city_english, 'page': page})

    def parse_detail(self, http_response_obj):
        """Parse one JSON listing page: yield an Item per shop, then a request
        for the next page; yield False when the city has no more results.

        Args:
            http_response_obj: response wrapper exposing .json() and .meta
                with 'city', 'city_english' and 'page'.
        """
        item = Item()
        data_dict = http_response_obj.json()
        city = http_response_obj.meta.get('city')
        city_english = http_response_obj.meta.get('city_english')
        page = http_response_obj.meta.get('page')
        # item field -> key inside each entry of the API's poiInfos list
        mapping = {'shopId': 'poiId', 'image': 'frontImg', 'title': 'title', 'score': 'avgScore',
                   'commentNum': 'allCommentNum', 'address': 'address', 'avgPrice': 'avgPrice', 'deal': 'dealList'}
        # status == 0 means the API answered successfully
        if data_dict.get('status') == 0:
            # Guard against a missing/None 'data' payload — the original
            # chained .get() calls would raise AttributeError here.
            data = data_dict.get('data') or {}
            poi_infos = data.get('poiInfos') or []
            # Successful response AND this page actually contains shops
            if data.get('totalCounts', 0) > 0 and poi_infos:
                for poi in poi_infos:
                    for key, value in mapping.items():
                        item[key] = poi.get(value)
                    if item['deal']:
                        item['deal'] = handle_deal(item['deal'])
                    else:
                        item['deal'] = '无'
                    item['shopUrl'] = 'https://www.meituan.com/meishi/{}'.format(poi.get('poiId'))
                    item['city'] = city
                    yield item
                # Chain the next page of the same city.
                page += 1
                yield HttpRequest(url=GetPort(city=city, city_english=city_english, page=page).get_url,
                                  callback=self.parse_detail, cookies=self.cookies,
                                  meta={'city': city, 'city_english': city_english, 'page': page})
            # Successful response but past the last page — signal the caller to stop.
            else:
                yield False

    def get_cookies(self, url):
        """Load cached cookies from disk, or log in via Selenium to obtain and
        cache them; store the result on self.cookies as a {name: value} dict.

        NOTE(review): attaches to an already-running Chrome with remote
        debugging on 127.0.0.1:9222 — that browser must be started beforehand.

        Args:
            url: login page URL (only visited when no cookie cache exists).
        """
        cookies = []
        cookie_file_path = os.path.join(STATIC_FILE_PATH, 'spider.cookie')
        if os.path.exists(cookie_file_path):
            # 'with' guarantees the handle is closed (the original leaked it).
            with open(file=cookie_file_path, mode='rb') as fh:
                cookies = pickle.load(fh)

        options = webdriver.ChromeOptions()
        options.add_argument('--disable-extensions')
        options.add_experimental_option('debuggerAddress', '127.0.0.1:9222')
        browser = webdriver.Chrome(executable_path=CHROMEDRIVER_PATH, options=options)
        try:
            if not cookies:
                browser.get(url)
                user = browser.find_element_by_css_selector('#login-email')
                user.click()
                user.send_keys(USER)
                pwd = browser.find_element_by_css_selector('#login-password')
                pwd.click()
                pwd.send_keys(PASSWORD)
                browser.find_element_by_xpath('//*[@data-mtevent="login.normal.submit"]').click()
                # Crude wait for the post-login redirect to settle before
                # harvesting cookies.
                time.sleep(5)
                cookies = browser.get_cookies()
                with open(file=cookie_file_path, mode='wb') as fh:
                    pickle.dump(cookies, fh)
        finally:
            # Always detach, whether we logged in or the cache was warm
            # (and even if Selenium raised mid-login).
            browser.close()

        self.cookies = {cookie['name']: cookie['value'] for cookie in cookies}


if __name__ == '__main__':
    # Manual entry point: warm the cookie cache before running the spider.
    from Spider import settings

    parser = Parser()
    parser.get_cookies(settings.GET_COOKIE_URL)
    

