import datetime
import time
import pymssql
from lxml import etree
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
import re
import requests
from Save_database import Save_database


class parse_innert_page(object):
    """Crawl JD.com product detail pages for this week's pending SKUs and
    persist the parsed fields through Save_database.

    Workflow: Main() -> skuAndgo() -> (go_inner_page + parse_page) per SKU.
    """

    def __init__(self):
        options = webdriver.ChromeOptions()
        # options.add_argument('--headless')
        # Hide the "Chrome is being controlled by automated test software" banner.
        options.add_experimental_option('excludeSwitches', ['enable-automation'])
        # Selenium 4 removed the deprecated `chrome_options` keyword; `options` works
        # on both Selenium 3 (as of 3.8) and 4.
        self.driver = webdriver.Chrome(options=options)
        self.wait = WebDriverWait(self.driver, 10)
        # NOTE(review): credentials are hard-coded; consider moving to config/env.
        self.db = pymssql.connect(host='192.168.0.186', user='sa', password='987987abc#', port='1433',
                                  database='Crawler.Paterson', charset="utf8")
        self.cursor = self.db.cursor()
        self.sd = Save_database(self.db)
        # NOTE(review): `hreader` is a typo for "header" but the name is kept in case
        # external code references it; it is unused within this file.
        self.hreader = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.100 Safari/537.36',
            'cookie': 'shshshfpa=2c926ff4-8d02-fa3a-ddfd-23e79906b17e-1555386146; shshshfpb=b1wXg3kUCfe2I7ILBMwx1Fg%3D%3D; __jdu=23304726; user-key=d419e56f-305f-46ee-8cd3-3a3b57e926b3; cn=0; unpl=V2_ZzNtbRJRQhRwW05TK01dBmILR1sSAEtAIgwVUHkQVFZiAhMJclRCFX0UR1JnGVsUZgsZX0tcRhxFCEdkeBBVAWMDE1VGZxBFLV0CFSNGF1wjU00zQwBBQHcJFF0uSgwDYgcaDhFTQEJ2XBVQL0oMDDdRFAhyZ0AVRQhHZHgbXgRiBRFacmdEJUU4QlB6EV8CVwIiXHIVF0lyCEdRfR4RBmUBE1hEVEQldDhF; __jdc=122270672; __jdv=122270672|baidu-pinzhuan|t_288551095_baidupinzhuan|cpc|0f3d30c8dba7459bb52f2eb5eba8ac7d_0_a7004b96ae0249d7af9df5b5389b400e|1563241128778; areaId=19; ipLoc-djd=19-1601-50283-50386; wlfstk_smdl=ic9bxne2qm3l2vmcra5148c3q0qmul4d; logintype=qq; unick=lyd250551502; pin=lyd250551502; npin=lyd250551502; _tp=fA3zuJuFuZpl6djXBzhJqA%3D%3D; pinId=3us4vgqMYLcCFevRxgpQBQ; 3AB9D23F7A4B3C9B=LQH3YXRGB4PSPIDDEHDQ6LJ3BDOICSA73HHTA7DR3UVTXZDXVD4TLG72RBV7O5UFPGUQEA2FGCXTKDRTJE5IECMXEA; wq_logid=1563271298.122409089; wxa_level=1; retina=1; cid=9; webp=1; mba_muid=23304726; sc_width=452; visitkey=67858372683121082; PPRD_P=UUID.23304726-LOGID.1563271299832.1097871109; sk_history=22075052164%2C; __wga=1563271341847.1563271299797.1563271299797.1563271299797.3.1; __jd_ref_cls=MDownLoadFloat_AppArouse; shshshfp=a39757718ecd2b1fb1bb9b1ca4a1933d; __jda=122270672.23304726.1555318285.1563334788.1563341543.33; shshshsID=1b14ac369ee8e1cf47d54aa983990008_9_1563344003435; __jdb=122270672.9.23304726|33.1563341543; thor=C515976ECD173F29CD69BDD3BAB680C7E857F218748D7A1516D4543A2F203EA8E288290E8E742AE11EBFA7565E3A2B47A1EBC30BFE25864B7CF9DC6516732CCD56B594BD6CA4816800EE297C054B420999A5E3722F169486C5B0F6DFEF178AC8831D5CCF48EAF6F624773115A42E3604D02758C31F29CE3B35596DE54A99CC2A79EDB5BE1071DD9F4998AA3F29ED5807'
        }

    @staticmethod
    def _clean(value, chars="[]'"):
        """Remove every character in *chars* from *value* and trim whitespace.

        Used to turn str(list-of-xpath-results) like "['abc', 'def']" into a
        plain "abc, def" string. The default drops the list markers and quotes;
        callers pass a custom *chars* when they must keep quotes (see
        Detailed_information) or drop extra characters ('\\n', '+', '￥', ...).
        """
        for ch in chars:
            value = value.replace(ch, '')
        return value.strip()

    def get_current_week(self):
        """Return the current ISO week as 'monday/sunday', e.g. '2019-07-15/2019-07-21'."""
        today = datetime.date.today()
        monday = today - datetime.timedelta(days=today.weekday())
        sunday = monday + datetime.timedelta(days=6)
        return '%s/%s' % (monday, sunday)

    def get_sku(self):
        """Fetch SKUs listed this week that have no detail row yet.

        Returns a list of one-element tuples (pymssql rows).
        """
        week = self.get_current_week()
        # Parameterized query -- safe against injection.
        sql = 'SELECT sku from Paterson_Product_List where sku not in (SELECT sku from Paterson_Detailed_Information where Update_week = %s) and update_week = %s'
        self.cursor.execute(sql, (str(week), str(week)))
        return self.cursor.fetchall()

    def go_inner_page(self, sku):
        """Navigate the browser to the JD item page for *sku* (a DB row tuple)."""
        # str(tuple) gives e.g. "('12345',)" -- strip the tuple punctuation.
        sku = str(sku).replace('\'', '').replace(',', '').replace('(', '').replace(')', '')
        self.driver.get('https://item.jd.com/' + str(sku) + '.html')

    def parse_page(self, sku):
        """Parse the currently loaded product page and save one detail row.

        sku -- one-element DB row tuple; sku[0] is the JD item id.
        Pages missing a mandatory field (brand / shop name) raise IndexError,
        which is logged and the item skipped -- the crawl is best-effort.
        """
        try:
            html = etree.HTML(self.driver.page_source)
            sku = sku[0]
            dir_structure1 = html.xpath('//div[@class="w"]/div[1]/div/a/text()')
            dir_structure2 = html.xpath('//div[contains(@class, "ellipsis")]/text()')
            Classification_list = self._clean(str(dir_structure1) + ', ' + str(dir_structure2).replace(' ', ''))
            Theme = self._clean(str(html.xpath('//div[@class="sku-name"]/text()')), chars="[]'\n ")
            P_price = self._clean(str(html.xpath('//span[@class="p-price"]/span[2]/text()')))
            J_Fans_price = self._clean(str(html.xpath('//span[@class="p-price-fans"]/span/text()')))
            J_xsth_sale = self._clean(str(html.xpath('//span[@class="J-xsth-sale"]/a/s/text()')))
            J_plus_price = self._clean(str(html.xpath('//span[@class="p-price-plus"]/span/text()')), chars="[]'￥")
            J_user_price = self._clean(str(html.xpath('//span[@class="p-price-user"]/span/text()')))
            Comment_count = self._clean(str(html.xpath('//*[@id="comment-count"]/a/text()')), chars="[]'+")
            if '万' in Comment_count:
                # "2.5万" means 25000 -- keep the decimal part (the old `\d*`
                # match truncated "2.5万" to 20000).
                Comment_count = int(float(re.match(r'[\d.]+', Comment_count).group(0)) * 10000)
            Product_brand = html.xpath('//*[@id="parameter-brand"]/li/@title')[0]
            # Keep the single quotes here: Product_category's regex below anchors on them.
            Detailed_information = self._clean(str(html.xpath('//div[@class="p-parameter"]/ul[2]/li/text()')), chars="\n []")
            Shop_name = html.xpath('//*[@id="crumb-wrap"]/div/div[2]/div[2]/div[1]/div/a/text()')[0]
            Shop_star = self._clean(str(html.xpath('//div[@class="star-gray"]/@title')))
            Commodity_evaluation = self._clean(str(html.xpath('//*[@class="score-parts"]/div[1]/span/em/text()')))
            Logistics_evaluation = self._clean(str(html.xpath('//*[@class="score-parts"]/div[2]/span/em/text()')))
            After_sales_evaluation = self._clean(str(html.xpath('//*[@class="score-parts"]/div[3]/span/em/text()')))
            Product_category = self._clean(str(re.findall(r"类别：.*?'", str(Detailed_information))), chars="[]\"'")
            self.sd.save_innert_data(sku, Theme, P_price, J_Fans_price, J_xsth_sale, J_plus_price, J_user_price,
                                     Classification_list, Comment_count, Product_brand, Detailed_information,
                                     Shop_name, Shop_star, Commodity_evaluation, Logistics_evaluation,
                                     After_sales_evaluation, Product_category, self.get_current_week())
        except IndexError as err:
            # A mandatory xpath matched nothing (e.g. delisted item) -- log and skip
            # instead of silently swallowing the failure.
            print('parse_page: skipping sku %s (%s)' % (sku, err))

    def skuAndgo(self):
        """Visit and parse every SKU still missing detail data this week."""
        for row in self.get_sku():
            self.go_inner_page(row)
            time.sleep(1)  # give the page time to render dynamic prices
            self.parse_page(row)
            # self.get_Summaries_json(row)

    def Main(self):
        """Entry point: crawl all pending SKUs, then release browser and DB."""
        try:
            self.skuAndgo()
        finally:
            # Always release resources -- the original leaked the Chrome driver
            # and skipped db.close() entirely on any exception.
            self.driver.quit()
            self.db.close()


if __name__ == '__main__':
    # Build the crawler and run the full weekly detail-page pass.
    crawler = parse_innert_page()
    crawler.Main()
