# coding=utf-8
from bs4 import  BeautifulSoup
import re, urlparse
import logging
from spider.models import *
from NeedLoginException import *
logging.basicConfig(level=logging.INFO)

class HtmlParser(object):
    """Parse Taobao shop HTML into spider model objects.

    Two families of entry points exist:
      * the ``*_by_ajax`` methods handle HTML fragments fetched via ajax,
        where class attributes arrive with escaped quotes
        (e.g. ``<dd class="\\&quot;detail\\&quot;">``);
      * the plain methods handle full pages fetched via selenium.
    """

    ##############################################################################
    #                                  ajax
    ##############################################################################

    def parse_productListHtml_by_ajax(self, html_cont, shop):
        """Extract products from one ajax product-list HTML fragment.

        :param html_cont: raw HTML string of one product-list page
        :param shop: Shop model instance the products belong to
        :return: list of (unsaved) ``Product`` instances
        :raises Exception: when ``html_cont`` is ``None``
        """
        if html_cont is None:
            raise Exception('parse_productListHtml 需要解析的html=None')

        soup = BeautifulSoup(html_cont, 'html.parser')
        product_list = []
        # Lazy %s formatting: the original concatenated a str with
        # type(soup), which raised TypeError on every call.
        logging.info('souptype=%s', type(soup))

        # In the ajax payload the class attribute is escaped:
        # <dd class="\&quot;detail\&quot;"> parses to class [u'\\"detail\\"'].
        dd_elem_list = soup.find_all('dd', class_=u'\\"detail\\"')
        for dd_elem in dd_elem_list:
            # <a class="\&quot;item-name" ...> carries both name and URL.
            a_elem = dd_elem.find('a', class_=u'\\"item-name')
            # Product name.
            product_name = a_elem.get_text()
            logging.info('product_name=' + product_name)
            # Product URL.
            product_url = a_elem['href']
            logging.info('product_url=' + product_url)
            # Product price: <span class="\&quot;c-price\&quot;">.
            price_span_elem = dd_elem.find('span', class_=u'\\"c-price\\"')
            product_price = price_span_elem.get_text()
            logging.info('product_price=' + product_price)

            product = Product(name=product_name, price=product_price, url=product_url, shop=shop)
            product_list.append(product)

        return product_list

    def parse_productListHtml_list_to_product_list_by_ajax(self, productList_html_list, shop):
        """Parse every ajax product-list page and concatenate the results."""
        product_list = []
        for productList_html in productList_html_list:
            new_product_list = self.parse_productListHtml_by_ajax(productList_html, shop)
            logging.info('这一页有' + str(len(new_product_list)) + '件商品')
            product_list.extend(new_product_list)

        return product_list

    ##############################################################################
    #                                selenium
    ##############################################################################

    def parse_productListHtml(self, html_cont, shop):
        """Extract products from a selenium-fetched product-list page.

        :param html_cont: full HTML of one product-list page
        :param shop: Shop model instance the products belong to
        :return: list of (unsaved) ``Product`` instances
        :raises Exception: when ``html_cont`` is ``None``
        :raises NeedLoginException: when the product grid is missing (the
            crawler was most likely redirected to a login page)
        """
        if html_cont is None:
            raise Exception('parse_productListHtml 需要解析的html=None')

        # Build the parse tree.
        soup = BeautifulSoup(html_cont, 'html.parser')

        product_list = []
        # <div class="shop-hesper-bd grid"> is the product grid container.
        div_item = soup.find('div', class_="shop-hesper-bd grid")
        if div_item is None:
            raise NeedLoginException('parse_productListHtml 没爬取到商品信息，可能需要登陆了')

        # Each product sits in a <dl class="item ..."> tile.
        dl_item_list = div_item.find_all('dl', class_='item')
        for dl_item in dl_item_list:
            # <img src="//img.alicdn.com/..."> is protocol-relative;
            # prefix the scheme to get a usable URL.
            product_img = dl_item.find('img')
            product_img_url = 'https:' + product_img['src']
            # <dd class="detail"> wraps name / url / price.
            dd_item = dl_item.find('dd', class_='detail')
            # <a class="item-name J_TGoldData" href="//item.taobao.com/...">
            a_elem = dd_item.find('a', class_="item-name J_TGoldData")
            # Product name.
            product_name = a_elem.get_text()
            # Product URL (protocol-relative in the page).
            product_url = 'https:' + a_elem['href']
            # Product price: <span class="c-price">75.00 </span>
            price_span_elem = dd_item.find('span', class_='c-price')
            product_price = price_span_elem.get_text()

            logging.info('product name=%s' % product_name)
            logging.info('product url=%s' % product_url)
            logging.info('product price=%s' % product_price)
            product = Product(name=product_name, price=product_price, url=product_url, image=product_img_url, shop=shop)

            product_list.append(product)

        return product_list

    def parse_productListHtml_list_to_product_list(self, productList_html_list, shop):
        """Parse every selenium product-list page; pages that need a login
        are logged and skipped instead of aborting the whole batch."""
        product_list = []
        for productList_html in productList_html_list:
            try:
                new_product_list = self.parse_productListHtml(productList_html, shop)
                logging.info('这一页有' + str(len(new_product_list)) + '件商品')
                product_list.extend(new_product_list)
            except NeedLoginException:
                logging.info('parse_productListHtml失败  可能是需要登陆了')
        return product_list

    ##############################################################################
    #                           product detail page
    ##############################################################################

    def _get_product_colors_and_sizes(self, soup, product):
        """Collect Size and Color rows for *product* from its detail-page soup.

        Falls back to a single ``'default'`` size/color when the page exposes
        no SKU selector for that dimension.

        :param soup: BeautifulSoup tree of the product detail page
        :param product: Product model instance the rows belong to
        :return: tuple ``(size_list, color_list)``
        """
        # Sizes: <ul data-property="尺码/鞋码" class="J_TSaleProp tb-clearfix">
        ul_item = soup.find('ul', class_="J_TSaleProp tb-clearfix")

        size_list = []
        if ul_item:
            # Warn when the property label does not look like a size label.
            # .get(..., '') guards against a missing data-property attribute
            # (the original raised TypeError on `'码' not in None`).
            if '码' not in ul_item.get('data-property', ''):
                logging.info('html_parser_selenium 这里的尺寸名称可能有问题:%s', ul_item.get('data-property'))

            # One <span> per size option; get_or_create keeps rows unique
            # per (name, product).
            span_items = ul_item.find_all('span')
            for span_item in span_items:
                size_text = span_item.get_text()
                size = Size.objects.get_or_create(name=size_text, product=product)[0]
                size_list.append(size)
                # Was a bare Python-2 `print size`; use logging like the
                # rest of the module.
                logging.info('size=%s', size)
        else:
            # No size selector on the page: record a single default size.
            size = Size.objects.get_or_create(name='default', product=product)[0]
            size_list.append(size)

        # Colors: <ul data-property="颜色分类" class="J_TSaleProp tb-img tb-clearfix">
        ul_item = soup.find('ul', class_="J_TSaleProp tb-img tb-clearfix")
        color_list = []
        if ul_item:
            # Warn when the property label does not look like a color label.
            if '颜色' not in ul_item.get('data-property', ''):
                logging.info('html_parser_selenium 这里的颜色名称可能有问题:%s', ul_item.get('data-property'))
            # One <span> per color option.
            span_items = ul_item.find_all('span')
            for span_item in span_items:
                color_text = span_item.get_text()
                color = Color.objects.get_or_create(name=color_text, product=product)[0]
                color_list.append(color)
        else:
            # No color selector on the page: record a single default color.
            color = Color.objects.get_or_create(name='default', product=product)[0]
            color_list.append(color)

        return size_list, color_list

    def parse_product_html(self, html_cont, product):
        """Parse a product detail page.

        :return: ``(size_list, color_list)`` tuple, or ``None`` when
            ``html_cont`` is ``None``
        """
        if html_cont is None:
            return

        soup = BeautifulSoup(html_cont, 'html.parser')

        # Pull sizes and colors out of the SKU selector markup.
        return self._get_product_colors_and_sizes(soup, product)







