#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright © 2016 lizongzhe
#
# Distributed under terms of the MIT license.
import requests
from bs4 import BeautifulSoup
import re
import json
from datetime import datetime, date, timedelta
import requests.packages.urllib3.util.ssl_
import time
import logging
from esr_parser.core import request as api_request

# Module-level network tweaks: accept any TLS cipher (some Taobao hosts
# negotiate legacy ciphers urllib3 rejects by default) and retry failed
# connections twice before giving up.
requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS = 'ALL'
requests.adapters.DEFAULT_RETRIES = 2

logger = logging.getLogger(__name__)


def download(item_id, tmall, comment_limit=200):
    """Fetch an item page and parse it into a detail dict.

    :param item_id: Taobao/Tmall numeric item id; falsy ids short-circuit
        to an empty dict.
    :param tmall: truthy when the item lives on detail.tmall.com, falsy
        for item.taobao.com.
    :param comment_limit: maximum number of comments to collect.
    :return: parsed detail dict (see the parse_* helpers).
    """
    if not item_id:
        return {}

    if tmall:
        url = 'https://detail.tmall.com/item.htm?id={}'.format(item_id)
        resp = api_request(url)
        return parse_tm_detail(
            resp, {"auction_num_id": item_id, "is_tm": 1}, comment_limit)

    url = 'https://item.taobao.com/item.htm?id={}'.format(item_id)
    resp = api_request(url)
    return parse_detail(
        resp, {"auction_num_id": item_id, "is_tm": 0}, comment_limit)


def parse_tm_detail(resp, meta={}, comment_limit=200):
    """Parse a Tmall item page into a flat product-detail dict.

    NOTE(review): ``meta`` is a mutable default argument; it is only read
    (never mutated) here, so no state leaks between calls, but callers
    should pass it explicitly.

    :param resp: response for the item page (GB18030-encoded HTML).
    :param meta: extra fields merged into the result; must contain
        ``auction_num_id`` (the item id).
    :param comment_limit: max number of comments to fetch.
    :return: dict with SKUs, seller/shop info, rating stats and comments.
    """
    soup = BeautifulSoup(resp.content.decode('GB18030'), "html5lib")
    # The page embeds a JSON config blob starting with {"api": ...; grab
    # everything from there to the end of that line and parse it.
    info = json.loads(
        re.search('{"api":.*', resp.content.decode('GB18030')).group())
    _skuMap = info['valItemInfo']['skuMap']
    _skuList = info['valItemInfo']['skuList']
    # NOTE(review): strftime('%s') is a platform-specific (glibc) epoch
    # format; appending '000' fakes a millisecond timestamp. initApi is
    # built but not requested in this function.
    initApi = "https:" + info['initApi'] + \
        "&timestamp={}".format(datetime.now().strftime('%s000'))


    sellerId = info['rateConfig']['sellerId']
    spuId = info['rateConfig']['spuId']

    # Detailed seller rating (DSR) info comes back as JSONP; pull the
    # payload out of the outermost parentheses.
    dsr_resp = api_request(
        'https://dsr-rate.tmall.com/list_dsr_info.htm?itemId={}&spuId={}&sellerId={}'.format(meta['auction_num_id'], spuId, sellerId))
    dsr_info = json.loads(re.search("\((.*)\)", dsr_resp.content).groups()[0])

    sharp_grade = ";".join(
        [elem.text for elem in soup.select('.shopdsr-score-con')])
    comment_count = dsr_info['dsr']['rateTotal']

    # Tag clouds (e.g. u"质量好(123)") are wrapped in a jsonp2783(...)
    # call; content[13:-1] strips that wrapper before decoding.
    tags_resp = api_request(
        'https://rate.tmall.com/listTagClouds.htm?itemId={}&isAll=true&isInner=true&callback=jsonp2783'.format(meta['auction_num_id']))
    tag_info = json.loads(tags_resp.content[13:-1].decode('gb2312'))
    tags = [u"{}({})".format(tag['tag'], tag['count'])
            for tag in tag_info['tags']['tagClouds']]
    tags_str = ";".join(tags)
    description = soup.select('#J_AttrUL')[0].text

    stock_info = get_stock(meta['auction_num_id'])

    # One entry per SKU: enrich each sku-list row with the price data
    # keyed by ";pvs;" in the sku map.
    skuList = []
    for skuInfo in _skuList:
        skuInfo.update(_skuMap[";" + skuInfo['pvs'] + ";"])
        # NOTE(review): this rebinds the outer ``info`` page-config dict;
        # harmless here because sellerId/spuId were extracted above.
        info = {}
        info['sku_id'] = skuInfo['skuId']
        info['pro_pvs'] = skuInfo['pvs']
        info['pro_stock'] = stock_info['sku'].get(
            ";" + skuInfo['pvs'] + ";", {}).get('sellableQuantity', 0)
        info['pro_name'] = skuInfo['names']
        info['pro_price'] = skuInfo['price']
        skuList.append(info)

    detail = {}
    # Brand is scraped from the attribute text (u'品牌:...'); fall back to
    # u'other/其他' when the pattern is missing.
    try:
        detail['brand_name'] = re.search(u'品牌:.*', description).group().split(':')[1]
    except:
        detail['brand_name'] = u'other/其他'

    detail['pro_stock'] = stock_info['sellableQuantity']
    detail['skuList'] = skuList
    detail['pro_url'] = resp.url
    detail['pro_name'] = soup.select('.tb-detail-hd h1')[0].text.strip()
    sub_title_elem = soup.select('.tb-detail-hd p')
    detail['pro_sub_title'] = sub_title_elem and sub_title_elem[0].text or ''
    detail['pro_img_url'] = soup.select('#J_ImgBooth')[0].attrs.get('src')
    detail['express'] = tags_str
    detail['description'] = description
    detail['spu_id'] = spuId
    detail['comment_count'] = comment_count
    detail['pro_img_urls'] = ""
    detail['seller_id'] = sellerId
    detail['shop_name'] = soup.select('.slogo-shopname')[0].text
    detail['sharp_grade'] = sharp_grade
    # Monthly sales / rookie-league flag come from a secondary endpoint
    # that may fail; default both to None in that case.
    try:
        tmall_info = get_item_info(meta['auction_num_id'], True)
        detail['month_sales'] = tmall_info['defaultModel']['sellCountDO']['sellCount']
        detail['is_rookie_league'] = tmall_info['defaultModel']['servicePromise']['has3CPromise']
    except Exception as e:
        detail['month_sales'] = None
        detail['is_rookie_league'] = None
    detail['gradeAvg'] = dsr_info['dsr']['gradeAvg']
    detail.update(meta)
    comments = parse_tmall_comments(
        detail['auction_num_id'], detail['seller_id'], comment_limit)
    detail['attention_num'] = get_attention_num(detail['auction_num_id'])
    detail['comments'] = comments
    return detail


def parse_detail(resp, meta={}, comment_limit=200):
    """Parse a Taobao (non-Tmall) item page into a flat detail dict.

    NOTE(review): ``meta`` is a mutable default argument; it is only read
    (never mutated) here, so no state leaks between calls, but callers
    should pass it explicitly.

    :param resp: response for the item page.
    :param meta: extra fields merged into the result; must contain
        ``auction_num_id`` (the item id).
    :param comment_limit: max number of comments to fetch.
    :return: dict with SKUs, shop info, rating counts and comments.
    """
    detail = {}
    # The sku map is embedded in the page script as "skuMap : {...}".
    skuMap = json.loads(re.search('skuMap\s*:(.*)', resp.content).groups()[0])
    soup = BeautifulSoup(resp.content.decode(resp.encoding), "html5lib")
    for sku_property, skuInfo in skuMap.items():
        # sku_property looks like ";pv1;pv2;" — drop the empty edge parts.
        skus = sku_property.split(';')[1:-1]
        sku_msgs = []
        for sku in skus:
            sku_msgs.append(soup.select(
                '[data-value="{}"] span'.format(sku))[0].text)
        # NOTE(review): ``sku_msgs`` is built but unused; joining ``skus``
        # stores raw pv ids rather than display names — possibly intended
        # to be " ".join(sku_msgs). Left as-is; 'msg' is not read below.
        sku_msg = " ".join(skus)
        skuMap[sku_property]['msg'] = sku_msg

    # Total rate count arrives as jsonp100({...}); [9:-1] strips wrapper.
    url = 'https://rate.taobao.com/detailCount.do?&callback=jsonp100&itemId={}'.format(meta[
                                                                                       'auction_num_id'])
    rate_resp = api_request(url)
    rate_count = json.loads(rate_resp.content[9:-1])['count']

    # Impression tags + good/normal/bad counts; [3:-1] strips the empty
    # callback wrapper before gb2312-decoding.
    tags_url = "https://rate.taobao.com/detailCommon.htm?auctionNumId={}&callback=".format(meta[
                                                                                           'auction_num_id'])
    tags_resp = api_request(tags_url)
    tags_content = tags_resp.content
    tags_info = json.loads(tags_content[3:-1].decode('gb2312'))

    stock_info = get_stock(meta['auction_num_id'])

    # One entry per SKU; SKUs whose stock/name lookup fails are skipped.
    skuList = []
    for sku_property, skuInfo in skuMap.items():
        info = {}
        info['sku_id'] = skuInfo['skuId']
        info['pro_pvs'] = sku_property
        try:
            info['pro_stock'] = stock_info['sku'].get(
                sku_property, {}).get('sellableQuantity', {})
            info['pro_name'] = soup.select(
                '[data-value="{}"] span'.format(sku_property[1:-1]))[0].text
        except:
            continue
        info['pro_price'] = skuInfo['price']
        skuList.append(info)

    imgs = soup.select('#J_UlThumb img')

    detail['pro_stock'] = stock_info['sellableQuantity']
    detail['skuList'] = skuList
    detail['pro_url'] = resp.url
    detail['pro_name'] = soup.select('.tb-main-title')[0].text.strip()
    detail['pro_sub_title'] = soup.select('.tb-subtitle')[0].text.strip()
    detail['pro_img_url'] = soup.select('#J_ImgBooth')[0].attrs.get('src')
    detail['express'] = u";".join([u"{}({})".format(
        tag['title'], tag['count']) for tag in tags_info['data']['impress']])
    detail['description'] = soup.select('#attributes')[0].text
    # Brand is scraped from the attribute text (u'品牌:...'); fall back to
    # u'other/其他' when the pattern is missing.
    try:
        detail['brand_name'] = re.search(u'品牌:.*', detail['description']).group().split(':')[1]
    except:
        detail['brand_name'] = u'other/其他'

    detail['spu_id'] = ""
    detail['comment_count'] = rate_count
    detail['pro_img_urls'] = ";".join(
        [img.attrs.get('data-src') for img in imgs])
    detail['seller_id'] = soup.select('#J_Pine')[0].attrs.get('data-sellerid')
    # Shop name: prefer the DOM element; fall back to scraping the
    # "shopName: '...'" script variable and JSON-unescaping it.
    try:
        detail['shop_name'] = soup.select('.tb-shop-name')[0].text
    except:
        shop_name = re.search(
            'shopName.*', resp.content).group().split(':')[1].split(',')[0].replace("'", '')
        detail['shop_name'] = json.loads('"' + shop_name + '"')
    detail['shop_grade'] = ";".join(
        [elem.text.strip() for elem in soup.select('.tb-shop-rate a')])
    detail['good_count'] = tags_info['data']['count']['good']
    detail['general_count'] = tags_info['data']['count']['normal']
    detail['poor_count'] = tags_info['data']['count']['bad']
    detail['additional'] = tags_info['data']['count']['additional']
    # Monthly sales / rookie-league flag come from a secondary endpoint
    # that may fail; default both to None in that case.
    try:
        item_info = get_item_info(meta['auction_num_id'])
        detail['month_sales'] = item_info['defaultModel']['sellCountDO']['sellCount']
        detail['is_rookie_league'] = item_info['defaultModel']['servicePromise']['has3CPromise']
    except Exception as e:
        detail['month_sales'] = None
        detail['is_rookie_league'] = None
    detail.update(meta)
    detail['attention_num'] = get_attention_num(detail['auction_num_id'])
    comments = parse_comments(detail['auction_num_id'], comment_limit)
    detail['comments'] = comments
    return detail


def parse_comments(item_id, size=200):
    """Fetch and normalize Taobao item comments (feedRateList endpoint).

    :param item_id: Taobao auction/item id.
    :param size: stop once at least this many comments are collected.
        When falsy, fall back to fetching only comments newer than
        yesterday (midnight).
    :return: list of dicts with review id, date, nick, score, sku name,
        photo urls, content, reply and vip level; boilerplate
        auto-reviews are dropped.
    """
    # Cut-off used only when no size limit is given: midnight of yesterday.
    yesterday = date.today() - timedelta(1)
    yesterday = datetime(*yesterday.timetuple()[:6])

    api = 'https://rate.taobao.com/feedRateList.htm?&auctionNumId={}&currentPageNum={}&orderType=feedbackdate'
    page = 1
    comments = []
    while True:
        try:
            resp = api_request(api.format(item_id, page))
            # Response is JSONP-ish; strip the wrapper chars and decode.
            data = json.loads(resp.content[3:-2].decode('GB18030'))
            comments += data['comments']
            page += 1
            # Date of the oldest comment fetched so far, e.g.
            # u'2016年01月02日 03:04' (py2 bytes-pattern strptime).
            comment_time = datetime.strptime(comments[-1]['date'].encode('utf-8'), u'%Y年%m月%d日 %H:%M'.encode('utf-8'))

            # no next page
            if page > data['maxPage']:
                break

            # reached the requested limit
            if size and len(comments) >= size:
                break

            # no size limit: stop at comments older than yesterday
            if not size and comment_time < yesterday:
                break
            # NOTE: the former `if not size and size > 200` branch was
            # unreachable (a falsy size never exceeds 200) and was removed.
        except Exception:
            # Malformed page / missing key ends pagination; log instead
            # of swallowing silently so scraping failures are visible.
            logger.exception(
                "stopping comment pagination for item %s at page %s",
                item_id, page)
            break

    result = []
    for comment in comments:
        # Skip the boilerplate auto-review Taobao inserts on timeout.
        if comment['content'] == u'评价方未及时做出评价,系统默认好评!':
            continue
        info = {}
        info['review_id'] = comment['rateId']
        info['creation_time'] = comment['date']
        info['nick_name'] = comment['user']['nick']
        info['score'] = comment['rate']
        # SKU string looks like u"颜色:红色"; keep the part after the last ':'.
        pro_name = comment.get('auction', {}).get('sku', '')
        info['pro_name'] = re.search("[^:]*$", pro_name).group()
        info['img_urls'] = ";".join([photo['url']
                                     for photo in comment['photos']])
        info['comment_content'] = comment['content']
        info['comment_reply'] = comment['reply']
        info['userVipLevel'] = comment['user']['vipLevel']
        result.append(info)
    return result


def parse_tmall_comments(item_id, sellerId, size=200):
    """Fetch and normalize Tmall item comments (list_detail_rate endpoint).

    :param item_id: Tmall item id.
    :param sellerId: seller id (required by the rate endpoint).
    :param size: stop once at least this many comments are collected.
        When falsy, fall back to fetching only comments newer than
        yesterday (midnight).
    :return: list of dicts with review id, creation time, nick, sku name,
        photo urls, content, reply and vip level.
    """
    # Cut-off used only when no size limit is given: midnight of yesterday.
    yesterday = date.today() - timedelta(1)
    yesterday = datetime(*yesterday.timetuple()[:6])
    api = 'https://rate.tmall.com/list_detail_rate.htm?itemId={}&sellerId={}&order=1&currentPage={}&callback=jsonp&content=1'
    page = 1
    comments = []
    # Pre-bind so the except handler below can inspect the last payload
    # even when the very first request raises (was a NameError before).
    data = {}
    while True:
        try:
            resp = api_request(api.format(item_id, sellerId, page))
            # Strip the "jsonp(" prefix / ")" suffix before decoding.
            data = json.loads(resp.content.strip()[6:-1].decode('GB18030'))
            comments += data['rateDetail']['rateList']
            page += 1
            comment_time = datetime.strptime(comments[-1]['rateDate'], '%Y-%m-%d %H:%M:%S')

            # no next page
            if page > data['rateDetail']['paginator']['lastPage']:
                break

            # reached the requested limit
            if size and len(comments) >= size:
                break

            # no size limit: stop at comments older than yesterday
            if not size and comment_time < yesterday:
                break
            # NOTE: the former `if not size and size > 200` branch was
            # unreachable (a falsy size never exceeds 200) and was removed.
        except Exception:
            # An 'sm' value in the payload marks Tmall rate limiting:
            # back off briefly and retry the same page.
            if 'sm' in data.values():
                time.sleep(1)
                continue
            # Log via the module logger (was a py2-only `print e`).
            logger.exception(
                "stopping tmall comment pagination for item %s at page %s",
                item_id, page)
            break

    result = []
    for comment in comments:
        info = {}
        info['review_id'] = comment['id']
        # gmtCreateTime is epoch milliseconds; render as ISO-8601.
        info['creation_time'] = datetime.fromtimestamp(
            int(comment['gmtCreateTime']) / 1000).strftime('%Y-%m-%dT%H:%M:%S')
        info['nick_name'] = comment['displayUserNick']
        attributes = comment['attributes']
        info['pro_name'] = comment['auctionSku']
        pics = comment['pics'] or []
        info['img_urls'] = ";".join([photo for photo in pics])
        info['comment_content'] = comment['rateContent']
        info['comment_reply'] = comment['reply']
        info['userVipLevel'] = comment['userVipLevel']
        result.append(info)
    return result


def get_stock(itemid):
    """Fetch dynamic stock info (dynStock module) for a Taobao item.

    :param itemid: numeric item id.
    :return: the ``data.dynStock`` sub-dict of the detailskip response.
    """
    # The detailskip endpoint requires the item page as referer.
    item_page = "https://item.taobao.com/item.htm?id={}".format(itemid)
    stock_api = "https://detailskip.taobao.com/service/getData/1/p2/item/detail/sib.htm?itemId={}&modules=dynStock".format(
        itemid)
    payload = api_request(stock_api, headers={"referer": item_page}).json()
    return payload['data']['dynStock']


def get_item_info(itemid, tmall=False):
    """Fetch the initItemDetail JSON (monthly sales, service promises).

    :param itemid: numeric item id.
    :param tmall: pick the Tmall item page as referer when truthy,
        otherwise the Taobao one.
    :return: decoded JSON payload from mdskip.taobao.com.
    """
    if tmall:
        referer = 'https://detail.tmall.com/item.htm?id={}'.format(itemid)
    else:
        referer = 'https://item.taobao.com/item.htm?id={}'.format(itemid)

    api = 'https://mdskip.taobao.com/core/initItemDetail.htm?itemId={}'.format(itemid)
    return api_request(api, headers={'referer': referer}).json()


def get_attention_num(item_id):
    """Return the attention (favorites) counter for an item.

    :param item_id: numeric item id.
    :return: the single counter value from the JSONP payload.
    """
    # Counter service wraps {"ICCP_1_<id>": <count>} in a jsonp callback;
    # extract the braces span and take the sole value.
    url = 'https://count.taobao.com/counter3?_ksTS=&callback=json&keys=ICCP_1_{}'.format(item_id)
    body = api_request(url).content
    counters = json.loads(re.search('\{.*\}', body).group())
    return list(counters.values())[0]
