#coding:utf-8
__author__ = 'zhaoyoucai'

import urllib2,urllib
from bs4 import BeautifulSoup
import re
import time,datetime
import requests
import random
from urlparse import *

from django.conf import settings


import MySQLdb
import pickle

import hashlib

import json


# Loosen urllib3's TLS cipher restrictions so requests can negotiate with
# hosts that only offer legacy cipher suites.
import requests.packages.urllib3.util.ssl_
requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS = 'ALL'


# Python 2 hack: force the process-wide default string encoding to UTF-8 so
# implicit str<->unicode conversions of scraped text do not raise.
import sys
reload(sys)
sys.setdefaultencoding( "utf-8" )


# Pull MySQL connection settings from the Django configuration, falling back
# to localhost:3306 when HOST/PORT are left blank.
password = settings.DATABASES["default"]["PASSWORD"]
dbname = settings.DATABASES["default"]["NAME"]
dbuser = settings.DATABASES["default"]["USER"]
host = settings.DATABASES["default"]["HOST"]
port = settings.DATABASES["default"]["PORT"]

if host == '':
    host = 'localhost'

if port == '':
    port = 3306
# NOTE(review): a non-empty PORT from settings stays a string while the
# fallback is the int 3306; MySQLdb expects an int -- confirm settings type.


def randHeader():
    """Build a browser-like HTTP header dict with a random User-Agent.

    Connection, Accept and Accept-Language are fixed picks from small pools;
    Referer and Host are pinned to aliexpress.com.  Only the User-Agent is
    randomized per call.

    Returns:
        dict suitable for use as HTTP request headers.
    """
    head_connection = ['Keep-Alive','close']
    head_accept = ['text/html, application/xhtml+xml, */*']
    head_accept_language = ['zh-CN,fr-FR;q=0.5','en-US,en;q=0.8,zh-Hans-CN;q=0.5,zh-Hans;q=0.3']
    head_user_agent = ['Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko',
                       'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.95 Safari/537.36',
                       'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; rv:11.0) like Gecko)',
                       'Mozilla/5.0 (Windows; U; Windows NT 5.2) Gecko/2008070208 Firefox/3.0.1',
                       'Mozilla/5.0 (Windows; U; Windows NT 5.1) Gecko/20070309 Firefox/2.0.0.3',
                       'Mozilla/5.0 (Windows; U; Windows NT 5.1) Gecko/20070803 Firefox/1.5.0.12',
                       'Opera/9.27 (Windows NT 5.2; U; zh-cn)',
                       'Mozilla/5.0 (Macintosh; PPC Mac OS X; U; en) Opera 8.0',
                       'Opera/8.0 (Macintosh; PPC Mac OS X; U; en)',
                       'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.12) Gecko/20080219 Firefox/2.0.0.12 Navigator/9.0.0.6',
                       'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Win64; x64; Trident/4.0)',
                       'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0)',
                       'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; .NET4.0C; .NET4.0E)',
                       'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Maxthon/4.0.6.2000 Chrome/26.0.1410.43 Safari/537.1 ',
                       'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; .NET4.0C; .NET4.0E; QQBrowser/7.3.9825.400)',
                       'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:21.0) Gecko/20100101 Firefox/21.0 ',
                       'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.92 Safari/537.1 LBBROWSER',
                       'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; BIDUBrowser 2.x)',
                       'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/3.0 Safari/536.11']

    header = {
        'Connection': head_connection[0],
        'Accept': head_accept[0],
        'Accept-Language': head_accept_language[1],
        # random.choice is the idiomatic form of the old
        # head_user_agent[random.randrange(0, len(head_user_agent))].
        'User-Agent': random.choice(head_user_agent),
        'Referer': 'http://www.aliexpress.com/',
        'Host':'www.aliexpress.com',
    }
    return header


def LinkDB():
    """Open and return a MySQL connection using the module-level
    Django-derived credentials (utf8 charset)."""
    return MySQLdb.connect(host=host, user=dbuser, passwd=password,
                           db=dbname, port=port, charset='utf8')

def getStoreInfo(storeId):
    """Scrape seller summary and DSR scores from a store's feedback page.

    Loads the feedback-score page, follows the embedded detail iframe and
    pulls seller name, positive-feedback percentage, feedback score, store
    open date and the three DSR ratings.

    Returns a dict of the fields found on success, or 0 on any failure
    (missing iframe, 404/301, parse error).
    """
    data = dict()
    err = 0
    feedbackUrl = 'https://www.aliexpress.com/store/feedback-score/%s.html'%(storeId)
    # NOTE(review): randHeader() returns HTTP headers but is passed as query
    # params here (and module-wide) -- presumably headers= was intended;
    # confirm before changing.
    req = requests.get(feedbackUrl,params=randHeader())
    html = req.content
    html = BeautifulSoup(html,'html.parser')
    # The real feedback detail lives in an iframe; follow its src.
    iframe = html.find('iframe',id='detail-displayer')
    if iframe:
        url = iframe.get('src')
        # src is protocol-relative ("//host/..."), so prefix the scheme.
        url = 'https:%s'%(url)
        req = requests.get(url,params=randHeader())
        status = req.status_code
        if status == 404:
            return err
        elif status == 301:
            return err
        elif status == 200:
            html = req.content
            html = BeautifulSoup(html,'html.parser')
            try:
                # Seller summary table: name, feedback %, score, open date.
                sellerSummary = html.find('table','summary-tb')
                if sellerSummary:
                    info = sellerSummary.find_all('tr')
                    for item in info:
                        name = item.th.get_text().strip()
                        if 'Seller:' == name:
                            data['Seller'] = item.td.get_text().strip()
                        if 'Positive Feedback' in name:
                            PositiveFeedback = item.td.get_text().strip()
                            data['PositiveFeedback'] = PositiveFeedback.replace('%','')
                        if 'Feedback Score' in name:
                            FeedbackScore = item.td.get_text().strip()
                            data['FeedbackScore'] = FeedbackScore.replace(',','')
                        if 'Since:' in name:
                            openTime = item.td.get_text().strip()
                            # formatDate is defined elsewhere in this module.
                            data['openTime'] = formatDate(openTime)
                # Detailed seller ratings (DSR): description accuracy,
                # communication, shipping speed.
                dsr = html.find('div',id='feedback-dsr')
                if dsr:
                    info = dsr.find_all('tr')
                    for item in info:
                        name = item.th.get_text().strip()
                        if 'Described' in name:
                            txt = item.find('span','dsr-text')
                            data['ItemAsDescribed'] = txt.em.get_text().strip()
                        if 'Communication' in name:
                            txt = item.find('span','dsr-text')
                            data['Communication'] = txt.em.get_text().strip()
                        if 'Speed' in name:
                            txt = item.find('span','dsr-text')
                            data['ShippingSpeed'] = txt.em.get_text().strip()

                return data
            except Exception as e:
                print(e)
                return err

    return err


def translate(word):
    """Translate *word* into Chinese via the Baidu Fanyi API.

    Args:
        word: source text (any language; the API auto-detects).

    Returns:
        The first translated string from the API response.

    Raises:
        KeyError/IndexError when the API returns an error payload, plus any
        requests/json errors from the network call.
    """
    app_id = '20161031000031072'
    app_key = '_OP9_QNAB5p1kKwVNx1X'
    salt = int(time.time())

    # Sign = md5(appid + query + salt + secret), per the Baidu API contract.
    signstr ='%s%s%d%s'%(app_id,word,salt,app_key)
    m = hashlib.md5()
    m.update(signstr)
    sign = m.hexdigest()

    # Fix: pass the query via params= so requests URL-encodes it -- the old
    # code interpolated `word` raw into the URL, which broke on spaces and
    # non-ASCII text.
    url = 'http://api.fanyi.baidu.com/api/trans/vip/translate'
    params = {
        'q': word,
        'from': 'auto',
        'to': 'zh',
        'appid': app_id,
        'salt': salt,
        'sign': sign,
    }
    r = requests.get(url, params=params)
    data = json.loads(r.content)

    return data["trans_result"][0]["dst"]



def get_productinfo(productID,productUrl):
    """Scrape live sales metrics from one AliExpress product page.

    Args:
        productID: numeric product id, used for the wishlist-count AJAX call.
        productUrl: full URL of the product page.

    Returns:
        dict with lowPrice/highPrice, discount, wishList, orders, votes and
        averageStarRating on success; the ints 404 or 301 for those HTTP
        statuses; 0 on any parse failure or when a required field is absent.
    """
    data = dict()
    err = 0
    # Bug fix: this pattern used to be defined inside the wishlist try-block,
    # so when that block was skipped or failed early, the later votes parsing
    # raised NameError (silently converted into a 0 return).
    rule = r'(\d+)'
    # NOTE(review): randHeader() yields HTTP headers but is sent as query
    # params here (and module-wide); likely meant headers= -- confirm.
    req = requests.get(productUrl,params=randHeader())
    status = req.status_code
    if status == 404:
        return 404
    elif status == 301:
        return 301
    elif status != 200:
        return 0

    html = BeautifulSoup(req.content,'html.parser')

    # Price: either a "low - high" range or a single value.
    try:
        price = html.find(id='j-sku-price')
        if price is not None:
            parts = price.get_text().split(' - ')
            if len(parts) > 1:
                data["lowPrice"] = parts[0].replace(',','')
                data["highPrice"] = parts[1].replace(',','')
            else:
                data["lowPrice"] = data["highPrice"] = parts[0].replace(',','')
        else:
            err = 1
    except Exception as e:
        print(e)
        return 0

    # Discount percentage ("NN% off" badge).
    try:
        discount = html.find('span','p-discount-rate')
        if discount is not None:
            data["discount"] = discount.get_text().replace('% off','')
        else:
            err = 1
    except Exception as e:
        print(e)
        return 0

    # Wishlist count via the AJAX endpoint (needs the page CSRF token).
    try:
        _csrf_token = html.find(id="_csrf_token")
        if _csrf_token is not None:
            _csrf_token = _csrf_token.get('value')
            serverTime = html.find(id="serverTime").get('value')

            wishlist_item = 'http://us.ae.aliexpress.com/wishlist/wishlist_item_count.htm?itemtype=product&itemid=%s&_csrf_token_=%s&_=%s'%(productID,_csrf_token,serverTime)

            req = requests.get(wishlist_item,params=randHeader())
            wishList = req.content
            data["wishList"] = re.search( rule, wishList, re.M|re.I).group()
        else:
            err = 1
    except Exception as e:
        print(e)
        return 0

    # Order count ("N orders" / "1 order").
    try:
        orders = html.find(id="j-order-num")
        if orders is not None:
            orders = orders.get_text().replace(' orders','').replace(' order','')
            data["orders"] = orders.replace(',','')
        else:
            err = 1
    except Exception as e:
        print(e)
        return 0

    # Vote (ratings) count; optional -- defaults to '0' when absent.
    try:
        votes = html.find('span','rantings-num')
        if votes is not None:
            votes = re.search( rule, votes.get_text(), re.M|re.I).group()
            data["votes"] = votes.replace(',','')
        else:
            data["votes"] = '0'
    except Exception as e:
        print(e)
        return 0

    # Average star rating; required.
    try:
        averageStarRating = html.find('span','percent-num')
        if averageStarRating is not None:
            data["averageStarRating"] = averageStarRating.get_text()
        else:
            err = 1
    except Exception as e:
        print(e)
        return 0

    # Any missing required field invalidates the whole record.
    return 0 if err else data


def get_productionMore(productUrl):
    """Scrape the full field set for one AliExpress product page.

    Extracts product/store/category ids, title, thumbnail, price range,
    currency, discount, wishlist count, orders, votes and star rating.
    Unlike get_productinfo, most optional fields fall back to '0' instead
    of aborting; a missing *required* field returns 0.  Returns the ints
    404 or 301 for those HTTP statuses.
    """
    rule = r'(\d+)'
    data = dict()
    data["productLink"] = productUrl.split('?')[0]
    # Force plain HTTP -- presumably to dodge TLS issues; TODO confirm.
    productUrl = productUrl.replace('https','http')
    # NOTE(review): randHeader() returns HTTP headers but is passed as query
    # params (module-wide pattern); likely meant headers= -- confirm.
    req = requests.get(productUrl,params=randHeader())
    status = req.status_code
    if status == 404:
        return 404
    elif status == 301:
        return 301
    elif status == 200:
        html = req.content
        html = BeautifulSoup(html,'html.parser')
        # Product ID: first number in the mobile-variant <link> href.
        try:
            p = html.find('link',attrs={"media": "only screen and (max-width: 640px)"}).get('href')
            data["productID"] = re.search( rule, p, re.M|re.I).group()
        except Exception as e:
            print(e)
            print('获取产品ID失败')
            return 0
        # Product title.
        try:
            data["productTitle"] = html.find('h1','product-name').get_text()
        except Exception as e:
            print(e)
            print('获取产品Title失败')
            return 0
        # Store ID: first number in the "store-number" span text.
        try:
            s = html.find('span','store-number').get_text()
            data["storeID"] = re.search( rule, s, re.M|re.I).group()
        except Exception as e:
            print(e)
            print('获取产品storeID失败')
            return 0
        # Category ID: scraped from an inline JS assignment in the raw HTML.
        # NOTE(review): if the pattern is absent, result is None and
        # result.group(1) raises -- caught below, returning 0.
        try:
            #c = html.find('input','ui-breadcrumb').h2.a.get('href')
            #data["categoryID"] = re.search( rule, c, re.M|re.I).group()
            rule2 = r'window.runParams.categoryId="([0-9]*)";'
            result = re.search(rule2,req.content)
            if result.group(1):
                categoryId = result.group(1)
            else:
                categoryId = '0'
            data["categoryID"] = categoryId
        except Exception as e:
            print(e)
            print('获取产品categoryID失败')
            return 0
        # Thumbnail image URL.
        try:
            data["thumb"] = html.find('a','ui-image-viewer-thumb-frame').img.get('src')
        except Exception as e:
            print(e)
            print('获取产品thumb失败')
            return 0
        # Price: "low - high" range or a single value.
        try:
            price = html.find(id='j-sku-price').get_text()
            price = price.split(' - ')
            if len(price)>1:
                data["lowPrice"] = price[0].replace(',','')
                data["highPrice"] = price[1].replace(',','')
            else:
                data["lowPrice"] = data["highPrice"] = price[0].replace(',','')
        except Exception as e:
            print(e)
            print('获取产品lowPrice失败')
            return 0

        # Currency code from the itemprop microdata; '' when absent.
        try:
            priceCurrency = html.find(attrs={"itemprop": "priceCurrency"})
            if priceCurrency:
                data["currency"] = priceCurrency.get('content').strip()
            else:
                data["currency"] = ''
        except Exception as e:
            print(e)
            print('获取产品currency失败')
            return 0

        # Discount percentage; '0' when there is no discount badge.
        try:
            discount = html.find('span','p-discount-rate')
            if discount != None:
                discount = discount.get_text()
                data["discount"] = discount.replace('% off','')
            else:
                data["discount"] = '0'
        except Exception as e:
            print(e)
            print('获取产品discount失败')
            return 0

        # Wishlist count via the AJAX endpoint (needs the page CSRF token).
        # Best-effort: failure falls back to '0' rather than aborting.
        if data.has_key('productID') and data["productID"]:

            try:
                _csrf_token = html.find(id="_csrf_token").get('value')
                serverTime = html.find(id="serverTime").get('value')

                wishlist_item = 'http://us.ae.aliexpress.com/wishlist/wishlist_item_count.htm?itemtype=product&itemid=%s&_csrf_token_=%s&_=%s'%(data["productID"],_csrf_token,serverTime)

                #req = urllib2.Request(wishlist_item)
                #wishList = urllib2.urlopen(req,timeout=30).read()
                req = requests.get(wishlist_item,params=randHeader())
                wishList = req.content
                data["wishList"] = re.search( rule, wishList, re.M|re.I).group()
            except Exception as e:
                print(e)
                print('获取产品wishList失败')
                #return 0
                data["wishList"] = '0'

        # Order count; '0' when the element is absent.
        try:
            orders = html.find(id="j-order-num")
            #print(orders)
            if orders != None:
                orders = orders.get_text()
                orders =  orders.replace(',','')
                #print(orders)
                data["orders"] =  re.search( rule, orders, re.M|re.I).group()
            else:
                data["orders"] = '0'
        except Exception as e:
            print(e)
            print('获取产品orders失败')
            return 0

        # Vote (ratings) count; '0' when absent.
        try:
            votes = html.find('span','rantings-num')
            if votes != None:
                votes = votes.get_text()
                votes = re.search( rule, votes, re.M|re.I).group()
                data["votes"] = votes.replace(',','')
            else:
                data["votes"] = '0'
        except Exception as e:
            print(e)
            print('获取产品votes失败')
            return 0

        # Average star rating; '0' when absent.
        try:
            averageStarRating = html.find('span','percent-num')
            if averageStarRating != None:
                data["averageStarRating"] = averageStarRating.get_text()
                '''
                #获取各评分数量
                starList = html.find_all('span','r-num')
                star = []
                for item in starList:
                    starnum = item.get_text()
                    star.append(starnum.replace(',',''))
                data["star"] = star
                '''
            else:
                data["averageStarRating"] = '0'
                #data["star"] = ['0','0','0','0','0']
        except Exception as e:
            print(e)
            print('获取产品averageStarRating失败')
            return 0

        '''
        #获取评论
        try:
            feedbackLink = html.find(id='feedback')
            if feedbackLink != None:
                feedbackLink = feedbackLink.iframe.get('thesrc')
                feedbackLink = urllib.quote(feedbackLink,safe='+?&=:/')
                feedbackLink = 'http:%s'%(feedbackLink)
                req = urllib2.Request(feedbackLink)
                feedbackhtml = urllib2.urlopen(req).read()

                feedbackhtml = BeautifulSoup(feedbackhtml,'html.parser')

                allFeedback = feedbackhtml.find('span','f-title')
                if allFeedback != None:
                    data["allFeedback"] = allFeedback.em.get_text()
                feedbackList = feedbackhtml.find('div','f-filter-list')
                if feedbackList != None:
                    feedbackList = feedbackList.find_all('label')
                    feedback = []
                    for item in feedbackList:
                        feedback.append(item.em.get_text())

                    data["feedbackWithPictures"] = feedback[0]
                    data["feedbackWithPersonalInformation"] = feedback[1]
                    data["additionalFeedback"] = feedback[2]
                else:
                    data["allFeedback"] = data["feedbackWithPictures"] = data["feedbackWithPersonalInformation"] = data["additionalFeedback"] = '0'
            else:
                data["allFeedback"] = data["feedbackWithPictures"] = data["feedbackWithPersonalInformation"] = data["additionalFeedback"] = '0'
        except Exception as e:
            print(e)
            print('获取产品allFeedback失败')
            return 0

        #获取包裹信息
        try:
            package = {}
            packagesBlock = html.find('ul','product-packaging-list')
            packagesBlock = packagesBlock.find_all('li','packaging-item')

            for item in packagesBlock:
                params = item.find('span','packaging-title').get_text().replace(':','')
                values = item.find('span','packaging-des').get_text().replace('"',' ')
                package[params] = values

            data["package"] = package
        except Exception as e:
            print(e)
            print('获取产品package失败')
            return 0
        '''

        return data

    else:
        return 0



def getPackageDetail(productUrl):
    """Scrape package dimensions and weight from a product page.

    On success returns a dict with Length/Width/Height (from the
    packaging "rel" attribute, split on '|') and Weight (kg converted to
    integer grams), plus error=0.  On failure error=1 and msg describes
    the problem.  Units are whatever the site's rel markup encodes --
    TODO confirm.
    """
    result = dict()
    productUrl = productUrl.replace('https','http')
    response = requests.get(productUrl,params=randHeader())
    soup = BeautifulSoup(response.content,'html.parser')
    try:
        entries = soup.find_all('li','packaging-item')

        if not entries:
            result["error"] = 1
            result["msg"] = '获取产品package失败!'
            return result

        for entry in entries:
            label = entry.find('span','packaging-title').get_text().replace(':','')

            if 'Size' in label:
                # rel holds "L|W|H" as integers.
                dims = entry.find('span','packaging-des').get('rel').split('|')
                result["Length"] = int(dims[0])
                result["Width"] = int(dims[1])
                result["Height"] = int(dims[2])
            elif 'Weight' in label:
                # rel holds kilograms; store as integer grams.
                kilos = entry.find('span','packaging-des').get('rel')
                result["Weight"] = int(float(kilos)*1000)

        result["error"] = 0

    except Exception as e:
        result["msg"] = str(e)
        result["error"] = 1

    return result



def getHtml(word,page):
    """Fetch one AliExpress search-results page for *word*.

    Returns (item_blocks, total_result_count, page_end_flag), where
    page_end_flag is 1 when the 'page-end' marker is present (last page),
    else 0.
    """
    query = urllib2.quote(word)
    stamp = datetime.datetime.strftime(datetime.datetime.now(),'%Y%m%d%H%M%S')
    url = ('http://www.aliexpress.com/wholesale?site=glo&g=y&SearchText='
           + '%s&page=%d&initiative_id=SB_%s&shipCountry=US&needQuery=n'%(query,page,stamp))
    response = requests.get(url,params=randHeader())
    soup = BeautifulSoup(response.content,"html.parser")
    listing = soup.find(id="hs-below-list-items")
    catBlock = listing.find_all('li','list-item')
    results = int(soup.find('strong','search-count').get_text().replace(',',''))
    page_end = 0 if soup.find('span','page-end') is None else 1

    return catBlock,results,page_end

def getHtmlbyUrl(url,page):
    """Fetch one results page from a prebuilt search URL (appends &page=N).

    Returns (item_blocks, total_result_count, page_end_flag) exactly like
    getHtml.
    """
    response = requests.get(url+'&page=%d'%(page),params=randHeader())
    soup = BeautifulSoup(response.content,"html.parser")
    listing = soup.find(id="hs-below-list-items")
    catBlock = listing.find_all('li','list-item')
    results = int(soup.find('strong','search-count').get_text().replace(',',''))
    page_end = 0 if soup.find('span','page-end') is None else 1

    return catBlock,results,page_end

'''
def getItem(item):
    rule = r'(\d+)'
    qrdata = item.get('qrdata')
    qrdata = qrdata.split('|')
    categoryID = qrdata[0]
    productID = qrdata[1]
    productTitle = item.find('h3').a.get('title')
    productLink = item.find('a','product').get('href')
    productLink = productLink.split('?')[0]
    storeLink = item.find('a','store')
    storeLink = storeLink.get('href')
    storeID = re.search( rule, storeLink, re.M|re.I).group()
    thumb = item.find('img','picCore').get('src')
    if thumb == None:
        thumb = item.find('img','picCore').get('image-src')
    values = (categoryID,productID,productTitle,storeID,productLink,thumb)

    return values
'''

def getItem(item):
    """Extract the basic fields from one search-result <li> block.

    Returns a dict with categoryID, productID, productTitle, productLink,
    storeID and thumb; returns 0 when the qrdata attribute is missing.
    """
    rule = r'(\d+)'
    qrdata = item.get('qrdata')
    if qrdata is None:
        return 0
    # qrdata packs "categoryID|productID|..." separated by pipes.
    fields = qrdata.split('|')
    data = {
        "categoryID": fields[0],
        "productID": fields[1],
        "productTitle": item.find('h3').a.get('title'),
    }
    link = item.find('a','product').get('href')
    data["productLink"] = link.split('?')[0]
    storeHref = item.find('a','store').get('href')
    data["storeID"] = re.search( rule, storeHref, re.M|re.I).group()
    pic = item.find('img','picCore')
    src = pic.get('src')
    # Lazy-loaded images carry the URL in image-src instead of src.
    data["thumb"] = pic.get('image-src') if src is None else src

    return data

'''
def getItemMore(item):
    rule = r'(\d+)'
    qrdata = item.get('qrdata')
    qrdata = qrdata.split('|')
    categoryID = qrdata[0]
    productID = qrdata[1]
    productTitle = item.find('h3').a.get('title')
    productLink = item.find('a','product').get('href')
    productLink = productLink.split('?')[0]
    storeLink = item.find('a','store')
    storeLink = storeLink.get('href')
    storeID = re.search( rule, storeLink, re.M|re.I).group()
    thumb = item.find('img','picCore').get('src')
    if thumb == None:
        thumb = item.find('img','picCore').get('image-src')

    price = item.find(attrs={"itemprop": "price"}).get_text()
    price = price.split('$')[1]
    price = price.split(' - ')
    lowPrice = price[0]
    lowPrice = lowPrice.replace(',','')
    if len(price)>1:
        highPrice = price[1]
        highPrice = highPrice.replace(',','')
    else:
        highPrice = lowPrice

    starRating = item.find('span','star-s')
    if starRating is None:
        starRating = '0.0'
    else:
        starRating = starRating.get('title')
        starRating = starRating.replace('Star Rating: ','')
        starRating = starRating.replace(' out of 5','')

    votes = item.find('a','rate-num')
    if votes is None:
        votes = '0'
    else:
        votes = votes.get_text()
        votes = votes.replace(',','')
        votes = re.search( rule, votes, re.M|re.I).group()

    orders = item.find('a','order-num-a')
    if orders is None:
        orders = '0'
    else:
        orders = orders.get_text()
        orders = orders.replace(',','')
        orders = re.search( rule, orders, re.M|re.I).group()

    values = (categoryID,productID,productTitle,storeID,productLink,thumb,lowPrice,highPrice,starRating,votes,orders)

    return values
'''


def getItemMore(item):
    """Extract the extended field set from one search-result <li> block.

    Adds currency, low/high price, star rating, vote and order counts on
    top of the basic getItem fields.  Returns an empty dict when the
    qrdata attribute is missing.
    """
    data = dict()
    rule = r'(\d+)'
    qrdata = item.get('qrdata')
    if qrdata is None:
        return data
    # qrdata packs "categoryID|productID|..." separated by pipes.
    fields = qrdata.split('|')
    data["categoryID"] = fields[0]
    data["productID"] = fields[1]
    data["productTitle"] = item.find('h3').a.get('title')
    data["productLink"] = item.find('a','product').get('href').split('?')[0]
    storeHref = item.find('a','store').get('href')
    data["storeID"] = re.search( rule, storeHref, re.M|re.I).group()
    pic = item.find('img','picCore')
    src = pic.get('src')
    # Lazy-loaded images carry the URL in image-src instead of src.
    data["thumb"] = pic.get('image-src') if src is None else src

    # Price text looks like "US $1.23 - 4.56"; the part before '$' is the
    # currency marker, the part after is the price range.
    priceText = item.find(attrs={"itemprop": "price"}).get_text()
    prefix = priceText.split('$')[0]
    data["currency"] = 'USD' if 'US' in prefix else prefix.strip()
    bounds = priceText.split('$')[1].split(' - ')
    data["lowPrice"] = bounds[0].replace(',','')
    if len(bounds)>1:
        data["highPrice"] = bounds[1].replace(',','')
    else:
        data["highPrice"] = data["lowPrice"]

    stars = item.find('span','star-s')
    if stars is None:
        data["starRating"] = '0.0'
    else:
        # title reads "Star Rating: X.Y out of 5".
        rating = stars.get('title').replace('Star Rating: ','')
        data["starRating"] = rating.replace(' out of 5','')

    votes = item.find('a','rate-num')
    if votes is None:
        data["votes"] = '0'
    else:
        data["votes"] = re.search( rule, votes.get_text().replace(',',''), re.M|re.I).group()

    orders = item.find('a','order-num-a')
    if orders is None:
        data["orders"] = '0'
    else:
        data["orders"] = re.search( rule, orders.get_text().replace(',',''), re.M|re.I).group()

    return data




def filter_date(date):
    """Parse a 'DD Mon YYYY HH:MM' string into a naive datetime."""
    parsed = time.strptime(date,'%d %b %Y %H:%M')
    # struct_time starts (year, month, day, hour, minute, ...).
    return datetime.datetime(*parsed[:5])


def get_transactions(productId,page):
    """Fetch one page of the product's feedback AJAX payload.

    Backslashes that are not followed by '/', 'u' or '"' are doubled so the
    payload can survive a later JSON parse.
    """
    escape_fix = re.compile(r'\\(?![/u"])')
    startUrl = ('http://feedback.aliexpress.com/display/evaluationProductDetailAjaxService.htm'
                + '?productId=%s&type=default&page=%d'%(productId,page))
    response = requests.get(startUrl,params=randHeader())
    return escape_fix.sub(r"\\\\", response.content)


def getHtmlAlibaba(url):
    """GET *url* through a session carrying browser-like 1688.com headers;
    return the raw response body."""
    session = requests.Session()
    session.headers.update({
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate, compress',
        'Accept-Language': 'zh-CN,zh;q=0.8',
        'Cache-Control': 'max-age=0',
        'Connection': 'keep-alive',
        'Referer': 'https://www.1688.com/',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36',
    })
    return session.get(url).content


def getShopName(html):
    """Extract the shop name from an inline JS assignment in raw page HTML.

    Args:
        html: raw HTML/JS text of a shop page.

    Returns:
        The shop name with stray backslashes doubled, or '' when the
        pattern is absent or its capture is empty.
    """
    # Pattern targets the literal "shopName         : '...'," JS assignment.
    rule = r'shopName         : \'(.*)\','
    regex = re.compile(r'\\(?![/u"])')
    result = re.search(rule,html)
    # Bug fix: the old code called result.group(1) unconditionally, raising
    # AttributeError whenever the pattern was not found.
    if result is None or not result.group(1):
        return ''
    shopName = unicode(result.group(1))
    # Double backslashes that do not begin a \/, \u or \" escape.
    return regex.sub(r"\\\\", shopName)


def getGoodsInfo(url):
    """Scrape thumbnail, title and seller name from a product page.

    Supports detail.1688.com and item.taobao.com URLs. Best-effort: any
    failure is reported via ``error``/``msg`` keys instead of raising.

    :param url: product page URL.
    :return: dict with ``thumb``/``title``/``company`` on success, or
             ``error`` (1) and ``msg`` (Chinese description) on failure.
    """
    data = dict()
    headers = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
           'Accept-Encoding': 'gzip, deflate, compress',
           'Accept-Language': 'zh-CN,zh;q=0.8',
           'Cache-Control': 'max-age=0',
           'Connection': 'keep-alive',
           'Referer': 'https://www.1688.com/',
           'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/42.0.2311.152 Safari/537.36'}

    session = requests.Session()
    session.headers.update(headers)
    try:
        response = session.get(url)
        html = BeautifulSoup(response.content, 'html.parser')

        # netloc decides which site-specific parser applies
        link = urlparse(url)
        domain = link[1]

        if 'detail.1688.com' in domain:
            thumbnails = []
            thumbHtml = html.find(id='dt-tab')
            thumbHtml = thumbHtml.find_all('div', 'vertical-img')
            if thumbHtml:
                # BUG FIX: the loop variable was named `s`, shadowing the
                # requests.Session bound above; renamed to `cell`.
                for cell in thumbHtml[:5]:
                    img = cell.find('img')
                    if img:
                        imgUrl = img.get('src')
                        # strip the 60x60 size suffix to get the full image
                        imgUrl = imgUrl.replace('.60x60', '')
                        imgAlt = img.get('alt')
                        thumbnails.append({"imgUrl": imgUrl, "imgAlt": imgAlt})
                        # NOTE: overwritten each iteration, so the title ends
                        # up as the LAST thumbnail's alt text (preserved from
                        # the original logic).
                        data["title"] = imgAlt

            companyHtml = html.find('a', 'company-name')
            if companyHtml:
                companyName = companyHtml.get_text().strip()
            else:
                companyName = ''

            data['company'] = companyName
            # raises IndexError (caught below) when no thumbnails were found
            data['thumb'] = thumbnails[0]['imgUrl']

        elif 'item.taobao.com' in domain:
            thumb = html.find('img', id='J_ImgBooth')
            title = html.find('h3', 'tb-main-title')
            shop = html.find('div', 'tb-shop-name')

            # Fall back to mining the shop name out of the meta description
            # when the shop-name element is missing.
            try:
                desc = html.find(attrs={"name": "description"}).get('content')
                desc = desc.replace(u'欢迎前来淘宝网实力旺铺，', '')
                desc = desc.split(u'，')[-2]
                desc = desc.split(u'的')[1]
                txt = desc.replace(u'实力旺铺', '')
            except:
                txt = ''

            if thumb:
                imgUrl = thumb.get('src')
                # protocol-relative //img... URLs need an explicit scheme
                if not 'http' in imgUrl:
                    imgUrl = 'https:%s' % imgUrl
                data['thumb'] = imgUrl
            else:
                data['thumb'] = ''

            if title:
                data['title'] = title.get_text().strip()
            else:
                data['title'] = ''

            if shop:
                data['company'] = shop.a.get('title')
            elif txt:
                data['company'] = txt
            else:
                data['company'] = ''

        else:
            data['error'] = 1
            data['msg'] = u'未能识别的产品链接'

    except:
        # best-effort scraper: report failure instead of raising
        data['error'] = 1
        data['msg'] = u'无法打开页面'

    return data


def getSkuInfo(url):
    """Scrape the main thumbnail and per-SKU images from an AliExpress page.

    :param url: AliExpress product page URL.
    :return: dict with ``thumb`` (main image URL, size suffix stripped) and
             ``skuImg`` (list of {"img", "name"} dicts); empty dict when
             scraping fails.
    """
    data = dict()
    try:
        # BUG FIX: randHeader() builds fake browser HTTP headers; it was
        # passed as params=, which leaked the headers into the query string
        # instead of sending them as request headers.
        r = requests.get(url, headers=randHeader())
        html = BeautifulSoup(r.content, 'html.parser')

        thumb = html.find_all('span', 'img-thumb-item')
        thumb = thumb[0].img.get('src')
        # strip the 50x50 thumbnail-size suffix to get the full image
        thumb = thumb.replace('_50x50', '')
        data["thumb"] = thumb

        sku_img = html.find_all('li', 'item-sku-image')
        data["skuImg"] = []

        if len(sku_img) < 1:
            # no per-SKU images: fall back to the main thumbnail
            data["skuImg"].append({"img": thumb, "name": ''})
        else:
            for item in sku_img:
                img = item.img.get('bigpic')
                name = item.img.get('title')
                data["skuImg"].append({"img": img, "name": name})
    except:
        # best-effort: any network/markup failure yields an empty dict
        pass

    return data


# Site-specific month abbreviations (note the nonstandard June/July/Sept).
def formatDate(dtstr):
    """Convert a 'D Mon YYYY' string into 'YYYY-MM-D'.

    An unrecognised month abbreviation is passed through unchanged.

    :param dtstr: space-separated day, month abbreviation, year.
    :return: dash-joined 'year-month-day' string (day not zero-padded).
    """
    parts = dtstr.split(' ')
    day = parts[0]
    month = parts[1]
    year = parts[2]

    month_numbers = {
        'Jan': '01', 'Feb': '02', 'Mar': '03', 'Apr': '04',
        'May': '05', 'June': '06', 'July': '07', 'Aug': '08',
        'Sept': '09', 'Oct': '10', 'Nov': '11', 'Dec': '12',
    }
    # unknown abbreviations fall through unchanged, as in the elif chain
    month = month_numbers.get(month, month)

    return '%s-%s-%s' % (year, month, day)

def datelist(start, end):
    """Return every date from start to end inclusive as 'YYYY-MM-DD' strings.

    :param start: start date, 'YYYY-MM-DD'.
    :param end: end date, 'YYYY-MM-DD' (inclusive).
    :return: list of date strings; empty when start is after end.
    """
    start_date = datetime.datetime.strptime(start, '%Y-%m-%d')
    end_date = datetime.datetime.strptime(end, '%Y-%m-%d')

    # BUG FIX: the original looped `while curr != end` and incremented by a
    # day, which never terminated when start > end; `<=` also makes the
    # trailing append unnecessary.
    result = []
    curr_date = start_date
    while curr_date <= end_date:
        result.append("%04d-%02d-%02d" % (curr_date.year, curr_date.month, curr_date.day))
        curr_date += datetime.timedelta(1)
    return result


def calc(x, y):
    """Growth rate of total orders from period x to period y.

    :param x: earlier period — iterable of dicts with an "orders" count.
    :param y: later period — same shape.
    :return: (sum(y) - sum(x)) / sum(x) rounded to 2 places; 0 when the
             earlier period has no orders.
    """
    baseline = sum(int(rec["orders"]) for rec in x)
    current = sum(int(rec["orders"]) for rec in y)

    if baseline == 0:
        return round(0, 2)
    return round(float(current - baseline) / baseline, 2)

def calculateScore(productId):
    """Aggregate 60 days of transactions for a product and update `products`.

    Pulls per-day order counts/revenue from the `transaction` table, pads
    missing calendar days with zero rows, computes 30/15/7/3-day growth
    rates via calc(), finds the top-3 sale countries and the weekly order
    count, then writes the results back to the `products` row.

    :param productId: product id used in the WHERE clauses.
    :return: dict with keys thirty/fifteen/seven/three, topSaleCountry,
             weekly_orders.
    """
    conn = LinkDB()
    cur = conn.cursor()
    # SECURITY FIX: all queries previously interpolated productId with %-string
    # formatting (SQL injection risk); they now use DB-API parameters.
    # Literal % in DATE_FORMAT patterns must be doubled for the format
    # paramstyle used by MySQLdb.
    sql = ('SELECT DATE_FORMAT( `pub_date`, "%%Y-%%m-%%d" ) as "pubdate", '
           'COUNT( * ) as "orders", SUM(`quantity` >1) as "multorder",'
           'SUM(`price` * `quantity`) as "Money" FROM `transaction` WHERE product_id=%s '
           'and date_sub(curdate(), INTERVAL 60 DAY) <= date(`pub_date`) '
           'GROUP BY DATE_FORMAT( `pub_date`, "%%Y-%%m-%%d" )')

    cur.execute(sql, (productId,))

    transData = dict()

    transData["RECORDS"] = []
    transDate = []
    for item in cur:
        pubdate = item[0]
        # Py2: bytes() is str(); keeps counts as strings like the padded rows
        orders = bytes(item[1])
        multorder = bytes(item[2])
        money = str(item[3])
        transData["RECORDS"].append({"pubdate": pubdate, "orders": orders, "multorder": multorder, "money": money})
        transDate.append(pubdate)

    # Pad days with no sales so the 60-day window is fully populated.
    d1 = datetime.datetime.now()
    d2 = d1 + datetime.timedelta(days=-59)

    calendar = datelist(str(d2.date()), str(d1.date()))

    for dt in calendar:
        if dt not in transDate:
            transData["RECORDS"].append({u'pubdate': unicode(dt), u'money': u'0.0', u'multorder': u'0', u'orders': u'0'})

    # BUG FIX: removed the redundant Py2-only list.sort(cmp) call that was
    # immediately superseded by this sorted() over the same key.
    transData["RECORDS"] = sorted(transData["RECORDS"], key=lambda x: x['pubdate'])

    # Split the 60-day window into older/newer halves at several horizons.
    # NOTE(review): the [1:8] / [1:4] offsets skip the first day of each
    # sub-window — preserved from the original; confirm against the intended
    # growth-rate definition.
    thirty = transData["RECORDS"][:30]
    thirty_2 = transData["RECORDS"][30:]

    fifteen = thirty_2[:15]
    fifteen_2 = thirty_2[15:]

    seven = fifteen_2[1:8]
    seven_2 = fifteen_2[8:]

    three = seven_2[1:4]
    three_2 = seven_2[4:]

    data = dict()

    data["thirty"] = calc(thirty, thirty_2)
    data["fifteen"] = calc(fifteen, fifteen_2)
    data["seven"] = calc(seven, seven_2)
    data["three"] = calc(three, three_2)

    # Top three destination countries by transaction count.
    sql = 'SELECT country_code FROM `transaction` WHERE product_id=%s GROUP BY country_code ORDER BY count(*) DESC LIMIT 3'

    cur.execute(sql, (productId,))

    topSaleCountry = ''
    for item in cur:
        topSaleCountry += item[0]
        topSaleCountry += ','

    data["topSaleCountry"] = topSaleCountry.strip(',')

    sql = 'SELECT count(*) FROM `transaction` WHERE product_id=%s AND date_sub(curdate(), INTERVAL 7 DAY) <= date(`pub_date`)'

    cur.execute(sql, (productId,))

    data["weekly_orders"] = cur.fetchone()[0]

    # Persist the computed metrics back onto the product row.
    sql = ('UPDATE products SET weekly_orders=%s,top_sale_country=%s,monthly_growth_rates=%s,'
           'fifteen_growth_rates=%s,seven_growth_rates=%s,three_growth_rates=%s WHERE product_id=%s')
    cur.execute(sql, (int(data["weekly_orders"]), data["topSaleCountry"], data["thirty"],
                      data["fifteen"], data["seven"], data["three"], productId))
    conn.commit()

    cur.close()
    conn.close()

    return data

