import requests
from bs4 import BeautifulSoup
from time import sleep
from selenium import webdriver
import publish
import urllib
import json
import time
import base64
import hashlib
import re
import html
import oss2
import datetime
import string
import random
import pymysql
import cpca

# Module-level MySQL connection shared by insert()/select() below.
# Keyword arguments are used because positional arguments to
# pymysql.connect() are deprecated and were removed in PyMySQL 1.0.
# NOTE(review): host/user/password are hard-coded — move them to
# environment variables or a config file before publishing this code.
con = pymysql.connect(
    host='114.215.192.224',
    user='jr1',
    password='JRyhdweb123',
    database='quzizhu',
    charset='utf8',
)


# Shared cursor; every query in this script goes through it.
cur = con.cursor()


def insert(sql, args=None):
    """Execute a write statement, commit it, and return the new row id.

    Uses the module-level connection/cursor. *args*, when given, is
    passed through to ``cursor.execute`` for parameterized queries.
    """
    cur.execute(sql, args)
    con.commit()
    new_row_id = cur.lastrowid
    return new_row_id


def select(sql):
    """Run a read-only query and return all matching rows as tuples."""
    cur.execute(sql)
    rows = cur.fetchall()
    return rows

def driver_detail():
    """Crawl every product page listed in ./data2.json with Selenium,
    extract title/price/origin/images, mirror the images to OSS and
    insert the records into the database.

    NOTE(review): makes up to 100 passes over the URL list and sleeps
    60 seconds per page, so this is a deliberately slow, long-running job.
    """
    file_name = './data2.json'
    # Use a context manager so the JSON file handle is not leaked.
    with open(file_name) as fp:
        data = json.load(fp)
    driver = webdriver.Chrome(executable_path='./chromedriver')
    # Repeat the whole URL list up to 100 times (original behavior).
    for _ in range(100):
        for page_url in data:
            print(page_url)
            driver.get(page_url)
            soup = BeautifulSoup(driver.page_source, "html.parser")
            div = soup.find("div", class_="base full")
            if div is None:
                # Not a product detail page; skip it.
                continue
            title = div.find("div", class_="full-name")
            price = div.find("span", class_="price")
            info = div.find("div", class_="params-container")
            thumb = div.find("div", class_="img img-big")
            detail = div.find("div", class_="rich-text no-p-margin")
            link = info.div
            # The sibling text starts with a two-character label; drop it.
            origin = link.next_sibling.get_text()[2:]
            title = title.get_text() if title else '木托盘'
            if price:
                # Strip whitespace plus the leading currency symbol and
                # trailing unit character.
                price = price.get_text().strip()[1:-1]
            else:
                price = '面议'
            size = '根据客户要求定制'
            trade_place = publish.get_random_city()
            # Renamed from `type` to avoid shadowing the builtin.
            product_type = get_random_type()
            # Try to derive a province from the title via cpca; fall back
            # to the random city when nothing is recognized.
            send = cpca.transform([title], pos_sensitive=True)
            send_area = send['省'].tolist()[0]
            if send_area:
                trade_place = send_area
            print(trade_place)
            hash_str = get_hash_str()
            unit = '立方'
            # Raw string: the pattern contains backslash escapes.
            url_pattern = r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+'
            thumb_urls = re.findall(url_pattern, str(thumb))
            if not thumb_urls:
                # No thumbnail URL found; nothing usable to store.
                continue
            new_url = html.unescape(thumb_urls[0])
            detail_text = '成色好，价格优，库存足'
            get_url = upload_oss(new_url)
            # Parameterized query: scraped text can contain quotes that
            # would break (or inject into) a string-formatted statement.
            insert_id = insert(
                "insert into stpc_qzz_provider (name, type,size,origin,trad_place,price,hash,detail_text,thumb,unit)"
                " values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
                (title, product_type, size, origin, trade_place, price,
                 hash_str, detail_text, get_url, unit),
            )
            print(insert_id)
            # Skip the first match (usually the thumbnail) and keep at
            # most six detail images, as before.
            detail_img = re.findall(url_pattern, str(detail))[1:7]
            if detail_img:
                for de in detail_img:
                    new_de = html.unescape(de)
                    # Very short matches are regex noise, not real image URLs.
                    if len(new_de) > 100:
                        get_de_url = upload_oss(new_de)
                        print(get_de_url)
                        img_id = insert(
                            "insert into stpc_qzz_provider_img (provid_id, img, type) values (%s,%s,%s)",
                            (insert_id, get_de_url, 'supply'),
                        )
                        print(img_id)
            else:
                # No detail images: reuse the thumbnail so the record
                # always has at least one image row.
                img_id = insert(
                    "insert into stpc_qzz_provider_img (provid_id, img, type) values (%s,%s,%s)",
                    (insert_id, get_url, 'supply'),
                )
                print(img_id)
            insert(
                "insert into stpc_qzz_short_link (c_name, hash_str, p_name, p_id) values (%s,%s,%s,%s)",
                ('SpdAction', hash_str, 'page', insert_id),
            )
            # Throttle: be polite to the target site.
            sleep(60)

def upload_oss(url):
    """Download the image at *url* and re-upload it to Aliyun OSS.

    Returns the public URL of the uploaded object.

    Raises requests.HTTPError when the download fails, instead of
    silently uploading an error page as image bytes.

    NOTE(review): the OSS access key/secret are hard-coded — move them
    to environment variables or a credentials file.
    """
    now = datetime.datetime.now()
    # Timestamp plus four random digits keeps object names effectively unique.
    random_name = now.strftime("%Y%m%d%H%M%S") + ''.join(
        random.choice(string.digits) for _ in range(4)
    )
    cname = 'http://jr1test.oss-cn-beijing.aliyuncs.com'
    file_name = 'qzz{}.jpg'.format(random_name)
    auth = oss2.Auth('LTAI4FzRT6vM9QX1giBon5vT', '0siDWaBj1c65O8G6YzGmU9nbFaYiLV')
    bucket = oss2.Bucket(auth, cname, 'jr1test', is_cname=True)
    # Timeout so a dead host cannot hang the scraper forever, and fail
    # fast on non-200 responses rather than storing junk bytes.
    resp = requests.get(url, timeout=30)
    resp.raise_for_status()
    bucket.put_object(file_name, resp.content)
    return cname + '/' + file_name

def get_hash_str(length=6):
    """Generate a short alphanumeric token for short links.

    The token is derived from the MD5 of the current timestamp,
    base64-encoded, with non-alphanumeric characters ('+', '/', '=')
    filtered out and the result truncated to *length* characters
    (default 6, matching the original behavior).
    """
    md5 = hashlib.md5()
    md5.update(str(time.time()).encode('utf-8'))
    digest = md5.digest()
    encoded = str(base64.b64encode(digest), 'utf-8')
    # Keep only [0-9a-zA-Z]; base64 output is ASCII, so isalnum()
    # matches exactly the ranges the original checked by hand.
    chars = [c for c in encoded if c.isalnum()][:length]
    return ''.join(chars)


def get_random_type():
    """Return one product-type label picked uniformly at random."""
    type_options = (
        '原木',
        '板材',
        '木方',
        '单板/托盘料',
        '其他',
        '家具',
        '托盘/包装箱',
        '脚墩/木条',
        '实木脚墩',
        '胶合板/多层板脚墩',
        '刨花脚墩',
        '锯沫脚墩',
        '托盘料',
        '木箱',
        '短料',
        '长料',
        '薄板',
        '木皮',
        '杂木脚墩',
        '松木脚墩',
        '其它',
        '超长料',
        '建筑木方',
        '景观木方',
        '枕木/跳板',
        '家具木方',
        '松木托盘',
        '杨木托盘',
        '杂木托盘',
        '胶合板托盘',
        '密度板',
        '大芯板',
        '刨花板',
        '胶合板',
        '装饰面板',
        '边角料',
    )
    return random.choice(type_options)

if __name__ == '__main__':
    # Script entry point: run the scraping loop directly.
    # save_detail()
    driver_detail()