#! /usr/bin/python
# -*- coding: utf-8 -*-
# Category crawler for hqchip.com

from pyquery import PyQuery as pq
import hashlib,re,sys
from sqlalchemy import *
from sqlalchemy.orm import mapper, sessionmaker
from bs4 import BeautifulSoup
from  datetime  import  *
import math
import string
import gzip,urllib2
import urllib
import StringIO
import random
import time
import json

class Cats(object):
    """Row class for the hq_cats table (mapped classically in hq.__init__)."""
    pass

class Product(object):
    """Row class for the hq_product table (mapped classically in hq.__init__)."""
    pass

class ProductPrice(object):
    """Row class for the hq_price table (mapped classically in hq.__init__)."""
    pass

class ProductSupplier(object):
    """Row class for the hq_product_sup table (mapped classically in hq.__init__)."""
    pass

class Manufacturer(object):
    """Row class for the hq_manufacturer table (mapped classically in hq.__init__)."""
    pass

class Supplier(object):
    """Row class for the hq_supplier table (mapped classically in hq.__init__)."""
    pass

class hq:
    """Scraper for hqchip.com backed by MySQL through SQLAlchemy.

    Three-stage pipeline, each stage resumable and run independently:
      cats() -- build the category tree,
      pro()  -- collect product listings per category,
      ext()  -- extract per-supplier price tables for each product.
    """

    def __init__(self):
        # NOTE(review): credentials are hard-coded and echo=True logs every
        # SQL statement -- acceptable for a one-off scraper only.
        self.engine = create_engine('mysql://root:123456@localhost/ali?charset=utf8', echo=True)
        metadata = MetaData()
        # Category tree; pid=0 marks a top-level category, otherwise the
        # parent row's id.  status/num track pagination progress (see pro()).
        cats_table = Table('hq_cats', metadata,
                               Column('id', Integer, primary_key=True),
                               Column('name', CHAR(255)),
                               Column('title', CHAR(255)),
                               Column('url', CHAR(255)),
                               Column('url_code', Unicode(255)),
                               Column('pid', Unicode(255)),
                               Column('status', Unicode(255)),
                               Column('num', Integer),
                               )
        # Products; composite primary key (id, goodsid).  status flags
        # whether ext() has already processed the row.
        product_table = Table('hq_product', metadata,
                              Column('id', Integer, primary_key=True),
                              Column('model', CHAR(255)),
                              Column('manufacturer_id', CHAR(255)),
                              Column('goodsid', Unicode(255), primary_key=True),
                              Column('url', TEXT),
                              Column('cat_code', Unicode(255)),
                              Column('status', Integer),
                              )
        manufacturer_table = Table('hq_manufacturer', metadata,
                              Column('id', Integer, primary_key=True),
                              Column('name', CHAR(255)),
                              Column('url_code', Unicode(255), primary_key=True),
                              Column('url', Unicode(255)),
                              Column('title', Unicode(255)),
                              )
        supplier_table = Table('hq_supplier', metadata,
                              Column('id', Integer, primary_key=True),
                              Column('name', CHAR(255)),
                              Column('url_code', Unicode(255), primary_key=True),
                              Column('url', Unicode(255)),
                              )
        # Per-supplier price breaks for a product (mainland and HK variants
        # of both price and delivery time).
        price_table = Table('hq_price', metadata,
                                 Column('id', Integer, primary_key=True),
                                 Column('product_id', Integer),
                                 Column('supplier_id', Unicode(255)),
                                 Column('goods_id', Unicode(255)),
                                 Column('quantity', Unicode(255)),
                                 Column('minimum', Unicode(255)),
                                 Column('price', Unicode(255)),
                                 Column('price_hk', Unicode(255)),
                                 Column('delivery', Unicode(255)),
                                 Column('delivery_hk', Unicode(255)),
                                 )
        product_sup_table = Table('hq_product_sup', metadata,
                                 Column('id', Integer, primary_key=True),
                                 Column('product_id', Integer),
                                 Column('supplier_id', Unicode(255)),
                                 Column('name', Unicode(255)),
                                 Column('msg', TEXT),
                                 )
        metadata.create_all(self.engine)
        # Classical (imperative) SQLAlchemy mapping onto the bare classes
        # defined at module level.
        mapper(Cats, cats_table)
        mapper(Product, product_table)
        mapper(Manufacturer, manufacturer_table)
        mapper(Supplier, supplier_table)
        mapper(ProductPrice, price_table)
        mapper(ProductSupplier, product_sup_table)
        Session = sessionmaker()
        Session.configure(bind=self.engine)
        self.db = Session()

    def cats(self):
        """Scrape the site-wide category index and persist the category tree.

        Top-level categories (the <dt> anchors) get pid=0; their <dd>
        children get pid set to the freshly inserted parent's id.
        """
        url = 'http://www.hqchip.com/glist/all_cate.html'
        print "load index html in : %s" % (url)
        html = self.get_gzip(url=url)  # fetch the HTML (gzip-aware)
        soup = BeautifulSoup(html)  # build the DOM tree
        # NOTE(review): {"class","main-list"} is a *set* literal, not a dict;
        # BeautifulSoup tolerates it, but attrs={"class": "main-list"} is the
        # intended form (the same pattern recurs below in pro()).
        cat_html = soup.find(name="div",attrs={"class","main-list"})
        dl = cat_html.find_all(name="dl")
        for cat in dl:
            dt = cat.find(name="dt").find(name="a")
            dao = Cats()
            dao.url = dt.get("href")
            # md5 of the URL acts as a stable de-dup key.
            dao.url_code = hashlib.md5(dao.url).hexdigest().upper()
            dao.title = dt.string.strip()
            dao.name = dao.url.split('/')[-1]
            dao.pid = 0
            self.db.add(dao)
            self.db.flush()
            self.db.commit()
            pid = dao.id  # flush() populated the autoincrement id
            for dd in cat.find_all(name="dd"):
                cats = Cats()
                cats.url = dd.a.get("href")
                cats.url_code = hashlib.md5(cats.url).hexdigest().upper()
                cats.title = dd.a.string.strip()
                cats.name = cats.url.split('/')[-1]
                cats.pid = pid
                self.db.add(cats)
                self.db.flush()
                self.db.commit()

    def pro(self):
        """Walk every child category and persist its product listings.

        Resumable: cat.num stores the category's total page count and
        cat.status the last page started, so a finished category
        (status == num) is skipped on re-run.
        """
        q = self.db.query(Cats).filter(Cats.pid > 0).order_by(asc(Cats.id))
        for cat in q:
            catnum = 0
            if cat.status > 0 and cat.status == cat.num:
                continue  # already fully crawled
            elif cat.status != cat.num:
                catnum = int(cat.num)  # resume from the recorded page count
            url = u"http://www.hqchip.com%s" % (cat.url)
            print "load cats html in : %s" % (url)
            html = self.get_gzip(url=url)  # fetch the HTML (gzip-aware)
            soup = BeautifulSoup(html)  # build the DOM tree
            # Read the page count from the pager ([-2] is the last page link).
            num = 1
            try:
                page = soup.find(name="div",attrs={"class","pagn-bottom"}).find_all(name="a")[-2]
                num = page.string.strip()
                num = int(num)
                cat.num = num
            except:
                num = 1  # no pager found -> single-page category

            if num == 1:
                try:
                    # The second "subtab" table holds the product rows.
                    table = soup.find_all(name="table", attrs={"class", "subtab"})[1].tbody.find_all(name="tr")
                    for tr in table:
                        product = Product()
                        tdall = tr.find_all(name="td")
                        product.url = tdall[0].a.get("href")
                        product.model = tdall[0].a.get("data-goodsname")
                        product.goodsid = tdall[0].a.get("data-goodsid")
                        product.manufacturer_id = tdall[2].span.string.strip()
                        product.cat_code = cat.id
                        self.db.add(product)
                        self.db.flush()
                        self.db.commit()
                except:
                    # NOTE(review): bare except hides the real parse error.
                    print "----- 列表获取错误 ------"
                    continue
            else:
                # Paginated listing: page p is addressed as <url>_<p>.
                pnum = catnum
                while pnum <= num:
                    cat.status = pnum  # record progress for resume
                    print "pnum: %s" % (pnum)
                    print "num: %s" % (num)
                    try:
                        strurl = "%s_%s" % (url,pnum)
                        print "load cats html in : %s" % (strurl)
                        html = self.get_gzip(url=strurl)  # fetch the HTML
                    except:
                        # Single naive retry of the exact same request.
                        strurl = "%s_%s" % (url, pnum)
                        print "load cats html in : %s" % (strurl)
                        html = self.get_gzip(url=strurl)  # fetch the HTML
                    soup = BeautifulSoup(html)  # build the DOM tree
                    pnum += 1
                    try:
                        table = soup.find_all(name="table",attrs={"class","subtab"})[1].tbody.find_all(name="tr")
                        for tr in table:
                            product = Product()
                            tdall = tr.find_all(name="td")
                            product.url = tdall[0].a.get("href")
                            product.model = tdall[0].a.get("data-goodsname")
                            product.goodsid = tdall[0].a.get("data-goodsid")
                            product.manufacturer_id = tdall[2].span.string.strip()
                            product.cat_code = cat.id
                            self.db.add(product)
                            self.db.flush()
                            self.db.commit()
                    except:
                        continue  # skip unparsable pages silently

    def ext(self):
        """Extract per-supplier price breaks for every unprocessed product.

        The detail page embeds a JSON blob in the #numbers_list input whose
        keys match the class names of the supplier tabs found in
        "ul.dt-select-supp li".  Marks the product status=1 when done.
        """
        q = self.db.query(Product).filter(Product.status <= 0).order_by(asc(Product.id))
        for p in q:
            url = u"http://www.hqchip.com%s" % (p.url)
            print "load product html in : %s" % (url)
            try:
                html = self.get_gzip(url=url)  # fetch the HTML (gzip-aware)
                text = pq(html)
                _json = json.loads(text("#numbers_list").val())
            except:
                continue  # page missing or JSON absent -> skip product
            for spuu in text("ul.dt-select-supp li"):
                sli = pq(spuu)
                spuu_name = sli.attr("class")  # key into the JSON blob
                print spuu_name
                for spuu_json in _json.get(spuu_name).get("list").values():
                    # The site's own stock ("supplier" tab): delivery times
                    # come per-entry via the entry's own "DT" field.
                    if spuu_name == 'supplier':
                        for pp in spuu_json.get("json_price"):
                            pri = ProductPrice()
                            pri.product_id = p.id
                            goods_id = str(spuu_json.get("goods_id"))
                            pri.supplier_id = spuu_json.get("provider_name")
                            pri.goods_id = goods_id
                            pri.quantity = str(spuu_json.get("goods_number"))
                            pri.delivery = spuu_json.get("DT")[0]
                            pri.delivery_hk = spuu_json.get("DT")[1]
                            # json_price rows are indexed as [0]=minimum,
                            # [2]=price_hk, [3]=price -- TODO confirm layout.
                            pri.price = pp[3]
                            pri.price_hk = pp[2]
                            pri.minimum = pp[0]
                            self.db.add(pri)
                            self.db.flush()
                            self.db.commit()
                    else:  # other (third-party) supplier tabs
                        try:
                            for pp in spuu_json.get("json_price"):
                                pri = ProductPrice()
                                pri.product_id = p.id
                                goods_id = str(spuu_json.get("goods_id"))
                                pri.supplier_id = spuu_json.get("provider_name")
                                pri.goods_id = goods_id
                                pri.quantity = str(spuu_json.get("goods_number"))
                                # Delivery times sit at tab level here, not
                                # on the individual entry.
                                pri.delivery = _json.get(spuu_name).get("DT")[0]
                                pri.delivery_hk = _json.get(spuu_name).get("DT")[1]
                                pri.price = pp[3]
                                pri.price_hk = pp[2]
                                pri.minimum = pp[0]
                                self.db.add(pri)
                                self.db.flush()
                                self.db.commit()
                        except:
                            continue  # tolerate malformed supplier entries
                p.status = 1  # mark product as extracted
                self.db.flush()
                self.db.commit()


    def get_gzip(self, url):
        """GET *url* advertising gzip support and return the response body.

        Decompresses only when the server actually answered with
        Content-Encoding: gzip; otherwise returns the raw bytes.
        """
        header = {'Accept-Charset': 'GBK,utf-8;q=0.7,*;q=0.3',
                  'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.151 Safari/534.16'}
        request = urllib2.Request(url, headers=header)
        request.add_header('Accept-encoding', 'gzip')
        opener = urllib2.build_opener()
        f = opener.open(request)
        print f
        isGzip = f.headers.get('Content-Encoding')
        if isGzip:
            compresseddata = f.read()
            compressedstream = StringIO.StringIO(compresseddata)
            gzipper = gzip.GzipFile(fileobj=compressedstream)
            data = gzipper.read()
        else:
            data = f.read()
        return data

    def get_html(self, url):
        """Plain (non-gzip) fetch of *url*; returns "" when url is None."""
        if url != None:
            page = urllib.urlopen(url)
            html = page.read()
        else:
            html = ""
        return html

    def strQ2B(self, ustring):
        """Convert fullwidth characters in a UTF-8 byte string to their
        halfwidth ASCII equivalents; other characters pass through unchanged.
        Returns a UTF-8 encoded byte string.
        """
        ustring = ustring.decode("utf-8", 'ignore')
        rstring = ""
        for uchar in ustring:
            inside_code = ord(uchar)
            print inside_code
            if inside_code == 0x3000:
                inside_code = 0x0020  # fullwidth space -> ASCII space
            else:
                inside_code -= 0xfee0  # offset of the fullwidth ASCII block
            if inside_code < 0x0020 or inside_code > 0x7e:
                rstring += uchar.encode('utf-8', 'ignore')  # not fullwidth; keep as-is
            else:
                rstring += (unichr(inside_code)).encode('utf-8', 'ignore')
        return rstring

if __name__ == '__main__':
    print "Prepare data. is come soon ..."
    spider = hq()
    # spider.cats()
    # spider.pro()
    spider.ext()
