#! /usr/bin/python
#coding=utf-8

import hashlib,re,sys
from sqlalchemy import *
from sqlalchemy.orm import mapper, sessionmaker
from bs4 import BeautifulSoup
from  datetime  import  *
import math
import string
import gzip,urllib2,random,time
import urllib
import StringIO

# Python-2-only hack: force the process-wide default codec to UTF-8 so that
# the implicit str<->unicode coercions below (scraped page text assigned to
# Unicode DB columns) do not raise UnicodeDecodeError.  ``reload(sys)``
# restores ``sys.setdefaultencoding``, which ``site.py`` deletes at startup.
# NOTE(review): this is a well-known anti-pattern kept for compatibility
# with the rest of this Py2 script; it has no Python 3 equivalent.
default_encoding = 'utf-8'
if sys.getdefaultencoding() != default_encoding:
    reload(sys)
    sys.setdefaultencoding(default_encoding)

class Category(object):
    """Plain row holder classically mapped onto the ``category`` table."""

class Product(object):
    """Plain row holder classically mapped onto the ``product`` table."""

class ProductExt(object):
    """Plain row holder classically mapped onto the ``product_ext`` table."""

class Cat:
    """Scraper for www.tomtop.com.

    Crawls the site's 3-level category menu and paginated product listings
    into a local MySQL database (tables ``category``, ``product``,
    ``product_ext``) via classical SQLAlchemy mappings, and can emit
    OpenCart-style SQL for the collected category tree (``ppaths``).
    """
    def __init__(self):
        # Connect to MySQL.  NOTE(review): credentials are hard-coded and
        # echo=True logs every SQL statement to stdout.
        self.engine =create_engine('mysql://root:123456@localhost/focal?charset=utf8',echo=True)
        #self.engine =create_engine('mysql://focal:focalweiming@localhost/focal?charset=utf8',echo=True)
        metadata = MetaData()
        # Category tree scraped from the site menu.  ``desc`` doubles as a
        # "pages crawled" marker (see products()); ``paths`` holds the
        # comma-separated ancestor-id chain built by catpath().
        category_table=Table('category',metadata,
            Column('id',Integer, primary_key=True),
            Column('name',CHAR(255)),
            Column('url',CHAR(255)),
            Column('title',CHAR(255)),
            Column('keywords',CHAR(255)),
            Column('description',Text),
            Column('desc',Text),
            Column('url_code',Unicode(255)),
            Column('level',Unicode(255)),
            Column('paths',CHAR(255)),
            Column('lang',Unicode(255)),
        )
        # One row per product found on a listing page.  ``url_code`` is the
        # uppercase MD5 of the product URL and is part of the primary key,
        # so re-crawling the same URL raises on insert (swallowed by the
        # bare except in products()).
        product_table=Table('product',metadata,
            Column('id',Integer, primary_key=True),
            Column('name',CHAR(255)),
            Column('url_code',Unicode(255), primary_key=True),
            Column('url',Unicode(255)),
            Column('cat_code',Unicode(255)),
            Column('image',Unicode(255)),
            Column('price',Unicode(255)),
            Column('page',Integer),
            Column('lang',Unicode(255)),
        )
        # Per-product detail captured from the product page itself
        # (gallery image URLs, spec HTML); filled by product_one().
        productext_table=Table('product_ext',metadata,
            Column('id',Integer, primary_key=True),
            Column('product_id',Integer),
            Column('images',Text),
            Column('summary',Text),
            Column('description_m',Text),
            Column('seo_description',Text),
            Column('seo_title',Text),
            Column('seo_keywords',Text),
            Column('lang',Unicode(255)),
        )
        metadata.create_all(self.engine)
        # Classical (non-declarative) mappings onto the plain classes above.
        mapper(Category,category_table)
        mapper(Product,product_table)
        mapper(ProductExt,productext_table)
        Session = sessionmaker()
        Session.configure(bind=self.engine)
        self.db = Session()

    # Scrape the 3-level category menu from the homepage.
    def getmenucats(self):
        """Scrape the homepage menu into ``category``: <h4> anchors become
        level-1 rows, <dt> anchors level-2, <dd> anchors level-3.  Each row
        is committed individually; ``url_code`` is the uppercase MD5 of the
        category URL."""
        url='http://www.tomtop.com/'
        print "load index html in : %s" % (url)
        html=self.get_gzip(url=url)# fetch the (possibly gzip-encoded) HTML
        #print html
        soup = BeautifulSoup(html)# build the DOM tree
        print "Find name h4:"
        topcat=soup.find_all(name="h4")# every <h4> tag is a top-level category
        print topcat
        print "TOP Category start"
        for tcat in topcat:
            catm = Category()# new mapped row object
            catm.name=tcat.a.string.strip()
            catm.url=tcat.a.get('href').strip()
            catm.url_code=hashlib.md5(catm.url).hexdigest().upper()
            catm.level='1'

            # persist this row (one commit per category)
            self.db.add(catm)
            self.db.flush()
            self.db.commit()
        print "two category start ..............."
        twocats=soup.find_all(name="dt")
        print(twocats)
        for cat2 in twocats:
            cat2s = Category()
            ncat = cat2.a
            print('..................................................')
            # skip <dt> entries that carry no link
            if ncat == None:
                continue
            cat2s.name = cat2.a.string.strip()
            print("cat2s name " + cat2s.name)
            cat2s.url = cat2.a.get("href")
            print("cat2s url " + cat2s.url)
            cat2s.url_code = hashlib.md5(cat2s.url).hexdigest().upper()
            print("cat2s url_code " + cat2s.url_code)
            cat2s.level='2'

            # persist this row
            self.db.add(cat2s)
            self.db.flush()
            self.db.commit()
        print("menu 2 is ok................")

        print "three category start ..............."
        botcats=soup.find_all(name="dd")
        for cat3 in botcats:
            cat3s = Category()
            if cat3.a == None:
                continue
            # promotional menu entries are not real categories — skip them
            if cat3.a.string.strip() == "New Arrival":
                continue
            if cat3.a.string.strip() == "Top Sellers":
                continue
            if cat3.a.string.strip() == "Deals":
                continue
            cat3s.name = cat3.a.string.strip()
            cat3s.url = cat3.a.get("href")
            cat3s.url_code = hashlib.md5(cat3s.url).hexdigest().upper()
            cat3s.level='3'
            # persist this row
            self.db.add(cat3s)
            self.db.flush()
            self.db.commit()
        print("menu 3 is ok................")

    def products(self):
        """Crawl paginated product listings for every level-2/3 category
        whose ``desc`` is still '0' (i.e. not yet crawled).  On the first
        page the category's meta description/keywords are captured and
        ``desc`` is set to the page count, marking the category done."""
        print "Select Category level 3:"
        # q=self.db.query(Category).filter(Category.url.like("http://www.tomtop.com/cell-phones/%")).order_by(desc(Category.id))
        q=self.db.query(Category).filter(Category.level.in_((2,3))).filter(Category.desc=='0').order_by(desc(Category.id))
        for cat in q:
            urls = cat.url
            print urls
            # htmlpage=self.get_html(url=None)
            htmlpage=self.get_gzip(urls)
            bfpage = BeautifulSoup(htmlpage)
            # NOTE(review): attrs here is a *set* literal, not a dict —
            # likely intended {"class": "category-head"}; confirm against
            # the BeautifulSoup version in use.
            totalp = bfpage.find(name="div",attrs={"class","category-head"})
            nums = totalp.span.text
            print nums
            # Py2: filter() on a str returns a str of just the digit chars.
            numsa = filter(lambda x:x.isdigit(),nums)
            # total pages; 35 appears to be the listing page size — TODO confirm
            upnum = int(math.ceil(float(numsa)/35))
            print(upnum)
            if upnum==0:
                continue
            page =1
            while page<=upnum:
                # Py2: bytes is an alias of str, so this is just str(page)
                urlone = urls + "?page=" + bytes(page)
                if page==1:
                    # capture the category's SEO meta tags once, and mark
                    # the category crawled by storing the page count in desc
                    cat.description =bfpage.find(attrs={"name":"description"})['content']
                    cat.keywords = bfpage.find(attrs={"name":"keywords"})['content']
                    cat.desc=upnum
                    print cat.description
                    print cat.keywords
                print urlone
                page+=1
                html=self.get_gzip(urlone)
                # html=self.get_html(url=None)
                # print html
                bf = BeautifulSoup(html)
                plist = bf.find_all(name="li",attrs={"class":"item"})
                for pone in plist:
                    try:
                        product=Product()
                        proimg=pone.find(name="div",attrs={"class":"product-image iconDiscount"})
                        product.url= proimg.a.get("href").strip()
                        product.image= proimg.a.img.get("src").strip()
                        product.name= proimg.a.img.get("alt").strip()
                        product.name=self.strQ2B(product.name)
                        # proprice=pone.find(name="p",attrs={"class":"old-price"})
                        proprice=pone.find(name="span",attrs={"class":"price"})
                        product.price= proprice.text.strip().replace("US$","")
                        product.cat_code=cat.id
                        product.url_code=hashlib.md5(product.url).hexdigest().upper()
                        # page was already incremented above, hence the -1
                        product.page=page-1
                        product.lang=1
                        self.db.add(product)
                        self.db.flush()
                        self.db.commit()
                    # NOTE(review): bare except silently skips any failure —
                    # including duplicate-key inserts (url_code is a PK) and
                    # parse errors; consider catching specific exceptions.
                    except:
                        continue
                    # NOTE(review): printed once per product, not once per
                    # page — looks like leftover debug output.
                    print "Now page is last page ..."

    def product_one(self):
        """Fetch each product's own page (first 100 products) and store the
        gallery image URLs and the spec <div> into ``product_ext``."""
        print "Select product list is all"
        q=self.db.query(Product).filter(Product.id < 100)
        i=1
        for pro in q:
            try:
                html=self.get_gzip(pro.url)
            # NOTE(review): bare except — any fetch error skips the product
            except:
                continue
            bf = BeautifulSoup(html)
            print "find div is class=product-specs"
            productspecs=bf.find(name="div",attrs={"class":"product-specs"})
            #print description_m
            print "summary"
            summary = "1"
            #jqzoom=bf.find(name="ul",attrs={"ul":"picList"})
            # NOTE(review): attrs is a *set* literal again — likely intended
            # {"class": "li_small_private"}; confirm BS4 behavior.
            jqzoomimg=bf.find_all(name="img",attrs={"class","li_small_private"})
            images=""
            # comma-joined (trailing comma included) list of gallery image URLs
            for img in jqzoomimg:
                images=images+img.get("src")+","
            extm = ProductExt()
            extm.product_id=pro.id
            extm.images=images.strip()
            #extm.summary=summary.text
            # stores the BS4 Tag; it is stringified to HTML on insert
            extm.description_m=productspecs
            self.db.add(extm)
            self.db.flush()
            self.db.commit()
            print "add product %s is OK!" % (i)
            i=i+1

    def get_gzip(self,url):
        """HTTP GET ``url`` advertising gzip support; transparently
        decompress the response when Content-Encoding says it is gzipped
        and return the raw body bytes."""
        header = {'Accept-Charset':'GBK,utf-8;q=0.7,*;q=0.3','User-Agent' : 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.151 Safari/534.16'}
        request = urllib2.Request(url,headers=header)
        request.add_header('Accept-encoding', 'gzip')
        opener = urllib2.build_opener()
        f = opener.open(request)
        print f
        isGzip = f.headers.get('Content-Encoding')
        if isGzip :
            compresseddata = f.read()
            compressedstream = StringIO.StringIO(compresseddata)
            gzipper = gzip.GzipFile(fileobj=compressedstream)
            data = gzipper.read()
        else:
            data = f.read()
        return data

    def get_html(self,url):
        """Plain (non-gzip) fetch of ``url``; returns '' when url is None."""
        if url != None:
            #url="http://www.focalprice.com/iphone-5s/ca-001024.html"
            page = urllib.urlopen(url)
            html = page.read()
        else:
            html=""
        return html

    def catpath(self):
        # Build each category's ancestor path from its URL structure:
        # strip the site prefix, then look up each URL segment's category id
        # and store the comma-joined id chain in ``paths``.
        q=self.db.query(Category).filter(Category.level.in_((1,2,3))).order_by(asc(Category.id))
        # fobj=open('cat.sql','a')
        # sql=''
        # for cat in q:
        #     cat.url = cat.url.strip('http://www.tomtop.com/')
        #     # urls = cat.url
        #     # print cat.name
        #     # print cat.id
        #     sql=sql+'\n'+"INSERT INTO oc_category (`category_id`,`status`,parent_id,top,`column`,sort_order,date_added,date_modified) VALUES ( %s,1,0,0,1,0,'2014-10-20 16:27:03','2014-10-20 16:27:03');" % (cat.id)
        #     # self.db.execute(sql)
        #     name=cat.name.replace("'","\\'").replace('"','\\"')
        #     print name
        #     sql=sql+'\n'+"INSERT INTO oc_category_description (`category_id`,`language_id`,`name`,description,meta_description,meta_keyword) VALUES ( %s , 1 , \"%s\",'','','');" % (cat.id,name)
        #     # self.db.execute(sql)
        # fobj.write(sql)
        # fobj.close()
        for cat in q:
            # NOTE(review): the dots in the pattern are unescaped, so they
            # match any character; harmless here but worth tightening.
            re_site=re.compile('http://www.tomtop.com/',re.I)
            urls = re_site.sub('',cat.url)
            x='http://www.tomtop.com/'
            if '/' in urls:
                # multi-segment URL: resolve each ancestor segment to its id
                url=urls.split('/')
                print cat.url
                print url
                paths=[]
                for i in url:
                    x=x+i
                    print x
                    # NOTE(review): raises AttributeError if a segment has
                    # no matching category row (so is None) — TODO confirm
                    # every prefix URL exists in the table.
                    so=self.db.query(Category).filter(Category.url==x).first()
                    x=x+'/'
                    print so.id
                    paths.append('%s' % (so.id))
                paths=','.join(paths)
                cat.paths=paths
            else:
                # top-level category: path is just its own id
                cat.paths=cat.id
                # print '1'
            self.db.commit()

    def ppaths(self):
        """Append OpenCart ``oc_category_path`` INSERT statements (one per
        ancestor in ``paths``) to the local file cat.sql."""
        q=self.db.query(Category).filter(Category.level.in_((1,2,3))).order_by(asc(Category.id))
        sql=''
        fobj=open('cat.sql','a')
        for i in q:
            level=0
            print i.paths
            if ',' in i.paths:
                pids=i.paths.split(',')
                for pid in pids:
                    sql=sql+'\n'+ "INSERT INTO oc_category_path (`category_id`,`path_id`,`level`) VALUES (%s,%s,%s);" % (i.id,pid,level)
                    level=level+1
            else:
                sql=sql+'\n'+"INSERT INTO oc_category_path (`category_id`,`path_id`,`level`) VALUES (%s,%s,%s);" % (i.id,i.id,level)
        fobj.write(sql)
        fobj.close()

    def strQ2B(self,ustring):
        """Convert fullwidth (zenkaku) characters in a UTF-8 string to their
        halfwidth ASCII equivalents; characters outside the fullwidth range
        are passed through unchanged.  Returns a UTF-8 encoded str."""
        ustring=ustring.decode("utf-8",'ignore')
        rstring=""
        for uchar in ustring:
            inside_code=ord(uchar)
            # NOTE(review): debug print left in — runs once per character
            print inside_code
            if inside_code==0x3000:
                # fullwidth space -> ASCII space
                inside_code=0x0020
            else:
                # shift the fullwidth block (FF01-FF5E) down to ASCII
                inside_code-=0xfee0
            if inside_code<0x0020 or inside_code>0x7e:
                # not a fullwidth char after shifting: keep the original
                rstring+=uchar.encode('utf-8','ignore')
            else:
                rstring+=(unichr(inside_code)).encode('utf-8','ignore')
        return rstring

# Entry point: only the product-list crawl is enabled; the other stages
# (menu scrape, per-product detail, path building, SQL export) are left
# commented out and appear to be run ad hoc, one stage at a time.
if __name__ == '__main__':
    #init_db()
    print "Prepare data. is come soon ..."
    cat=Cat()
    # print "Load Category data ..."
    #cat.getmenucats()
    print "Load Product list data ..."
    cat.products()
    # cat.product_one()
    # print "Load Product data ..."
    #cat.product_one()
    # cat.catpath()
    #cat.ppaths()
    print "OK!!!!"