#-*- coding:utf-8 -*-
#  @author: hzq <151680523@qq.com>

# @version     $Id:filename.php

import time

import inc
from libs import libs
from libs import mysql
from libs import readRss
from libs import tags
import copy
import  re
from libs import htmlparser
def hex2dec(string_num):
    """Parse *string_num* as a base-16 string and return its integer value."""
    normalized = string_num.upper()
    return int(normalized, 16)

def table_hash(strs, num):
    """Pick a content-table shard for *strs*.

    Takes the first 4 hex digits of the MD5 of *strs*, reduces them modulo
    *num*, and maps a result of 0 up to 1 — i.e. the returned shard id is
    max(md5_prefix % num, 1).
    """
    bucket = hex2dec(str(libs.md5(strs)[0:4])) % int(num)
    return max(bucket, 1)




class dataerContrl:
    """Collects articles (from RSS feeds or by crawling pages), tokenizes
    their bodies into tags, and stores them: one subject row in
    ``tb_subject`` plus the body in one of the sharded ``tb_contents_<n>``
    tables, with tag links in ``tb_tag_logs``."""

    def __init__(self, db, tags, rss):
        # db:   mysql wrapper (query_commit / getResultAll / insertData)
        # tags: tokenizer exposing do_fenci() / getTags()
        # rss:  RSS reader exposing xmlParse()
        self.htmlparser = htmlparser.htmler(html='gry')
        self.mydb = db
        self.mytags = tags
        self.rss = rss

    def add_tags(self, tags, type, logid):
        """Insert *tags* (duplicates ignored) and link each to log *logid*.

        ``type`` is accepted for interface compatibility but is unused.
        """
        if not tags:
            # Nothing to do — also avoids generating invalid "in ()" SQL.
            return

        # Bulk-insert the tag names via the parameterized API.
        rows = [(t,) for t in tags]
        self.mydb.query_commit("insert ignore  into tb_tags (tags) values (%s) ", rows)

        # Build the quoted IN-list for the id lookup in one pass (the
        # original concatenated quadratically and ended with a no-op slice).
        # Single quotes inside a tag are doubled so they cannot break out
        # of the SQL string literal.
        tagsid = ','.join("'%s'" % t.replace("'", "''") for t in tags)
        taglist = self.mydb.getResultAll(sql="select tagid from tb_tags where tags in (%s)" % tagsid)

        # Link every resolved tagid back to the log entry.
        links = [(logid, v['tagid']) for v in taglist]
        self.mydb.query_commit("insert ignore  into tb_tag_logs (logid,tagid) values (%s,%s) ", links)

    def insertSubject(self, data):
        """Persist one collected article.

        *data* must contain 'subject' and 'conetnts' (sic — legacy key and
        column name, kept for schema compatibility).  The subject row goes
        to ``tb_subject``; the body goes to the ``tb_contents_<n>`` shard
        chosen by hashing the subject.  Unlike the original implementation,
        the caller's dict is left unmodified.
        """
        contents = data['conetnts']
        # Shard id derived from the subject (see table_hash).
        tbid = table_hash(data['subject'], 4)

        # Work on a copy: the subject row is everything except the body,
        # plus the shard id.  (The original deleted 'conetnts' from the
        # caller's dict in place.)
        row = dict(data)
        del row['conetnts']
        row['content_tb_id'] = str(tbid)
        id = self.mydb.insertData('tb_subject', row)

        # Tokenize the body and keep at most 6 'food'-domain tags.
        self.mytags.do_fenci(contents)
        tt = self.mytags.getTags('food')
        if tt:
            joined = ','.join(tt[0:6])
            self.mydb.insertData('tb_contents_%s' % tbid,
                                 {"logid": str(id), "conetnts": contents, "tags": joined})
            self.add_tags(tt, 1, id)
        else:
            self.mydb.insertData('tb_contents_%s' % tbid,
                                 {"logid": str(id), "conetnts": contents, "tags": ''})
        return True

    def do_rss(self, url):
        """Fetch the RSS feed at *url* and store every item as an article."""
        for item in self.rss.xmlParse(url):
            self.insertSubject({
                'subject': item['title'],
                'times': str(int(time.time())),
                'author': item['author'],
                'conetnts': item['description'],
            })

    def do_article(self, urls):
        """Crawl a (possibly paginated) article page at *urls* and store it.

        Follows up to 12 pagination links; a page whose title matches the
        first page is treated as a continuation and its content appended.
        When continuation pages were merged, the pager anchors themselves
        are stripped from the combined body.
        """
        self.htmlparser.setUrl(urls)
        parts = [self.htmlparser.getContents()]
        pages = self.htmlparser.getPage(12)
        title = self.htmlparser.getTitle()

        paginated = False
        for link in pages:
            self.htmlparser.setUrl(str(link))
            # Same title => continuation page of the same article.
            if self.htmlparser.getTitle() == title:
                paginated = True
                parts.append(self.htmlparser.getContents())

        body = ''.join(parts)
        if paginated:
            # Strip pager anchors ("prev" / "[n]" / "next") left in the body.
            pager_re = re.compile(r'<a [^\:<>]+>(上一页|[\[\]0-9]+|下一页)</a>', re.I)
            body = pager_re.sub('', body)

        if title and body:
            self.insertSubject({
                'subject': title,
                'times': str(int(time.time())),
                'author': 'hzq',
                'conetnts': body,
            })

         
         


     
     
        


if __name__ == '__main__':
    # Wire up the collaborators and crawl one sample article.
    database = mysql.mysql(inc.dbIni, inc.cacheDir)
    tokenizer = tags.tags('data/dict')
    rss_reader = readRss.readRss('item')

    controller = dataerContrl(database, tokenizer, rss_reader)
    controller.do_article('http://www.meishichina.com/Eat/WeiBo/201111/109119.html')
    

    









    