#coding:utf-8
'''
* coder  : dzlua
* email  : 505544956@qq.com
* module : spider
* path   : sougou
* file   : run.py
* time   : 2018-01-10 09:41:56
'''
#--------------------#
import sys, os, codecs, re, time, threadpool
sys.path.append('../module/')
#--------------------#
from spider import DataBase
from bs4 import BeautifulSoup
#--------------------#

#--------------------#
class Parser(DataBase):
    '''Parse Sogou corpus dump files and insert each <doc> record into MySQL.

    Combines a threadpool (for optional asynchronous parsing) with the
    connection helpers (`execute`, `prt`, `logp`, `close`) inherited from
    `spider.DataBase`.
    '''
    def __init__(self, host, user, pwd, db, threads=4, tasks=0, charset='utf8mb4'):
        # Worker pool for queued parse jobs; `tasks` bounds the request queue.
        self.pool = threadpool.ThreadPool(threads, tasks)
        DataBase.__init__(self, host, user, pwd, db, charset)
    def wait(self):
        '''Block until every queued pool request has completed.'''
        self.pool.wait()
    def put(self, fun, *args, **kwargs):
        '''Queue `fun(*args, **kwargs)` for execution on the thread pool.'''
        for req in threadpool.makeRequests(fun, [(args, kwargs)]):
            self.pool.putRequest(req)
        self.prt('put fun.')
    def url_join(self, host, url):
        '''Join `host` and `url`, dropping a single leading '/' from `url`.

        Despite the name, this is used for filesystem paths (see `start`).
        Fixed: empty `url` no longer raises IndexError.
        '''
        if url.startswith('/'):
            return os.path.join(host, url[1:])
        return os.path.join(host, url)
    @staticmethod
    def dir_files(path, dirs=None, files=None):
        '''Recursively list files under `path`, as paths relative to `path`.

        dirs  -- directory names to skip (whole subtree), default none
        files -- file names to skip, default none

        Fixed: mutable default arguments replaced with None sentinels.
        '''
        skip_dirs = dirs if dirs is not None else []
        skip_files = files if files is not None else []
        fs = []
        #
        def _walk(root, rel=''):
            for name in os.listdir(root):
                full = os.path.join(root, name)
                if os.path.isfile(full):
                    if name not in skip_files:
                        fs.append(os.path.join(rel, name))
                elif os.path.isdir(full):
                    if name not in skip_dirs:
                        _walk(full, os.path.join(rel, name))
        #
        _walk(path)
        return fs
    #----------#
    def start(self, root):
        '''Parse every .txt/.dat file found under `root`.'''
        for f in self.dir_files(root):
            if not f.endswith(('.txt', '.dat')):
                continue
            # NOTE: async dispatch via the pool is intentionally disabled;
            # files are currently parsed inline on the calling thread.
            #self.put(self.handle_file, self.url_join(root, f))
            self.handle_file(self.url_join(root, f))
    def handle_file(self, file):
        '''Parse one dump file of <doc>...</doc> records and save each record.

        The fixed slice offsets assume each tag pair sits alone on its line
        with a trailing newline (e.g. '<url>...</url>\\n') -- TODO confirm
        against the corpus format; a missing newline would clip one char.
        '''
        data = {}
        with open(file, 'r') as fp:
            for line in fp:
                tp = line[:5]           # 5-char prefix disambiguates all tags
                if tp == '<doc>':
                    data = {}           # start a fresh record
                elif tp == '</doc':
                    self.save_data(**data)
                elif tp == '<url>':
                    data['url'] = line[5:-7]
                elif tp == '<docn':
                    data['docno'] = line[7:-9]
                elif tp == '<cont':
                    # '<cont' matches both <contenttitle> and <content>
                    if line[:14] == '<contenttitle>':
                        data['title'] = line[14:-16]
                    else:
                        data['content'] = line[9:-11]
    def save_data(self, url, docno, title, content):
        '''Insert one parsed document row; logs instead of raising on failure.'''
        sql = u"INSERT INTO sougou_data" \
            u" (url, docno, contenttitle, content)" \
            u" VALUES(%s,%s,%s,%s);"
        param = ( url, docno, title, content )
        try:
            # Fixed: was the module-global `sp`; use self so the method
            # works on any Parser instance, not just the script's global.
            self.execute(sql, param)
        except Exception:
            # Narrowed from a bare except: no longer traps SystemExit etc.
            self.logp('save_data : error save. %s, %s' % (url, docno))
            return
        self.prt('save_data : save done. %s, %s' % (url, docno))
#--------------------#

#--------------------#
#--------------------#
# Connect to the local spider database; gb2312 matches the Sogou corpus encoding.
sp = Parser('localhost', 'dzlua', 'dzlua', 'spider', threads=4, tasks=10, charset='gb2312')
#
# Make sure the destination table exists before any insert runs.
sql = (
    "CREATE TABLE IF NOT EXISTS sougou_data "
    " (id INT AUTO_INCREMENT PRIMARY KEY,"
    " url TEXT,"
    " docno TEXT,"
    " contenttitle blob,"
    " content blob )"
    " DEFAULT CHARSET=gb2312;"
)
sp.execute(sql)
#
# Parse the whole corpus tree, drain the worker pool, then release the DB.
sp.start('/home/dzlua/tg-data/data')
sp.wait()
sp.close()
#--------------------#
