#coding:utf-8
'''
* coder  : dzlua
* email  : 505544956@qq.com
* module : spider
* path   : wxgzh
* file   : lspd.py
* time   : 2017-12-22 09:34:46
'''
#--------------------#
import sys, os, codecs, re, time
sys.path.append('../module/')
#--------------------#
from spider import Spider
from bs4 import BeautifulSoup
#--------------------#

#--------------------#
# Crawl configuration:
#   url -- site root, used to resolve relative "next page" links
#   tb  -- MySQL table name that stores the scraped rows
cfg = {
    'url': 'http://www.ziliaoku.org',
    'tb': 'wxgzh_lspd',
}
#--------------------#

#--------------------#
def info_run(sp, fun, totle, tp='begin', fenge=200000, step=10):
    """Drive *fun* over half-open id windows [lo, hi) of width *step*.

    With tp='begin' only windows ending below *fenge* are processed
    (the loop stops at the boundary); with any other value, windows
    ending below *fenge* are skipped and the rest run up to *totle*.
    sp.wait() is called after each batch to throttle the spider.
    """
    in_begin_phase = (tp == 'begin')
    lo = 0
    for hi in range(step, totle + step, step):
        if in_begin_phase and hi >= fenge:
            break  # begin phase never crosses the fenge boundary
        if not in_begin_phase and hi < fenge:
            lo = hi  # still below the boundary: just slide the window
            continue
        sp.prt('%s, %s' % (lo, hi))
        fun(sp, lo, hi)
        sp.wait()  # throttle between batches
        lo = hi
#--------------------#

#--------------------#
def save_data(sp, name, href, title):
    """Insert one scraped list entry (category name, title, href) into cfg['tb'].

    Failures are logged and swallowed so one bad row cannot stop the crawl.
    """
    sql = "INSERT INTO " + cfg['tb'] + \
        " (name, title, href)" \
        " VALUES(%s,%s,%s);"
    param = ( name, title, href )
    try:
        sp.execute(sql, param)
    except Exception:
        # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
        # still propagate; DB errors remain best-effort as before.
        sp.logp('save_data : error save. %s, %s' % (title, href))
        return
    sp.prt('save_data : save done. %s, %s' % (title, href))
#--------------------#

#--------------------#
def save_info(sp, id, info):
    """Fill the info (article body text) column for the row with this id.

    Failures are logged and swallowed so one bad row cannot stop the crawl.
    """
    sql = 'UPDATE ' + cfg['tb'] + ' SET info=%s WHERE id=%s;'
    param = ( info, id )
    try:
        sp.execute(sql, param)
    except Exception:
        # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
        # still propagate; DB errors remain best-effort as before.
        sp.logp('save_info : error save. id: %s' % (id))
        return
    sp.prt('save_info : save done. id: %s' % (id))
#--------------------#

#--------------------#
def handle_list(sp, url, resp, name):
    """Parse one list page: save every article link, then queue the next page.

    name is the category label stored alongside each saved row.
    """
    if not resp:
        sp.logp('handle_list_error : no response. url: %s' % url)
        return
    soup = BeautifulSoup(resp, "lxml")
    div = soup.find('div', class_=['box'])
    if not div:
        sp.logp('handle_list_error : no box. url: %s' % url)
        return
    for ele in div.children:
        if ele.name == 'ul':
            # <ul> holds the article links for this page.
            for a in ele.find_all('a'):
                save_data(sp, name, a.get('href'), a.text)
        elif ele.name == 'div':
            # Pagination block: follow the "next page" (下一页) link.
            for a in ele.find_all('a'):
                if a.text == u'下一页':
                    url = sp.url_join(cfg['url'], a.get('href'))
                    # BUG FIX: the initial enqueue (run1) passes the category
                    # name as an extra arg; the re-queue must forward it too,
                    # otherwise the next page's callback is missing its 4th
                    # argument. (Assumes sp.put passes extra args through --
                    # matches the run1 call site.)
                    sp.put(handle_list, url, name)
                    break
#--------------------#

#--------------------#
def handle_info(sp, url, resp, id):
    """Parse one article page and store its body text into the info column."""
    if not resp:
        sp.logp('handle_info_error : no response. id: %s, url: %s' % (id, url))
        return
    soup = BeautifulSoup(resp, "lxml")
    div = soup.find('div', class_=['article'])
    if not div:
        sp.logp('handle_info_error : no box. id: %s, url: %s' % (id, url))
        return
    text = sp.del_empty_line(sp.get_strings(div))
    # Strip leftover non-breaking-space entities. The original pattern
    # r'&nbsp' removed the entity but left the trailing ';' behind;
    # consume the semicolon too when present.
    text = re.sub(r'&nbsp;?', '', text)
    save_info(sp, id, text)
#--------------------#

#--------------------#
def run1(sp):
    """Seed stage 1 of the crawl: queue page 1 of the health-guide list."""
    start_url = sp.url_join(cfg['url'], 'jiankang/list/1.html')
    sp.put(handle_list, start_url, u'健康指南')
#--------------------#

#--------------------#
def run2(sp, begin, end):
    """Queue handle_info for every row in [begin, end) whose info is empty.

    begin/end come from info_run and are plain ints, so the %-interpolation
    below is not exposed to untrusted input.
    """
    sql = 'SELECT * FROM %s WHERE id>=%s and id<%s and info is null;' % (
        cfg['tb'], begin, end )
    for row in sp.select(sql):
        # Row layout follows the CREATE TABLE: (id, name, title, href, info);
        # fetch the href and tag the response with the row id.
        sp.put(handle_info, row[3], row[0])
#--------------------#

#--------------------#
# Spider(host, user, password, db, workers, queue_depth) -- assumed from the
# positional args; utf8mb4 so 4-byte CJK text stores cleanly. TODO confirm
# against the Spider class in ../module/.
sp = Spider('localhost', 'dzlua', 'dzlua', 'spider', 4, 10, charset='utf8mb4')
#
# Ensure the target table exists before any worker writes to it.
sql = "CREATE TABLE IF NOT EXISTS " + cfg['tb'] + \
    " (id INT AUTO_INCREMENT PRIMARY KEY," \
    " name TEXT," \
    " title TEXT," \
    " href TEXT," \
    " info TEXT )" \
    " DEFAULT CHARSET=utf8mb4;"
sp.execute(sql)
#----------#
# Stage 1 (run1: collect list pages) is commented out -- presumably already
# done. Stage 2 fetches article bodies for ids up to 3294 in batches of 10;
# fenge=10000 > 3294, so the whole range runs inside the 'begin' phase.
#run1(sp)
info_run(sp, run2, 3294, 'begin', 10000, 10)
#----------#
sp.wait()   # block until the work queue drains
sp.close()
#--------------------#
