#coding:utf-8
'''
* coder  : dzlua
* email  : 505544956@qq.com
* module : spider
* path   : wxgzh
* file   : qsbk.py
* time   : 2017-12-27 13:11:33
'''
#--------------------#
import sys, os, codecs, re, time
sys.path.append('../module/')
#--------------------#
from spider import Spider
from bs4 import BeautifulSoup
#--------------------#

#--------------------#
# Module-wide spider configuration.
cfg = {
    'url': 'https://www.qiushibaike.com',  # site root, joined with relative hrefs via sp.url_join
    'tb': 'jsbk',  # destination table name — NOTE(review): 'jsbk' may be a typo for 'qsbk'; confirm against the DB
}
#--------------------#

#--------------------#
def info_run(sp, fun, totle, tp='begin', fenge=200000, step=10):
    """Drive `fun` over consecutive index windows of width `step` up to `totle`.

    The range is split at `fenge`:
      - tp == 'begin': process only windows whose upper bound is below `fenge`,
        then stop.
      - otherwise: skip windows below `fenge` and process the remainder.

    For each processed window the spider logs "lo, hi", calls
    fun(sp, lo, hi), and then sp.wait()s before moving on.
    """
    lower = 0
    head_mode = (tp == 'begin')
    for upper in range(step, totle + step, step):
        if head_mode and upper >= fenge:
            # 'begin' mode stops at the split point.
            break
        if not head_mode and upper < fenge:
            # tail mode: advance past windows before the split point.
            lower = upper
            continue
        sp.prt('%s, %s' % (lower, upper))
        fun(sp, lower, upper)
        sp.wait()
        lower = upper
#--------------------#

#--------------------#
def save_data(sp, href, content, comment):
    """Insert one row keyed by `href` into cfg['tb'], skipping duplicates.

    Logs via sp.logp on failure and sp.prt on success.

    NOTE(review): only a hard-coded `name` and `href` are stored; the
    `content` and `comment` parameters are currently unused — confirm
    whether they should be persisted into the `info`/`comment` columns.
    """
    sql = "INSERT INTO " + cfg['tb'] + \
        " (name, href)" \
        " VALUES(%s,%s);"
    param = ( u'鲸媒体', href )
    try:
        # Dedupe on href before inserting.
        if not sp.has_data(cfg['tb'], 'href', href):
            sp.execute(sql, param)
    except Exception:
        # Was a bare `except:` — that also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        sp.logp('save_data : error save. %s' % (href))
        return
    sp.prt('save_data : save done. %s' % (href))
#--------------------#

#--------------------#
def handle_info(sp, url, resp, args):
    """Parse one qiushibaike article page.

    Extracts the story text and walks the comment list (report line,
    author, comment body per entry), then queues the next article when an
    #articleNextLink input is present.

    Parameters: sp (spider instance), url (page URL, for logging),
    resp (HTML body, falsy on fetch failure), args (unused queue payload).
    """
    if not resp:
        # Fixed: the old message interpolated the builtin `id` function
        # (there was no `id` variable in scope).
        sp.logp('handle_info_error : no response. url: %s' % (url))
        return
    soup = BeautifulSoup(resp, "lxml")
    div = soup.find('div', class_=['content'])
    if not div:
        sp.logp('handle_info_error : no content. url: %s' % (url))
        return
    content = sp.del_empty_line(sp.get_strings(div))
    #
    div = soup.find('div', class_=['comments-list'])
    if not div:
        sp.logp('handle_info_error : no comments-list clearfix. url: %s' % (url))
        return
    for ele in div.children:
        if ele.name != 'div':
            continue
        # A usable comment entry needs a report line, an author and a body;
        # anything incomplete is skipped.
        div = ele.find('div', class_=['report'])
        if not div:
            continue
        report = sp.del_empty_line(div.text)
        #
        div = ele.find('div', class_=['replay'])
        if not div:
            continue
        a = div.find('a', class_=['userlogin'])
        if not a:
            continue
        author = sp.del_empty_line(a.get('title'))
        #
        span = div.find('span', class_=['body'])
        if not span:
            continue
        comment = sp.del_empty_line(span.text)
        # Persistence is still disabled; report/author/comment are kept for
        # when save_data is wired up (note: the call below passes 3 args to
        # a 4-parameter function — fix before enabling).
        #save_data(sp, content, '%s,%s,%s' % (report, author, comment))
    #
    print(content)  # was a Python-2 `print` statement — syntax error on py3
    #
    div = soup.find('input', id='articleNextLink')
    if div:
        # Follow the "next article" chain by re-queueing this handler.
        sp.put(handle_info, sp.url_join(cfg['url'], div.get('value')))
        print(sp.url_join(cfg['url'], div.get('value')))
    # Removed: unreachable code after `return` that called an undefined
    # save_info() with undefined names (title, cat, date, abstract, text).
#--------------------#

#--------------------#
def run(sp):
    """Seed the crawl: queue handle_info on the first article page."""
    start_url = sp.url_join(cfg['url'], 'article/119679492')
    sp.put(handle_info, start_url)
#--------------------#

#--------------------#
# Spider worker connected to a local MySQL database.
# NOTE(review): the positional args 4 and 10 are undocumented here
# (presumably thread count / queue or wait sizing) — confirm against
# Spider.__init__ in ../module/spider.
sp = Spider('localhost', 'dzlua', 'dzlua', 'spider', 4, 10, charset='utf8mb4')
#
# Create the destination table on first run; utf8mb4 so 4-byte emoji in
# scraped text survive.
sql = "CREATE TABLE IF NOT EXISTS " + cfg['tb'] + \
    " (id INT AUTO_INCREMENT PRIMARY KEY," \
    " name TEXT," \
    " author TEXT," \
    " href TEXT," \
    " date TEXT," \
    " info TEXT," \
    " comment TEXT )" \
    " DEFAULT CHARSET=utf8mb4;"
sp.execute(sql)
#----------#
run(sp)
#----------#
# Block until the work queue drains, then release DB/thread resources.
sp.wait()
sp.close()
#--------------------#
#--------------------#
