# -*- coding:utf-8 -*-
"""
http://www.thunder.cc/
"""
import requests
from bs4 import BeautifulSoup
import json
import time


def get_html_text(url):
    headers = {
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Connection': 'keep-alive',
        'Host': 'www.chaindd.com',
        'Referer': 'http://www.chaindd.com/',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
        'X-Requested-With': 'XMLHttpRequest', }
    try:
        r = requests.get(url, timeout=30, headers=headers)
        r.raise_for_status()
        r.encoding = r.apparent_encoding
        return r.json()
    except Exception as e:
        print(e)
        with open('errorurl.txt', 'a', encoding='utf-8') as f:
            f.write(url + '\n')
        return None


def get_html_text1(url):
    headers = {
        'Accept-Language': 'zh-CN,zh;q=0.9',
        'Connection': 'keep-alive',
        'Host': 'www.chaindd.com',
        'Referer': 'http://www.chaindd.com/',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.102 Safari/537.36',
        'X-Requested-With': 'XMLHttpRequest', }
    try:
        r = requests.get(url, timeout=30, headers=headers)
        r.raise_for_status()
        r.encoding = r.apparent_encoding
        return r.text
    except Exception as e:
        print(e)
        with open('errorurl.txt', 'a', encoding='utf-8') as f:
            f.write(url + '\n')
        return None


def writedata(l):
    """Append one record to 链得得推荐文章.txt as a JSON line.

    The record *l* is serialized with ensure_ascii=False so Chinese
    text stays readable in the output file.
    """
    line = json.dumps(l, ensure_ascii=False)
    with open('链得得推荐文章.txt', 'a', encoding='utf-8') as out:
        out.write(line)
        out.write('\n')

def get_urls(url):
    """Fetch one listing page and extract per-article fields.

    Returns a 5-tuple of parallel lists:
    (titles, ctimes, imgs, article_urls, authors).

    Raises ValueError when the listing could not be fetched; the
    original code crashed with a TypeError on subscripting None, which
    the caller's error handler then logged with no useful message.
    """
    payload = get_html_text(url)
    if payload is None:
        raise ValueError('listing fetch failed: {}'.format(url))
    data = payload['data']
    titles = [item['title'] for item in data]
    # build an <img> tag from the 200x150 thumbnail; the markup was
    # malformed before ('<img "src={}">' put the quote in the wrong place)
    imgs = ['<img src="{}">'.format(item['thumb_image']['200_150'][0]['url'])
            for item in data]
    article_urls = [item['short_url'] for item in data]
    # time_updated is a unix timestamp; render it as local wall-clock time
    ctimes = [time.strftime("%Y-%m-%d %H:%M:%S",
                            time.localtime(int(item['time_updated'])))
              for item in data]
    # first listed author is taken as the article's source/institution
    authors = [item['authors'][0]['username'] for item in data]
    return (titles, ctimes, imgs, article_urls, authors)


def get_datas(url, title, ctime, img, jigou):
    """Download one article page and assemble its record.

    Fetches *url*, extracts the ``.inner`` element's HTML as the article
    body, and returns a dict with the given title/img/ctime/jigou
    metadata.  Returns None when the page could not be fetched.
    """
    page = get_html_text1(url)
    print('{}页面获取中'.format(url))
    if not page:
        return None
    body = str(BeautifulSoup(page, 'lxml').select_one('.inner'))
    return {
        "title": title,
        "img": img,
        "context": body,
        "ctime": ctime,
        "jigou": jigou,
    }


def result():
    """Crawl listing pages and append new articles to the output file.

    Reads the timestamp of the newest previously-saved article from the
    first line of config.txt.  Pages are fetched in order; crawling
    stops at the first article whose ctime is not newer than that
    timestamp, after writing an all-empty sentinel record.
    """
    try:
        with open('config.txt', encoding='utf-8') as fd:
            # strip(): readline() keeps the trailing newline, which made
            # the `last_seen >= ctime` string comparison succeed for an
            # article with exactly the saved timestamp's prefix
            last_seen = fd.readline().strip()
    except OSError:
        last_seen = ''  # no config yet: every article counts as new
    page = 1
    keep_going = True
    while keep_going:
        url = 'http://www.chaindd.com/ajax/common/get?url=%2Fv1%2Flists%2Fhome&data=offset%3D{}%26limit%3D15%26post_fields%3Dv_tags%3Baccess%3Bif_current_user_voted%26tag_special_background_image_size%3D%5B%22640_256%22%5D%26auction_background_image_size%3D%5B%22640_256%22%5D%26ad_image_size%3D%5B%22640_256%22%5D%26focus_post_image_size%3D%5B%22640_256%22%5D%26homepage_universal_article_group_image_size%3D%5B%22210_240%22%5D%26special_column_post_image_size%3D%5B%22210_240%22%5D%26homepage_tag_group_image_size%3D%5B%22210_240%22%5D%26homepage_author_group_image_size%3D%5B%22210_240%22%5D%26thumb_image_size%3D%5B%22200_150%22%2C%22640_256%22%2C%22300_240%22%2C%22200_160%22%5D'
        url = url.format(page * 15)
        try:
            titles, ctimes, imgs, urllist, authors = get_urls(url)
            print('第{}页获取中'.format(page))
            if not urllist:
                # end of the feed.  A bare `continue` here used to skip
                # the page counter increment and refetch the same empty
                # page forever.
                break
            for i, article_url in enumerate(urllist):
                d = get_datas(article_url, titles[i], ctimes[i], imgs[i], authors[i])
                if d is None:
                    # article fetch failed and was already logged;
                    # previously d['ctime'] raised a TypeError here
                    continue
                if last_seen >= d['ctime']:
                    # reached already-saved content: write the sentinel
                    # record and stop crawling
                    keep_going = False
                    writedata({"title": '', "img": '', "context": '', "ctime": '', "jigou": ''})
                    break
                writedata(d)
                print(page)
        except Exception:
            # best-effort crawl: log the failing listing URL and move on
            with open('errorurl.txt', 'a', encoding='utf-8') as f:
                f.write(url + '\n')

        page += 1

def main():
    """Script entry point: run the incremental crawl."""
    result()

# with open('url.txt', encoding='utf-8') as f:
#     a = 1
#     for x in f:
#         try:
#             get_datas(x)
#             print(a)
#             a += 1
#         except:
#             with open('errorurl.txt', 'a', encoding='utf-8') as f:
#                 f.write(x + '\n')
