# coding=utf-8

import threading
from time import sleep
import json
import csv

# each_down
import requests


class Statistics:
    """Progress counter shared between worker threads and the main thread.

    The main thread prints this object periodically while workers increment
    it, so the increment is guarded by a lock: ``self.done += 1`` is a
    read-modify-write and is not atomic across threads.
    """

    def __init__(self):
        self._lock = threading.Lock()  # protects self.done
        self.done = 0  # number of CSV rows written so far

    def __str__(self):
        return "done=%s" % (self.done)

    def add_done(self):
        """Atomically record one more completed record."""
        with self._lock:
            self.done += 1


# Module-level singleton: incremented by worker threads, printed by the
# main thread's status loop.
statistics = Statistics()

# Cooperative shutdown flag: set to False by list_2_each once the server
# reports no more pages, which ends the main thread's heartbeat loop.
flag_run = True


def p(str1):
    """Debug print helper: write *str1* to stdout on its own line.

    Kept as a single choke point so verbose crawl output can be silenced
    in one place later. (The original ``if 1:`` debug gate was dead code
    and has been removed; output is unchanged.)
    """
    print("%s" % str1)


# Request headers mimicking a logged-in Chrome browser session; toutiao.com
# serves this JSON endpoint only to browser-like requests with a valid Cookie.
header1 = {
    "Accept":"application/json, text/javascript",
    "X-Requested-With":"XMLHttpRequest",
    "User-Agent":"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.23 Safari/537.36",
    "Content-Type":"application/x-www-form-urlencoded",
    "Referer":"https://www.toutiao.com/c/user/3392004608/",
    "Accept-Encoding":"gzip, deflate, br",
    "Accept-Language":"zh-CN,zh;q=0.9",
    # Placeholder (Chinese: "copy it from the Chrome F12 Network tab").
    # Must be replaced with a real logged-in Cookie before the crawler works.
    'Cookie': '在chrome F12 network中找到拷贝过来'
}


def _parse_json(json_obj, str1):
    try:
        return json_obj[str1]
    except Exception as e:
        print("#################################")
        print(e)
        print("###")
        print(json_obj)
        print("###")
        print(str1)
        print("#################################")
        return ''

    
def list_2_each(session, writer, is_title, max_repin_time):
    """Walk the user's favourites feed page by page and write rows to CSV.

    Each page is fetched with the shared ``session`` (using the module-level
    ``header1``); ``max_repin_time`` is the pagination cursor returned by the
    previous page (0 for the first page). When ``is_title`` is True the first
    record's keys are written as the CSV header row. Sets the module-level
    ``flag_run`` to False when the server reports no more pages, and bumps
    the shared ``statistics`` counter for every data row written.

    Changes vs. the original:
    - pagination is a loop instead of self-recursion (a long feed could hit
      Python's recursion limit);
    - ``json.loads`` no longer passes ``encoding=`` (deprecated since 3.1,
      removed in Python 3.9 — it raised TypeError on modern interpreters);
    - the first record is no longer dropped: its keys become the header AND
      its values are written as a row.
    """
    global statistics
    global flag_run

    url_tpl = ("https://www.toutiao.com/c/user/favourite/?page_type=2"
               "&user_id=3392004608&max_behot_time=0&count=20"
               "&as=A195EBCD6557DA5&cp=5BD537DD5AA58E1"
               "&_signature=AOjajRAaW0Oqx5oN0LxVfADo2p&max_repin_time=%s")

    while True:
        r = session.post(url_tpl % max_repin_time, headers=header1)
        json_ret = json.loads(r.text)

        for json_data1 in json_ret['data']:
            if is_title:
                # First record seen: emit its keys as the CSV header once.
                writer.writerow(list(json_data1.keys()))
                is_title = False
            writer.writerow(list(json_data1.values()))
            statistics.add_done()

        if not json_ret['has_more']:
            # Tell the main thread's heartbeat loop we are finished.
            flag_run = False
            return

        max_repin_time = json_ret['max_repin_time']
        sleep(0.5)  # be polite between page fetches

    
if __name__ == '__main__':

    session = requests.Session()
    # newline='' as required by the csv module; utf-8 so Chinese titles
    # round-trip. ``with`` guarantees the session and file are closed even
    # if a worker or the heartbeat loop raises.
    with session, open('1.csv', 'w', newline='', encoding='utf-8') as csvfile:
        writer = csv.writer(csvfile)

        # Single worker today; the list keeps the fan-out knob in one place.
        threads = [
            threading.Thread(target=list_2_each, args=(session, writer, True, 0))
            for _ in range(1)
        ]

        for t in threads:
            t.start()

        # Progress heartbeat. Exits when every worker has finished — more
        # robust than polling flag_run, which stays True forever if a worker
        # dies with an exception.
        while any(t.is_alive() for t in threads):
            sleep(5)
            print("info: %s" % (statistics))

        for t in threads:
            t.join()
        print("info: %s" % (statistics))
    print("*** OVER ***")
