import codecs
from datetime import datetime
import os

import requests

import config_setting
import util_store
# zhihu_url = "https://www.zhihu.com/api/v4/collections/702943686/items?offset=1&limit=50"
# from webarchive_controller import common

# HTTP headers sent with every Zhihu API request.
# NOTE(review): the Cookie is a hard-coded, account-specific session value that
# expires periodically — refresh it from the browser (F12 -> Network tab ->
# Request Headers -> Cookie) before each run.
# SECURITY: a live session cookie committed to source control leaks the account;
# consider loading it from an untracked file or environment variable instead.
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.100 Safari/537.36",
    "Connection": "keep-alive",
    "Accept": "text/html,application/json,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
    "Accept-Language": "zh-CN,zh;q=0.8",
    'Cookie': "_xsrf=5Xx2bSb9xJaVsUj9BN7Vg76d6h75rsJc; _zap=d54caa34-1904-489d-b1b5-4cd580738d63; d_c0=E8QTzu3rHBqPTiDmJbs5fXo9WCyACJrqWtw=|1741397494; __snaker__id=Y83FiYxQwpzVhcIm; q_c1=a6c22056cdb745c4855f36812c1c66d0|1741486625000|1741486625000; edu_user_uuid=edu-v1|a86cf6e6-a896-4f5d-881f-c12ec8e7ac63; tst=r; __zse_ck=004_pBEZQyn7f37dnnbkRLYVVvybaPDg0Ser2hoOcEsFDkEOt1mGbmzmF/QiKESN9fZSU4/PTWH0hQBRfSZ8/E2yAoedF82Q35rdzsGjkHNj1yGMkC5CqIKMsSqNd0pX2VHF-XLUDg3M4UevJhmkHaaokriR7SVaRAFh7iMOsJcsheeHh+o6tA7DICnDg7Zrg5gabMKUJkGglpRqdy66KSy0UqnQWb+NUIEqiU3EIcZazx02dyF3b+hhKx9+NnLm1u6fd; z_c0=2|1:0|10:1758972247|4:z_c0|92:Mi4xTV94UkJBQUFBQUFUeEJQTzdlc2NHaVlBQUFCZ0FsVk5WeGZGYVFCUEJqU3JIT3RobkNOUHZ0dXlzUVE3SzZMaU9n|1ce32d0c3dfdb7ae4c1efaa60dd82cb7e401b7339b394d209403db5763f82626; Hm_lvt_98beee57fd2ef70ccdd5ca52b9740c49=1758552145,1758722314,1758972014,1759024203; HMACCOUNT=93D7E290C9D0A540; SESSIONID=XCDLICQqScMLcf68RwJFCFY2lT4Qs9rwuOlOCbYtlJX; JOID=W1EcB0kQKsr5LDIKa_Wi2S_J1B1xSGmDs25xWxdISYe4SV88U1uzGZwnMgdglJj_KtnF8AeS2BIIEI4B7iWAo20=; osd=Vl0QBEkdJsb6LD8GZ_ai1CPF1x18RGWAs2N9VxRIRIu0Sl8xX1ewGZErPgRgmZTzKdnI_AuR2B8EHI0B4ymMoG0=; Hm_lpvt_98beee57fd2ef70ccdd5ca52b9740c49=1759024214; BEC=e9bdbc10d489caddf435785a710b7029"

}

def download_and_yield_fav_collection(collection_id):
    """Yield batches of (url, title) tuples from a Zhihu favorite collection.

    Pages through the collection items API ``limit`` entries at a time and
    yields one list of ``(url, title)`` pairs per page. A page that fails to
    download or parse is logged, yields an empty list, and is skipped, so the
    generator always terminates.

    :param collection_id: numeric collection id as a string, e.g. "702943686"
    :raises requests.HTTPError: if the initial metadata request fails
    """
    # collection_id = "702943686"
    offset = 0
    limit = 20
    collection_url = "https://www.zhihu.com/api/v4/collections/{}/items".format(collection_id)
    html = requests.get(collection_url, headers=headers)
    html.raise_for_status()
    # Total item count drives the paging loop. Default to 0 so a missing
    # 'totals' key yields nothing instead of raising `offset < None` TypeError.
    article_nums = html.json()['paging'].get('totals', 0)

    while offset < article_nums:
        url_title_list = []
        collection_url = "https://www.zhihu.com/api/v4/collections/{}/items?offset={}&limit={}".format(collection_id,
                                                                                                       offset, limit)
        try:
            print(f"sending request {collection_url}")
            html = requests.get(collection_url, headers=headers)
            # Fail fast on HTTP errors instead of trying to JSON-parse an error page.
            html.raise_for_status()
            content = html.json()
            for el in content['data']:
                url = el['content']['url']
                if el['content']['type'] == 'answer':
                    # Answers carry their title on the parent question.
                    title = el['content']['question']['title']
                else:
                    # Articles (and other item types) may lack a title field.
                    title = el['content'].get('title', "没有标题")
                url_title_list.append((url, title))
        except Exception as e:
            print(f"batch {offset}, {limit} failed...{e}")
        # BUGFIX: advance the offset unconditionally. Previously it was only
        # incremented on success, so a persistent error (expired cookie,
        # network outage) made this loop retry the same page forever.
        offset += limit
        yield url_title_list


if __name__ == "__main__":
    """
    Goal: persist the url -> title pairs of Zhihu favorite collections.

    The Cookie must be refreshed first: open the collection page in a logged-in
    browser, press F12, go to Network tab -> Headers -> Request Headers ->
    Cookie, and paste the value into `headers` at the top of this script.

    Each collection is meant to get its own directory holding url_list.txt,
    mimicking a WebArchiveVault layout so the vault_take_snapshot script can
    crawl it directly.
    """
    # Entries are [collection_id, human-readable label]; commented-out rows
    # are kept as a menu of collections to re-enable as needed.
    collection_id_list = [
        # ["929449434", f"知乎{datetime.now().strftime('%Y%m%d')}"],
        ["989973899", f"20251001"],  # update the batch name in config_setting.py first
        # ["473197161", "生活&百科&社会&政治&历史"],
        # ["475687874", "工具&效率&推荐"],
        # ["222681997", "开发&框架&方法&实践&工程&团队"],
        # ["311239756", "财经&股债&金融&政策"],
        # ["492081915", "方法&学习&进步"],
        # ["430080417", "娱乐&游戏&电影&文学&旅游"],
        # ["207889554", "创业&真知&格局&哲理"],
        # ["734831297", "架构&思想&协议&原理&行业&趋势"],
        # ["473632838", "产经&科技&行业&观察&趋势"],
        # ["643401185", "买房"],
        # ["146538462", "其他"],
    ]
    batch_path = f"./assets/{config_setting.batch_name}"
    # makedirs(exist_ok=True) also creates a missing ./assets parent and is
    # safe on reruns, unlike the bare os.mkdir it replaces.
    os.makedirs(batch_path, exist_ok=True)

    output_filename = "zhihu_url_list.txt"
    for collection_id, _label in collection_id_list:
        # NOTE(review): every collection currently writes into the same
        # zhihu_fav/ directory, not one directory per collection as the
        # docstring suggests — confirm intent before enabling multiple ids.
        dirpath = f"./assets/{config_setting.batch_name}/zhihu_fav/"
        os.makedirs(dirpath, exist_ok=True)
        for batch_url_title_list in download_and_yield_fav_collection(collection_id):
            print(f"writing to {os.path.join(dirpath, output_filename)}")
            util_store.append_url_list_with_subsidiary(os.path.join(dirpath, output_filename), batch_url_title_list)

    # Moved inside the __main__ guard so importing this module has no side effects.
    print("done")