from time import sleep
import json

from rich import print as rprint

import req


# Per-site crawl configuration: a page-URL template (one '{}' slot for the
# page number), the last index page to visit (inclusive), and the XPath that
# pulls raw product-code ("fanhao") strings out of an index page.
# NOTE(review): the 'JavDB' entry actually points at javlibrary.com — confirm
# the key name / URL pairing is intentional.
rules = dict(
    JavBus=dict(
        indexUrl='https://www.javbus.com/page/{}',
        lastIndex=155,
        fanhaoXpath='//div[@class="item"]//date[1]/text()',
    ),
    JavDB=dict(
        indexUrl='https://www.javlibrary.com/cn/vl_update.php?list&mode=&page={}',
        lastIndex=200,
        fanhaoXpath='//table[@class="videotextlist"]//tr[position()>1]/td/div/a/@title',
    ),
)

# Accumulator shared by get(): every product-code prefix seen so far.
s = set()

def get(rule):
    """Crawl every index page described by *rule* and fold the product-code
    prefixes into the module-level set ``s``.

    rule: dict with keys 'indexUrl' (URL template with one ``{}`` page slot),
    'lastIndex' (last page number, inclusive) and 'fanhaoXpath' (XPath whose
    results are the raw product-code strings).

    Fetch/parse failures are retried forever with a 3-second pause — this is
    a deliberate best-effort loop for a flaky site, so it never raises.
    """
    for page in range(1, rule['lastIndex'] + 1):
        while True:
            try:
                fanhaos = req.req3(rule['indexUrl'].format(page)).xpath(rule['fanhaoXpath'])
                break
            except Exception as e:  # broad on purpose: any failure just retries
                rprint('[red]索引页解析出错')
                rprint('报错信息：', e)
                sleep(3)
        # Keep only the part before the first '-' (e.g. 'ABC-123' -> 'abc'),
        # lower-cased so later membership checks are case-insensitive.
        prefixes = [fanhao.split('-')[0].lower() for fanhao in fanhaos]
        before = len(s)
        s.update(prefixes)  # update() accepts any iterable; no set() wrapper needed
        rprint(f'增加了 {len(s) - before} 个')

# Disabled crawl driver — re-enable once the `rules` entries are finalized.
# for site in rules:
#     # TODO: the rules entries are not finished yet
#     rule =  rules[site]
#     rprint(f'现在爬取的是：{site}')
#     get(rule)


# Merge the stored blacklist with the previously collected codes in
# d:/test.json, then write the de-duplicated, sorted union back to
# d:/test.json.
with open('d:/data/list.json', 'r', encoding='utf-8') as f:
    keywords = json.load(f)  # json.load reads straight from the file object
blacklist = keywords['blacklist']

with open('d:/test.json', 'r', encoding='utf-8') as f:
    fanhaos = json.load(f)

# De-duplicate and sort in one expression instead of list(set(...)) + .sort().
l = sorted(set(blacklist + fanhaos))
jsonData = json.dumps(l, indent=2, ensure_ascii=False)
with open('d:/test.json', 'w', encoding='utf-8') as f:
    f.write(jsonData)

