# -*- coding: utf-8 -*-
import requests
from bs4 import BeautifulSoup
from time import sleep
import sys



# Browser-like request headers sent with every crawl request; a real
# User-Agent string helps avoid trivial bot blocking by Baidu Baike.
HEADER = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
          'Accept-Encoding': 'gzip, deflate, compress',
          'Accept-Language': 'en-us;q=0.5,en;q=0.3',
          'Cache-Control': 'max-age=0',
          'Connection': 'keep-alive',
          'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:22.0) Gecko/20100101 Firefox/22.0'}


def load_list(path):
    """Yield each line of the UTF-8 text file at *path* without its newline.

    Uses ``rstrip('\\n')`` rather than slicing off the last character, so
    the final line is not silently truncated when the file has no trailing
    newline (the original ``i[:-1]`` dropped its last character).

    :param path: path to a text file with one query per line
    :yields: each line as a string, newline removed
    """
    with open(path, 'r', encoding='utf-8') as f:
        for line in f:
            yield line.rstrip('\n')



def get_baike_info(query):
    """Look up *query* on Baidu Baike and return ``{query: summary_text}``.

    Tries the direct lemma page first; when that yields no summary, falls
    back to the search page and follows its first result.  The mapped
    value is "" when neither route produces a summary.
    """
    direct_url = f"https://baike.baidu.com/item/{query}"
    fallback_url = f"https://baike.baidu.com/search/none?word={query}&enc=utf8"

    status, summary = url_test(direct_url)
    if status == 0:
        print(query, "over")
    else:
        # Direct page had no summary: follow the first search result.
        status, summary = url_test(get_simillar(fallback_url))
        print(query, "search over")
    return {query: summary}


def url_test(url):
    """Fetch *url* and try to extract the Baike lemma summary.

    :param url: page URL to fetch; falsy values short-circuit to failure
    :returns: ``(0, summary_text)`` when a ``lemma-summary`` div is found,
              ``(1, "")`` otherwise (missing url, non-200 response, or no
              summary div on the page)
    """
    if not url:
        return 1, ""
    # Timeout keeps the crawler from hanging forever on a dead host.
    response = requests.get(url, headers=HEADER, timeout=10)
    if response.status_code != 200:
        # Error pages (404, rate-limit, ...) never carry a summary.
        return 1, ""
    document = BeautifulSoup(response.content, 'html.parser')
    summary_tag = document.find('div', attrs={'class': 'lemma-summary'})
    if summary_tag:
        return 0, summary_tag.text
    return 1, ""

def get_simillar(url):
    """Return the absolute URL of the first Baike search result on *url*.

    :param url: Baike search-results page URL
    :returns: absolute ``https://baike.baidu.com/...`` URL of the first
              result link, or "" when the page has no result links

    Note: the dead ``response.encoding = response.apparent_encoding``
    assignment was removed — it had no effect because the raw byte
    ``response.content`` is what gets parsed.
    """
    # Timeout keeps a dead search endpoint from blocking the whole crawl.
    response = requests.get(url, headers=HEADER, timeout=10)
    document = BeautifulSoup(response.content, 'html.parser')
    link = document.find('a', attrs={"class": "result-title", "target": "_blank"})
    if link is None:
        return ""
    dst_url = link['href']
    # Search results use site-relative hrefs; make them absolute.
    if 'https://baike.baidu.com' not in dst_url:
        dst_url = 'https://baike.baidu.com' + dst_url
    return dst_url
    # print(first_url)
    
if __name__ == "__main__":
    path = "C:/Users/65152/Desktop/classInd2.txt"
    result = {}
    # Crawl each query in turn, pausing between requests to stay polite
    # to the server (and reduce the chance of being rate-limited).
    for query in load_list(path):
        result.update(get_baike_info(query))
        sleep(10)
    # Persist the accumulated {query: summary} mapping as repr text.
    with open("C:/Users/65152/Desktop/baike_result.txt", 'w', encoding='utf-8') as f:
        f.write(str(result) + "\n")
    
