#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @FileName  :get_jianjiao.py
# @Time      :2023/11/8 
# @Author    :CL
# @email     :1037654919@qq.com
# 采集建交数据
from io import StringIO

import pandas as pd
import requests
from bs4 import BeautifulSoup

from utils import mongo_manager
# MongoDB collection handle used to persist the scraped records
# (database 'waijiaobu', collection 'waijiaobu_jianjiao'; wrapper from utils).
waijiaobu_jianjiao = mongo_manager('waijiaobu_jianjiao',db = 'waijiaobu')

# Target page: MFA (Ministry of Foreign Affairs) list of countries that have
# established diplomatic relations with China.
url = "https://www.mfa.gov.cn/web/ziliao_674904/2193_674977/200812/t20081221_9284708.shtml"
# Browser-like request headers copied from a real Chrome session so the
# request is served normal HTML instead of being rejected.
headers = {
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
    "Accept-Language": "zh-CN,zh;q=0.9",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
    "Pragma": "no-cache",
    "Referer": "https://www.mfa.gov.cn/web/ziliao_674904/2193_674977/",
    "Sec-Fetch-Dest": "document",
    "Sec-Fetch-Mode": "navigate",
    "Sec-Fetch-Site": "same-origin",
    "Sec-Fetch-User": "?1",
    "Upgrade-Insecure-Requests": "1",
    "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36",
    "sec-ch-ua": "\"Not.A/Brand\";v=\"8\", \"Chromium\";v=\"114\", \"Google Chrome\";v=\"114\"",
    "sec-ch-ua-mobile": "?0",
    "sec-ch-ua-platform": "\"Linux\""
}
# Cookies captured from a browser visit to the site.
# NOTE(review): these session values presumably expire — refresh them from a
# fresh browser session if requests start returning challenge pages.
cookies = {
    "HMF_CI": "5a16d8bf0a3ab42047c26c3c3b5b60f423c865ec968964f521c79918fc4dac33f15dfed0818218fe0c7b40f92bff57dfba05612c7105ea346b813bfa47a317cb95",
    "_trs_uv": "lop388ic_469_fpck",
    "arialoadData": "true",
    "ariawapChangeViewPort": "false",
    "HMY_JC": "2b3e7b8a3422e84f67635d6418589539d4c9880cc77967bea6469af8ffbe3b8fe8,",
    "_trs_ua_s_1": "lopi09qd_469_gnew",
    "HBB_HC": "82ad3266a9ae457d8a5e6bbae562e847cc041f3930b85e34cd6dc524cc642e2008da343ed8ad967e5afe6090c4b29d36e9"
}
def get_jianjiao_data():
    """Scrape the MFA diplomatic-relations page and upsert one record per country.

    Fetches the page at module-level ``url``, parses the per-continent HTML
    tables, and writes ``{'_id', 'continent', '国名', '建交日'}`` documents into
    the ``waijiaobu_jianjiao`` collection (insert first, update on conflict).

    Raises:
        requests.RequestException: on network failure, timeout, or HTTP error.
    """
    # Timeout prevents the script from hanging forever on a stalled connection.
    response = requests.get(url, headers=headers, cookies=cookies, timeout=30)
    response.raise_for_status()  # fail loudly on 4xx/5xx instead of parsing an error page
    response.encoding = "utf-8"

    soup = BeautifulSoup(response.text, 'lxml')
    # The page lays out one <table> per continent, in this fixed order.
    tables = soup.select('table')
    continents = ['亚洲', '非洲', '欧洲', '美洲', '大洋洲']
    for continent, table in zip(continents, tables):
        # Wrap in StringIO: passing literal HTML to read_html is deprecated.
        frame = pd.read_html(StringIO(table.prettify()), header=0)[0]
        for _, row in frame.iterrows():
            # Country name doubles as the primary key so re-runs are idempotent.
            result = {
                '_id': row['国名'],
                'continent': continent,
                '国名': row['国名'],
                '建交日': row['建交日'],
            }
            print(result)
            try:
                waijiaobu_jianjiao.insertOne(result)
            except Exception:
                # Duplicate key (already scraped) — refresh the existing document.
                waijiaobu_jianjiao.updateOne({'_id': row['国名']}, result)

if __name__ == '__main__':
    # Run the scrape when executed as a script (removed a stray blank print()).
    get_jianjiao_data()
