# -*- coding:utf-8 -*-
from database.db_handler import MysqlHander
from database.db_business import DbBusiness
from common.my_http import MyHttp
from common.my_baidu import MyBaidu
from common.my_time import MyTime
from common.my_file import MyFile
from common.macro_data import MacroData
import urllib.request
from bs4 import BeautifulSoup
import glob
import jieba
import re
import threading
import hashlib
import urllib.parse
import time
import os


'''
网络数据：从百度新闻抓取美国政治相关的宏观新闻数据并入库
'''

class BaiduMeiguo:
    """Scrape time-ordered Baidu news results for US political topics and
    store matching items through MacroData.

    Side effects: opens an append-mode log file under ./out/ named after this
    module; counts total page requests (self.all) and failed ones (self.error).
    """

    def __init__(self):
        self.macro = MacroData()        # data sink for scraped items
        self.business = DbBusiness()    # DB access, used here for proxy lookup
        # Log file is named after this source file, e.g. ./out/<module>.log
        module_name = os.path.split(__file__)[-1].split(".")[0]
        self.log_file = open('./out/' + module_name + ".log", 'a', encoding='utf-8')
        MyFile.wrtie_log(self.log_file, "开始")
        self.all = 0    # number of Baidu page requests attempted
        self.error = 0  # number of requests that raised an exception

    def __del__(self):
        # NOTE(review): relying on __del__ to flush stats and close the log is
        # fragile (not guaranteed at interpreter shutdown); an explicit close()
        # would be safer, but the interface is kept unchanged.
        MyFile.wrtie_log(self.log_file, "all:" + str(self.all) + ",error:" + str(self.error))
        MyFile.wrtie_log(self.log_file, "结束")
        self.log_file.close()

    def get_data(self, words, check):
        """Fetch up to 5 pages of time-ordered Baidu results for `words`,
        filter them, and store each hit via MacroData.

        words: list of keywords passed to the Baidu URL builder.
        check: accepted for interface compatibility but currently unused —
               title filtering uses [words] instead; TODO confirm whether
               `check` was meant to be passed to MyBaidu.check_title.
        """
        # Oldest acceptable date; results are time-ordered, so once a result
        # is older than this we can stop paging entirely.
        end_date = MyTime.forward_relative_date(5)
        next_page = True
        for page in range(1, 6):
            if not next_page:
                break
            url = MyBaidu.get_url_order_by_time(words, page)
            try:
                proxy = self.business.query_proxy()
                res = MyBaidu.get_baidu_data_by_proxy(url, proxy)
                self.all = self.all + 1
                for r in res:
                    data_date = MyBaidu.calc_date(r["date"])
                    if data_date < end_date:
                        # Too old: everything after this result is older still.
                        next_page = False
                        break
                    if MyBaidu.check_site(r["site"]) == False:
                        continue
                    if MyBaidu.check_title(r["title"], [words]) == False:
                        continue
                    # str() guards against calc_date returning a non-str value,
                    # which previously raised TypeError and was swallowed below.
                    print(r["date"] + str(data_date))
                    self.macro.add_macro_data(data_date, 200, r["title"], r["url"], r["site"])
            except Exception as e:
                # Fix: record the failure in the log instead of silently
                # swallowing it — still best-effort, no crash.
                self.error = self.error + 1
                MyFile.wrtie_log(self.log_file, "get_data error: " + str(e))

def meiguo_api():
    """Run one scrape pass for each configured US-politics keyword set."""
    topics = [
        (["白宫"], [["白宫"], [":"]]),
        (["美国国会"], [["美国国会"]]),
        (["美国", "参议院"], [["美国"], ["参议院"]]),
        (["美国", "众议院"], [["美国"], ["众议院"]]),
        (["五角大楼"], [["五角大楼"], [":"]]),
    ]
    scraper = BaiduMeiguo()
    for keyword_list, title_check in topics:
        scraper.get_data(keyword_list, title_check)

# Script entry point: run all US-topic scrapes once when executed directly.
if __name__ == '__main__':
    meiguo_api()