# -*- coding:utf-8 -*-
from database.db_handler import MysqlHander
from database.db_business import DbBusiness
from common.my_http import MyHttp
from common.my_baidu import MyBaidu
from common.my_time import MyTime
from common.my_file import MyFile
from common.macro_data import MacroData
import urllib.request
from bs4 import BeautifulSoup
import glob
import jieba
import re
import threading
import hashlib
import urllib.parse
import time
import os


'''
Web data collection: scrapes Baidu news results for central-bank
keywords and stores matching headlines via MacroData.
'''


class BaiduMeilianchu:
    """Scrape recent Baidu news results for a keyword and persist matches.

    Opens a per-module log file under ./out/ on construction and writes run
    statistics (total page requests / failed requests) on destruction.
    """

    def __init__(self):
        self.macro = MacroData()
        self.business = DbBusiness()
        # Log file is named after this source file, e.g. ./out/<module>.log.
        self.log_file = open('./out/' + os.path.split(__file__)[-1].split(".")[0] + ".log", 'a', encoding = 'utf-8')
        MyFile.wrtie_log(self.log_file, "开始")
        self.all = 0    # page requests attempted
        self.error = 0  # page requests that raised an exception

    def __del__(self):
        # NOTE(review): relying on __del__ for cleanup is fragile (it may not
        # run on interpreter shutdown); kept because callers depend on this
        # shape. A context manager would be the safer long-term design.
        MyFile.wrtie_log(self.log_file, "all:" + str(self.all) + ",error:" + str(self.error))
        MyFile.wrtie_log(self.log_file, "结束")
        self.log_file.close()

    def get_data(self, word):
        """Fetch up to 5 result pages for *word*, newest first, and store hits.

        Stops paging as soon as a result falls outside the 5-day window
        (results are ordered by time, so later pages are older still).
        Results are filtered by site whitelist and by title keywords.

        :param word: search keyword (e.g. a central-bank name)
        """
        end_date = MyTime.forward_relative_date(5)
        check_title2 = ["政策","主席","加息","降息","会议","利率"]
        next_page = True
        words = [word]
        for page in range(1, 6):
            if not next_page:
                break
            url = MyBaidu.get_url_order_by_time(words, page)
            try:
                proxy = self.business.query_proxy()
                res = MyBaidu.get_baidu_data_by_proxy(url, proxy)
                self.all += 1
                for r in res:
                    data_date = MyBaidu.calc_date(r["date"])
                    if data_date < end_date:
                        # Older than the window: stop this page and all later ones.
                        next_page = False
                        break
                    if not MyBaidu.check_site(r["site"]):
                        continue
                    if not MyBaidu.check_title(r["title"], [[word], check_title2]):
                        continue
                    # BUG FIX: the original `print(r["date"] + data_date)` raised
                    # TypeError whenever data_date is not a str, which the broad
                    # except below swallowed — silently discarding the rest of
                    # the page's results. Print the values separately instead.
                    print(r["date"], data_date)
                    self.macro.add_macro_data(data_date, 301, r["title"], r["url"], r["site"])
            except Exception as e:
                # Count AND log the failure instead of swallowing it silently.
                self.error += 1
                MyFile.wrtie_log(self.log_file, "error:" + str(e))


def meilianchu_api():
    """Scrape Baidu news for every central bank in the watch list.

    The literal list below contains duplicates (阿根廷央行, 亚美尼亚央行,
    西非国家银行 appear more than once); iterating the order-preserving
    unique keys avoids redundant network passes for the same keyword.
    """
    d = BaiduMeilianchu()
    yanghang = ["中国人民银行","美联储","英国央行","阿根廷央行","阿根廷央行","亚美尼亚央行","亚美尼亚央行","阿鲁巴央行","澳大利亚央行","奥地利央行","巴林央行","比利时央行","西非国家银行","玻利维亚央行","巴西央行","保加利亚央行","西非国家银行","加拿大央行","智利央行","哥伦比亚央行","哥斯达黎加央行","西非国家银行","克罗地亚央行","塞浦路斯央行","捷克央行","丹麦央行","东加勒比海央行","厄瓜多尔央行","萨尔瓦多央行","爱沙尼亚央行","欧洲央行","芬兰央行","法国央行","德国央行","希腊央行","危地马拉央行","香港金融管理局","匈牙利央行","冰岛央行","印度央行","印度尼西亚央行","爱尔兰央行","以色列央行","意大利央行","日本央行","韩国央行","科威特央行","拉脱维亚央行","卢森堡央行","墨西哥央行","荷兰央行","新西兰央行","挪威央行","秘鲁央行","波兰央行","葡萄牙央行","卡塔尔央行","俄罗斯央行","新加坡央行","南非央行","西班牙央行","瑞典央行","瑞士央行","泰国央行","土耳其央行","乌克兰央行"]
    # dict.fromkeys() de-duplicates while preserving first-seen order.
    for k in dict.fromkeys(yanghang):
        d.get_data(k)

# Script entry point: run one full scraping pass over the watch list.
if __name__ == '__main__':
    meilianchu_api()
