# -*- coding:utf-8 -*-
from database.db_business import DbBusiness
from common.my_time import MyTime
from common.my_baidu import MyBaidu
from common.attr_v3 import AttrV3
from bs4 import BeautifulSoup
import glob
import jieba
import re
import threading
import hashlib
import urllib.parse
import time
import os
from common.market_data import MarketData


'''
网络数据 (web data): scrapes Baidu search results for futures-exchange
news and stores recent items via MarketData.
'''


class BaiduQihuojiaoyisuo:
    """Collect futures-exchange related news from Baidu search results.

    For each known exchange name paired with a caller-supplied keyword,
    queries Baidu (results ordered by time, fetched through a proxy
    obtained from the database) and stores items newer than a 7-day
    cutoff via ``MarketData.add_market_data``.
    """

    # News sites considered trustworthy; used by check_site().
    # NOTE(review): check_site() is currently not called from get_data
    # (the call there is commented out) — kept for future re-enabling.
    SITE_WHITELIST = ["站长之家","中关村在线","新浪","搜狐","东方财富网","同花顺","网易","金融界","证券之星","每日经济新闻","中国财经信息网","凤凰","和讯","中国经济网","格隆汇","中国网","人民资讯","中证网","第一财经","证券时报","华夏时报","中华网","经济观察报","人民网","中国民航网"]

    # Futures exchanges whose names are combined with the search keyword.
    # Fix: the original list contained "伦敦金属交易所" twice, which made
    # get_data() scrape the same exchange two times per keyword.
    EXCHANGES = ["上海期货交易所","大连商品交易所","郑州商品交易所","芝加哥期货交易所","纽约期货交易所","纽约金属交易所","纽约商品交易所","堪萨斯商品交易所","伦敦金属交易所","泛欧交易所","伦敦商品交易所","法国期货交易所","德国期货交易所","东京商品交易所","新加坡商品交易所","香港期货交易所","台湾期货交易所","南非期货交易所","韩国期货交易所"]

    def __init__(self):
        self.attr_v3 = AttrV3()
        self.business = DbBusiness()   # proxy lookup
        self.market = MarketData()     # result storage

    def check_site(self, site_name):
        """Return True if *site_name* matches any whitelisted news site.

        Unmatched names are printed so new sites can be reviewed and
        added to the whitelist.
        """
        if any(re.search(site, site_name) for site in self.SITE_WHITELIST):
            return True
        print(site_name)
        return False

    def get_data(self, word):
        """Search Baidu for *word* combined with each exchange name and
        store results from the last 7 days.

        Up to 4 pages are fetched per exchange, in time order; paging
        stops early as soon as a result older than the cutoff appears.
        Fetch/parse errors are logged and the page is skipped
        (best-effort scraping).
        """
        end_date = MyTime.forward_relative_date(7)  # 7-day cutoff
        check_title = [word]
        for exchange in self.EXCHANGES:
            next_page = True
            words = [exchange, word]
            for page in range(1, 5):
                if not next_page:
                    break
                url = MyBaidu.get_url_order_by_time(words, page)
                try:
                    proxy = self.business.query_proxy()
                    res = MyBaidu.get_baidu_data_by_proxy(url, proxy)
                    for r in res:
                        data_date = MyBaidu.calc_date(r["date"])
                        if data_date < end_date:
                            # Results are time-ordered: everything from
                            # here on is older than the cutoff.
                            next_page = False
                            break
                        # Title must mention both the exchange and the keyword.
                        if not MyBaidu.check_title(r["title"], [[exchange], check_title]):
                            continue
                        data_title = r["title"]
                        data_site = r["site"]
                        data_url = r["url"]
                        # Fix: the original did `r["date"] + data_date`, which
                        # raises TypeError when calc_date() returns a non-str,
                        # silently aborting the rest of this page's results.
                        print(r["date"], data_date)
                        self.market.add_market_data(data_date, 2100, data_title, data_url, data_site)
                except Exception as e:
                    # Best-effort: log and move on instead of silently
                    # swallowing the error (the original did `pass`).
                    print("baidu fetch failed for %s page %d: %s" % (url, page, e))

def qihuojiaoyisuo_api():
    """Run the Baidu futures-exchange scraper once for each search keyword."""
    scraper = BaiduQihuojiaoyisuo()
    # Keywords scraped in this fixed order.
    for keyword in ("期价", "合约", "商品", "产品", "品种", "推出", "上线", "批准"):
        scraper.get_data(keyword)


# Script entry point: run the scraper standalone.
if __name__ == '__main__':
    qihuojiaoyisuo_api()

