# -*- coding:utf-8 -*-
from database.db_handler import MysqlHander
from database.db_business import DbBusiness
from common.my_http import MyHttp
from common.my_baidu import MyBaidu
from common.market_data import MarketData
from common.my_time import MyTime
import urllib.request
from bs4 import BeautifulSoup
import glob
import jieba
import re
import threading
import hashlib
import urllib.parse
import time
import os


'''
Web data collection: crawl recent Baidu news results for tech brands.
'''


class BaiduKejipintai:
    """Collect recent Baidu news results for tech brands and store the
    entries that pass the date / site / title filters as market-data
    records (category 2300)."""

    # Only articles hosted on one of these known news sites are kept.
    # Matching is plain substring containment (the original used
    # re.search, but every entry is a literal name with no regex
    # metacharacters, so `in` is equivalent and clearer).
    SITE_WHITELIST = [
        "站长之家", "中关村在线", "新浪", "搜狐", "东方财富网", "同花顺",
        "网易", "金融界", "证券之星", "每日经济新闻", "中国财经信息网",
        "凤凰", "和讯", "中国经济网", "格隆汇", "中国网", "人民资讯",
        "中证网", "第一财经", "证券时报", "华夏时报", "中华网",
        "经济观察报", "人民网", "中国民航网",
    ]

    def __init__(self):
        self.market = MarketData()
        self.business = DbBusiness()

    def check_site(self, site_name):
        """Return True when *site_name* contains any whitelisted site.

        Unknown site names are printed so the whitelist can be extended
        later (preserves the original best-effort logging behavior).
        """
        if any(site in site_name for site in self.SITE_WHITELIST):
            return True
        print(site_name)
        return False

    def get_data(self):
        """Fetch Baidu search results for every tech brand and persist
        the matching entries via ``self.market.add_market_data``.

        Side effects: sleeps 1s between page requests, prints filtered
        and stored entries, writes rows through MarketData.
        """
        pinpai = self.business.query_kejipinpai()
        # Cut-off: only keep articles no older than 5 days.
        end_date = MyTime.forward_relative_date(5)
        # Event keywords the title must also match (combined with the
        # brand names by MyBaidu.check_title).
        check_title2 = ["发布会", "董事会", "新品上市", "收购", "重组",
                        "并购", "开发者", "新技术", "大会"]
        for d in pinpai:
            # Each record is "name1|name2|..." — first name drives the query.
            names = d.split("|")
            words = [names[0], "动态"]
            next_page = True
            for page in range(1, 20):
                time.sleep(1)  # throttle requests to Baidu
                if not next_page:
                    break
                url = MyBaidu.get_url_order_by_time(words, page)
                res = MyBaidu.get_baidu_data(url)
                for r in res:
                    data_date = MyBaidu.calc_date(r["date"])
                    if data_date < end_date:
                        # Results are ordered by time, so every later
                        # entry (and page) is older — stop paging.
                        next_page = False
                        break
                    if not self.check_site(r["site"]):
                        continue
                    if not MyBaidu.check_title(r["title"], [names, check_title2]):
                        continue
                    # f-string with a separator replaces the original
                    # `r["date"] + data_date`, which printed the two
                    # values fused together and would raise TypeError
                    # if calc_date returned a non-str value.
                    print(f'{r["date"]} {data_date}')
                    self.market.add_market_data(
                        data_date, 2300, r["title"], r["url"], r["site"])

def kejipinpai_api():
    """Run one full crawl pass for the tech-brand news data."""
    crawler = BaiduKejipintai()
    crawler.get_data()


# Script entry point: run the crawl once when executed directly.
if __name__ == '__main__':
    kejipinpai_api()
