# -*- coding:utf-8 -*-
from database.db_business import DbBusiness
from common.my_time import MyTime
from common.my_baidu import MyBaidu
from common.attr_v3 import AttrV3
from bs4 import BeautifulSoup
from common.my_file import MyFile
import glob
import jieba
import re
import threading
import hashlib
import urllib.parse
import time
import os
from common.market_data import MarketData


'''
网络数据: 从百度搜索抓取航运公司相关新闻并写入行情数据
(Web data: crawls shipping-company news from Baidu search and stores it as market data)
'''

class BaiduHangyungongsi:
    """Crawls Baidu time-ordered search results that pair major shipping
    companies with a freight-related keyword, and persists recent matching
    headlines via MarketData.
    """

    def __init__(self):
        # Project helpers: keyword attributes, DB access (proxy pool), storage.
        self.attr_v3 = AttrV3()
        self.business = DbBusiness()
        self.market = MarketData()
        # Append-mode log file named after this module: ./out/<module>.log
        self.log_file = open('./out/' + os.path.split(__file__)[-1].split(".")[0] + ".log", 'a', encoding = 'utf-8')
        MyFile.wrtie_log(self.log_file, "开始")

    def __del__(self):
        # NOTE(review): relying on __del__ for cleanup is fragile — it may run
        # during interpreter shutdown (or not at all); an explicit close() or
        # context manager would be safer.
        MyFile.wrtie_log(self.log_file, "结束")
        self.log_file.close()

    def check_site(self, site_name):
        """Return True if *site_name* contains one of the whitelisted news
        outlets; otherwise print it (so the whitelist can be extended) and
        return False.
        """
        site_lists = ["站长之家","中关村在线","新浪","搜狐","东方财富网","同花顺","网易","金融界","证券之星","每日经济新闻","中国财经信息网","凤凰","和讯","中国经济网","格隆汇","中国网","人民资讯","中证网","第一财经","证券时报","华夏时报","中华网","经济观察报","人民网","中国民航网"]
        for site in site_lists:
            # Plain substring test. The original used re.search(site, ...),
            # which would misfire if a whitelist entry ever contained regex
            # metacharacters; membership testing is equivalent and safe.
            if site in site_name:
                return True
        print(site_name)
        return False

    def get_data(self, word):
        """Search Baidu for *word* combined with each major shipping company
        and store headlines from the last 30 days.

        word: freight keyword such as "运费" or "运价".
        """
        datas = ["马士基","地中海航运公司","达飞轮船","常青线","中国远洋","赫伯罗特海运","美国总统轮船有限公司","韩进海运公司","中远海运","商船三井"]
        end_date = MyTime.forward_relative_date(30)  # cutoff: 30 days back
        check_title = [word]
        for company in datas:
            words = [company, word]
            next_page = True
            page = 1
            retries = 0
            # Pages 1..9, same coverage as the original range(1, 10) loop.
            while next_page and page <= 9:
                url = MyBaidu.get_url_order_by_time(words, page)
                try:
                    proxy = self.business.query_proxy()
                    print(proxy)
                    res = MyBaidu.get_baidu_data_by_proxy(url, proxy)
                    for r in res:
                        # assumes calc_date returns a value directly comparable
                        # with end_date (likely "YYYY-MM-DD" strings) — TODO confirm
                        data_date = MyBaidu.calc_date(r["date"])
                        if data_date < end_date:
                            # Results are ordered by time: everything from here
                            # on is older than the cutoff, so stop paging.
                            next_page = False
                            break
                        # Site whitelist filter deliberately disabled.
                        #if self.check_site(r["site"]) == False:
                        #    continue
                        if not MyBaidu.check_title(r["title"], [[company], check_title]):
                            continue
                        print(r["date"] + data_date)
                        self.market.add_market_data(data_date, 2303, r["title"], r["url"], r["site"])
                    page += 1
                    retries = 0
                except Exception as e:
                    # Fix: the original did `i = i - 1` to retry a failed page,
                    # but rebinding a for-loop variable is a no-op in Python.
                    # Retry the same page up to 3 times, then move on.
                    print(str(e))
                    retries += 1
                    if retries >= 3:
                        page += 1
                        retries = 0

def hangyungongsi_api():
    """Run the shipping-company crawler once for each freight keyword."""
    crawler = BaiduHangyungongsi()
    for keyword in ("运费", "运价", "运输量", "货量"):
        crawler.get_data(keyword)

if __name__ == '__main__':
    # Allow running this crawler directly as a script.
    hangyungongsi_api()

