# -*- coding:utf-8 -*-
from database.db_business import DbBusiness
from common.macro_data import MacroData
from common.my_time import MyTime
from common.my_baidu import MyBaidu
from common.my_file import MyFile
import re
import hashlib
import urllib.parse
import time
import os

class Base(object):
    """Base crawler for Baidu news search results.

    Fetches result pages one by one (ordered by time), filters each row by
    date cut-off, site whitelist and title keywords, and hands accepted rows
    to ``recode_data`` — subclasses override that hook to store the data.

    NOTE(review): ``MyFile.wrtie_log`` and ``recode_data`` are (misspelled)
    names of external/overridable APIs and are intentionally left unchanged.
    """

    # Whitelist of accepted news sites (hoisted to a class constant so the
    # list is built once, not on every check_site call).
    _SITE_WHITELIST = ["站长之家","中关村在线","新浪","搜狐","东方财富网","同花顺","网易","金融界","证券之星","每日经济新闻","中国财经信息网","凤凰","和讯","中国经济网","格隆汇","中国网","人民资讯","中证网","第一财经","证券时报","华夏时报","中华网","经济观察报","人民网","中国民航网"]

    # Shared DB helper (kept class-level to preserve original behavior).
    business = DbBusiness()
    # Class-level defaults; per-instance values are assigned in __init__.
    __log_file = ''     # falsy sentinel meaning "no log file opened yet"
    __all_cnt = 0       # pages attempted
    __error_cnt = 0     # pages that raised an exception
    __type_id = 0       # data type id, set via set_type
    __forward = 0       # days-back window, set via set_forward
    __max_page = 0      # page loop upper bound (exclusive), set via set_max_page
    __check_title = True  # whether to apply the site whitelist check

    def __init__(self, file_name):
        """Open the append-mode log file and reset per-instance counters.

        BUG FIX: the original assigned the handle to a *local* variable
        (``__log_file = open(...)``), leaking the file and leaving
        ``self.__log_file`` as the '' sentinel, which made ``write_log``
        and ``__del__`` fail on a plain string.
        """
        self.__log_file = open(file_name, 'a', encoding = 'utf-8')
        self.__all_cnt = 0
        self.__error_cnt = 0
        self.__type_id = 0
        self.__forward = 0
        self.__max_page = 0
        self.__check_title = True

    def __del__(self):
        # Guard: if __init__ failed before the log file was opened, the
        # class-level '' sentinel is still in place — nothing to flush.
        if not self.__log_file:
            return
        try:
            # BUG FIX: the original read the non-existent ``self.all`` /
            # ``self.error`` attributes (the AttributeError was silently
            # swallowed during interpreter teardown); report the real
            # counters instead.
            MyFile.wrtie_log(self.__log_file, "all:[" + str(self.__all_cnt) + "],error[" + str(self.__error_cnt) + "]")
            MyFile.wrtie_log(self.__log_file, "结束")
        finally:
            # Always release the handle, even if logging itself failed.
            self.__log_file.close()

    def recode_data(self, data_date, data_title, data_url, data_site):
        """Subclass hook: persist one accepted result row.

        The base implementation only prints a marker so a missing
        override is visible in the console output.
        """
        print("recode_data error")

    def set_check_title(self, check_title):
        # Toggles the site-whitelist filter (the name is historical —
        # it gates check_site, not the title keyword check).
        self.__check_title = check_title

    def set_type(self, type_id):
        self.__type_id = type_id

    def get_type(self):
        return self.__type_id

    def write_log(self, log):
        """Append one entry to this crawler's log file."""
        MyFile.wrtie_log(self.__log_file, log)

    def set_max_page(self, max_page):
        self.__max_page = max_page

    def set_forward(self, forward):
        # ``forward`` = how many days back to crawl; derive the absolute
        # cut-off date from it.
        self.__forward = forward
        self.__end_date = MyTime.forward_relative_date(forward)

    def set_end_date(self, end_date):
        self.__end_date = end_date

    def set_all_param(self, check_site, data_type, max_page, forward):
        """Convenience setter bundling the four common knobs."""
        self.set_check_title(check_site)
        self.set_type(data_type)
        self.set_max_page(max_page)
        self.set_forward(forward)

    def check_site(self, site_name):
        """Return True when ``site_name`` matches the site whitelist.

        On a miss the site name is printed so new sources can be spotted
        and added to the list.
        """
        if any(re.search(d, site_name) for d in self._SITE_WHITELIST):
            return True
        print(site_name)
        return False

    def get_data(self, words, title_check_array, title_check_deny = None):
        """Crawl ``words`` using the standard title check."""
        self.__crawl(words, title_check_array, title_check_deny, MyBaidu.check_title)

    def get_data_dingzhi(self, words, title_check_array, title_check_deny = None):
        """Crawl ``words`` using the customised (dingzhi) title check."""
        self.__crawl(words, title_check_array, title_check_deny, MyBaidu.check_title_dingzhi)

    def __crawl(self, words, title_check_array, title_check_deny, title_checker):
        """Shared page loop for get_data / get_data_dingzhi.

        The two public methods were ~40 duplicated lines differing only in
        which title-check helper they called; ``title_checker`` abstracts
        that difference.

        Walks pages 1 .. max_page-1 (original bound kept), stopping early
        when a row is older than the cut-off date (results are ordered by
        time, so later pages are older still) or when a page has fewer
        than 10 rows (short page = last page).
        """
        print("get_data")
        # BUG FIX: the original used a mutable default ``title_check_deny=[]``
        # shared across calls; normalise a None default here instead.
        if title_check_deny is None:
            title_check_deny = []
        for i in range(1, self.__max_page):
            url = MyBaidu.get_url_order_by_time(words, i)
            try:
                self.__all_cnt = self.__all_cnt + 1
                proxy = self.business.query_proxy()
                print("proxy:" + proxy)
                res = MyBaidu.get_baidu_data_by_proxy(url, proxy)
                for r in res:
                    data_date = MyBaidu.calc_date(r["date"])
                    if data_date < self.__end_date:
                        # Row older than the cut-off: every later row and
                        # page is older too — stop the whole crawl.
                        return
                    if MyBaidu.not_support_site(r["site"]):
                        continue
                    if self.__check_title and not self.check_site(r["site"]):
                        print("check site fail")
                        continue
                    if not title_checker(r["title"], title_check_array, title_check_deny):
                        print("check title fail")
                        continue
                    data_url = r["url"]
                    # 百家号基本都有对应源网站 — Baijiahao reposts usually
                    # have an original-source copy, so skip them.
                    if data_url.find("baijiahao") >= 0:
                        continue
                    print(r["date"] + data_date)
                    self.recode_data(data_date, r["title"], data_url, r["site"])
                if len(res) < 10:
                    break
            except Exception as e:
                # Best-effort crawl: count the failure and move to the
                # next page rather than aborting the run.
                self.__error_cnt = self.__error_cnt + 1
                print(str(e))
