# -*- coding:utf-8 -*-
# !/usr/bin/env
# @description 测试环境数据库对象文件
# @author:jack.spanrrows
# @datetime: 2018/07/30 23:29
# @copyRight jack.spanrrows@gmail.com


from datetime import datetime, timedelta
from pymongo import MongoClient as client
from urllib import request
import time
import pymongo
import pycurl
from io import *
from bson.son import SON

import os, sys
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(BASE_DIR)

# Environment sentinel files: the presence of one of these paths on disk
# decides which config module (testing vs. online) is imported below.
DOCKER_IDC_RC = "/www/web/IDC_RC"
IDC_RC = "/var/html/IDC_RC"
IDC_ONLINE = "/www/web/IDC_ONLINE"
# Placeholders so the names are always bound at module level.
# NOTE(review): if no sentinel file matches, `constants` stays a bare bool and
# any later attribute access (constants.DBHOST, ...) will fail — presumably at
# least one sentinel always exists on deployment hosts; confirm.
db = True
constants = True
if os.path.isfile(DOCKER_IDC_RC) or os.path.isfile(IDC_RC):
    # testing / RC environment configuration
    from scrapy_web_py3.config.testing import constants
elif os.path.isfile(IDC_ONLINE):
    # production environment configuration
    from scrapy_web_py3.config.online import constants
from scrapy_web_py3.librarys import common

class MongoCache(object):
    """MongoDB-backed page cache with a TTL index.

    Pages fetched over HTTP (pycurl) are upserted into the
    ``dissertation_advertist_web.webpage`` collection keyed by URL (``_id``);
    a TTL index on ``mongo_key`` lets MongoDB expire entries automatically
    after ``expires`` seconds.
    """

    def __init__(self, url = None, expires=None, mongo_key = "timer", mongo_value = pymongo.ASCENDING, type = None):
        """
        :param url: page URL used as the cache document ``_id`` (may be None)
        :param expires: TTL in seconds; defaults to constants.MONGODB_EXPIRE
        :param mongo_key: field name the TTL index is built on
        :param mongo_value: index direction (pymongo.ASCENDING / DESCENDING)
        :param type: URL-suffix mode used by curl_post
                     (constants.ADD_MIN_TIMESTAMP / constants.ADD_DAY / None)
        """
        self.client = client(constants.DBHOST, constants.DBPORT)
        self.db = self.client.dissertation_advertist_web
        self.mongo_key = mongo_key
        self.mongo_value = mongo_value
        if expires is None:
            self.expires = timedelta(seconds = constants.MONGODB_EXPIRE)
        else:
            self.expires = timedelta(seconds = expires)
        # TTL index: MongoDB removes a document once its `mongo_key` timestamp
        # is older than `expires` (cache lifetime, configurable).
        self.db.webpage.create_index([(self.mongo_key, self.mongo_value)], expireAfterSeconds=self.expires.total_seconds())
        self.type = type
        # BUG FIX: always bind self.url. Previously it was assigned only when
        # url was not None, so get_data()/curl_post() could raise
        # AttributeError on instances built without a url.
        self.url = url

    def check_index_exists(self):
        """Print the direction of any index keyed on 'timer'.

        :return: True if at least one index on 'timer' exists, else False
                 (previously nothing was returned despite the name).
        """
        found = False
        for h in self.db.webpage.list_indexes():
            data = dict(SON(h))
            getKey = dict(SON(data['key']))
            if 'timer' in getKey:
                print(getKey['timer'])
                found = True
        return found

    def get_data(self, source = '', accept = False, encoding='utf-8'):
        """Return cached HTML for self.url, fetching and caching on a miss.

        :param source: origin tag passed through to insert_db on a miss
        :param accept: request gzip/deflate encoding when fetching
        :param encoding: charset used to decode a fetched body
        :return: the cached/fetched HTML string, or False on fetch failure
        """
        common.var_dump("当前查找_id:url|_id:%s" % self.url)
        return_data = self.db.webpage.find_one({'_id':self.url})
        if return_data and 'html' in return_data:
            return return_data['html']
        return self.insert_db(source, accept, encoding)

    def find_one(self, key, val, **field_dict):
        """
        Look up a single cached document by key/value.

        :param key: field name to match
        :param val: field value to match
        :param field_dict: optional ``field_dict=`` projection dict for pymongo
        :return: the matching document, or False when nothing matched
        """
        # Robustness: tolerate a missing field_dict kwarg
        # (previously field_dict['field_dict'] raised KeyError).
        projection = field_dict.get('field_dict')
        common.var_dump("当前查找key:val|%s:%s" % (key, val))
        return_data = self.db.webpage.find_one({key:val}, projection)
        if return_data:
            return return_data
        return False

    def find_more(self, key, val, **field_dict):
        """
        Return a pymongo cursor over all documents where key == val.

        NOTE: Collection.find() always returns a Cursor (never None), so the
        False branch is effectively dead; kept only for parity with find_one.

        :param field_dict: optional ``field_dict=`` projection dict for pymongo
        :return: a pymongo Cursor (possibly empty), or False
        """
        projection = field_dict.get('field_dict')
        common.var_dump("当前查找key:val|%s:%s" % (key, val))
        return_data = self.db.webpage.find({key:val}, projection)
        if return_data is not None:
            return return_data
        return False

    def insert_db(self, source='', accept=False, encoding='gb2312'):
        """
        Fetch self.url, filter the payload and upsert it into the cache.

        :param source: origin tag stored alongside the page
        :param accept: send "Accept-Encoding: gzip, deflate" when True
        :param encoding: charset used to decode the response body
        :return: the filtered payload, or False on any failure
        """
        timestamp = datetime.utcnow()
        return_data = self.curl_post(accept,encoding)
        if isinstance(return_data, dict):
            if return_data[constants.STATUS]['code'] != 0:
                common.var_dump("当前接口没有数据%s" % return_data[constants.STATUS]['message'])
                return False
        if return_data is None or return_data == "":
            return False
        return_data = common.filter_sporttery_json(return_data)
        if return_data is False:
            # BUG FIX: previously exit(0) killed the whole process (with a
            # *success* exit code) on a curl failure; fail soft like the
            # other error branches instead.
            print("当前curl出现异常,程序暂时退出")
            return False
        # Build the document once; the old code built it twice and the logged
        # copy was missing 'source'.
        mongo_dict = {self.mongo_key : timestamp, 'html':return_data,
                      'timestamp':int(time.time()), 'source':source}
        common.var_dump("'_id' : %s" % self.url, constants.mongo_logs)
        common.var_dump(mongo_dict, constants.mongo_logs, debug_open = False, is_print=False)
        # replace_one: Collection.update() is deprecated (removed in pymongo 4);
        # the file already requires pymongo >= 3.0 (list_indexes), where
        # replace_one is available.
        self.db.webpage.replace_one({'_id' : self.url}, mongo_dict, upsert=True)
        return return_data

    def create_one_data(self, _id=0, **data):
        """
        Upsert one cache document under the given _id.

        :param _id: unique primary key
        :param data: pass the payload as ``data=<dict>``; a default status
                     document is stored when data is None/omitted
        """
        timestamp = datetime.utcnow()
        # Robustness: .get() also covers a completely omitted data kwarg
        # (previously data['data'] raised KeyError).
        payload = data.get('data')
        if payload is None:
            payload = {'add_date':timestamp, 'timestamp':int(time.time()),'status':1}
        common.var_dump("存入mongo_id:%s" % _id, constants.mongo_logs)
        common.var_dump(payload, constants.mongo_logs, debug_open = False)
        self.db.webpage.replace_one({'_id':_id}, payload, upsert=True)

    def create_new_date(self, source = None, type = None, **data):
        """
        Store never-expiring seed documents, one per URL.

        :param source: origin site home-page URL (defaults to constants.LAGOU_URL)
        :param type: category tag (defaults to constants.LAGOU_TYPE)
        :param data: pass the iterable of URLs as ``data=<iterable>``
        :return: None
        """
        urls = data['data']
        if source is None:
            source = constants.LAGOU_URL
        if type is None:
            type = constants.LAGOU_TYPE
        add_date = datetime.utcnow()
        for url in urls:
            common.var_dump("_id:%s" % url, constants.mongo_logs)
            mongo_dict = {'add_date':add_date, 'timestamp':int(time.time()),
                          'status':1, 'source':source, 'type':type}
            common.var_dump(mongo_dict, constants.mongo_logs)
            self.db.webpage.replace_one({'_id':'%s' % url}, mongo_dict, upsert = True)

    def urllib_html(self):
        """Fetch self.url with urllib and keep the gb2312-decoded body on self.html."""
        self.html = request.urlopen(self.url).read().decode("gb2312", "ignore")

    def curl_post(self, accept = False, encoding='gb2312'):
        """
        Fetch the (possibly suffixed) URL with pycurl.

        :param accept: send "Accept-Encoding: gzip, deflate" when True
        :param encoding: charset used to decode the body (decode errors ignored)
        :return: decoded body string (also kept on self.html), or False on error
        """
        cu = pycurl.Curl()
        try:
            cu.setopt(pycurl.CONNECTTIMEOUT, 30)
            buffer = BytesIO()
            if accept is True:
                cu.setopt(pycurl.ACCEPT_ENCODING, "gzip, deflate")
            cu.setopt(cu.WRITEDATA, buffer)
            # self.type selects an anti-cache suffix appended to the URL.
            url = None
            if self.type == constants.ADD_MIN_TIMESTAMP:
                url = "%s%s" % (self.url, common.get_micro_time())
            if self.type == constants.ADD_DAY:
                url = "%s%s" % (self.url, constants.today)
            if self.type is None:
                cu.setopt(pycurl.URL, self.url)
            else:
                cu.setopt(pycurl.URL, url)
            cu.setopt(pycurl.USERAGENT, "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.75 Safari/537.36")
            cu.setopt(pycurl.HTTPHEADER, ["Expect:"])
            cu.perform()
            self.html = buffer.getvalue().decode(encoding, "ignore")
            return self.html
        except Exception as e:
            # Was `except (TimeoutError, Exception, ConnectionRefusedError)`:
            # Exception already subsumes the other two.
            print("[%s -- %s ]" % (common.get_date(), str(e)))
            return False
        finally:
            # BUG FIX: the curl handle leaked when perform() raised; close
            # unconditionally.
            cu.close()

# if __name__ == "__main__":
# 
#     mcache = MongoCache(url=constants.LAGOU_URL, mongo_key=constants.LAGOU_KEY).get_data(encoding='utf-8')
#     print(mcache)

