#!/usr/bin/env python
# -*- coding: utf-8 -*-
# author:Charles.chen
# createDate:2017/9/5
# Title:
# Tip:
from app.model import SensorData, RemoteCommand, Sensor
from app.toolkit.decoratorUtil import catchDbException
from app.toolkit.configSingle import ConfigSingle
from app.toolkit import commonUtil
from app.toolkit import fileManager
from app.crontime.schedulerJob import Quartz
from app.sensor.sensorService import getAllSensorList
import requests, json
import os


configObj = ConfigSingle()  # shared application configuration accessor
quartz = Quartz()  # shared scheduler wrapper used to pause/resume jobs

# 备份数据文件 根据可用传感器进行备份 一天一次
# 生成文件同时 记录数据到数据库FileManager中
def backUpSensorDataJob(date=None):
    """Dump yesterday's readings of every sensor to backups/files/ as JSON
    lines, one file per sensor, and record each file in FileManager.

    :param date: timestamp string used in the file name; defaults to now
                 formatted as %Y%m%d%H%M%S.
    """
    if date is None:
        date = commonUtil.getNowFormat("%Y%m%d%H%M%S")
    sensorList = getAllSensorList()
    sensorDict = commonUtil.convertDbObjToDict(sensorList, Sensor)
    for sensor in sensorDict:
        sensor_no = sensor["sensor_no"]
        filename = "sensor_{sensorNo}_{date}".format(sensorNo=sensor_no, date=date)
        filePath = "backups/files/{fileName}.txt".format(fileName=filename)
        data = getSensorDataFromSqlLastDay(sensor_no)
        # getSensorDataFromSqlLastDay may return None (query failed / swallowed)
        sensorData = [] if data is None else commonUtil.convertDbObjToDict(data, SensorData)
        with open(filePath, "w") as f:
            for item in sensorData:
                f.write(json.dumps(item) + "\n")
        fileSize = os.path.getsize(filePath)
        # Record the generated file in the FileManager table.
        # BUG FIX: the original passed the literal string "filename" instead
        # of the generated file name.
        fileManager.addNewFileRow(name=filename, suffix="txt", size=fileSize, type="sensor")

# 备份配置文件服务 一天一次
# 备份配置文件服务 一天一次
def backUpConfigJob(date=None):
    """Stub: configuration backup is not implemented yet.

    BUG FIX: the original body called os.popen("") which spawned a shell
    for an empty command and leaked the returned pipe while doing no
    useful work; the call has been removed.

    :param date: reserved for the backup file timestamp (currently unused).
    """
    # TODO: implement configuration backup (copy config files into backups/)
    pass

# 定时同步服务器 本地的传感器数据 同步处理 获得返回结果之后重置数据的发送位标记 可更改
def syncPushDataJob():
    print "postSensorData begin"
    s = requests.session()
    s.keep_alive = False
    domain_address = configObj.configuration.get("internet_conf", 'remote_address')
    post_url = configObj.configuration.get("remote_url", 'post_data')
    final_path = commonUtil.buildHttpRequestUrl(domain_address, post_url)
    post_data = getSyncSensorData(100)
    post_array, id_array = buildPostData(post_data)
    if post_array and id_array:
        # 使用{'Connection': 'close'}来处理ConnectionError: HTTPConnectionPool(): Max retries exceeded with url问题
        res = requests.post(final_path, json.dumps({"post_data": post_array}), headers={'Connection': 'close'})
        if res.content and res.status_code == 200 and json.loads(res.content)["status"] == 1:
            updateSensorDataStatusByIds(id_array)
            pass
        else:
            # 数据推送失败 推送失败次数+1
            current_retry = configObj.configuration.get("system_conf", "current_retry")
            configObj.configuration.set("system_conf", "current_retry",  str(int(current_retry)+1))
            configObj.flushConfig()
            return
            pass
        # 添加远程命令
        if res.content and res.status_code == 200 and json.loads(res.content)["command"]:
            handleCommand(json.loads(res.content)["command"])
            pass
    print "postSensorData over"
    pass

# 在网络失败的情况下 进行轮询.180S一次 3min 一次
# 若成功 返回已上传数据的偏移量(时间戳表示偏移量)
def applyNetworkCheckJob():
    print "applyNetworkCheckJob begin"
    s = requests.session()
    s.keep_alive = False
    remote_address = ""
    remote_interface = ""
    if True:
        quartz.sched.pause_job("applyNetworkCheck")
        quartz.sched.resume_job("syncPushData")
        configObj.configuration.set("system_conf", 'current_retry', 0)
        configObj.flushConfig()
    # get_url = commonUtil.buildHttpRequestUrl(remote_address, remote_interface)
    # res = requests.post(get_url, json.dumps({"post_data": post_array}), headers={'Connection': 'close'})
    # if res.content and res.status_code == 200 and json.loads(res.content)["status"] == 1:
    #     # 根据云端返回的结果 重置各个传感器的推送数据偏移量
    #     time_dict = res.data
    #     updateSensorDataStatusByTime(time_dict)
    #     # 同时将当前请求失败次数 重置为0
    #     configObj.configuration.set("system_conf", 'current_retry', 0)
    #     configObj.flushConfig()
    #     pass
    print "applyNetworkCheckJob end"
    pass

# 检测数据sd卡使用量 若超量 则启用sd卡策略 在策略0的情况下：循环删除1M数据 30min分钟一次
def monitorTFJob():
    print "monitorTFJob begin"
    storage_maxSize = configObj.configuration.get("system_conf", "storage_maxSize")
    # 先检查当前sd卡容量
    current_size = None
    with os.popen("cd / && df | grep /mnt") as ps:
        strLine = ' '.join(ps.readline().split())
        # 1 最大值 2 已使用值 byte
        res = strLine.split(" ")
        current_size = int(int(res[2])/1024)
    if current_size is None:
        return
    if storage_maxSize > (current_size+500):
        # 查看定时任务中是否有定时删除数据的服务，若存在 则移除服务
        quartz.sched.pause_job("clearSensorData")
        pass
    else:
        # 查看存储策略
        storage_method = configObj.configuration.get("system_conf", "storage_method")
        # 根据策略启动的服务
        if storage_method == 0:
            pass
        elif storage_method == 1:
            quartz.sched.resume_job("clearSensorData")
            pass
        pass
    print "monitorTFJob end"
    pass

# 删除传感器数据定时服务 正序删除 60S一次
# 返回删除数量
def clearSensorDataJob():
    print "clearSensorDataJob begin"
    return SensorData.delete().where(SensorData.is_post == False).order_by(SensorData.created_time.asc()).limit(1000).execute()

# 检查当前请求失败最大尝试次数 如果超过设置的最大值 则开启链接请求服务
def checkSensorDataJob():
    print "checkSensorDataJob begin"
    current_retry = configObj.configuration.get("system_conf", 'current_retry')
    max_retry = configObj.configuration.get("system_conf", 'max_retry')
    if current_retry > max_retry:
        quartz.sched.pause_job("syncPushData")
        quartz.sched.resume_job("applyNetworkCheck")
        pass
    print "checkSensorDataJob end"
    pass

# -----------------------定时任务分界线-------------------------------------------

# 重置数据推送请求失败次数
# 重置数据推送请求失败次数
def resetMaxRetry():
    """Stub: query the remote endpoint and reset the retry counter.

    NOTE(review): base_url is still an empty placeholder, so the request
    cannot succeed until a real address is filled in.
    """
    base_url = ''
    get_url = base_url + '/'
    res = requests.get(get_url)
    # BUG FIX: requests.Response is not subscriptable; the HTTP status
    # lives in res.status_code, not res['status'].
    if res is not None and res.status_code == 200:
        # TODO: reset the counter in the config file
        pass
    else:
        # TODO: log the failure
        pass

# 获取未发送的传感器采集数据 每次数量：200
# 获取未发送的传感器采集数据 每次数量：200
def getSyncSensorData(limit=200):
    """Return a cursor over at most *limit* not-yet-pushed readings."""
    return getUnPostSensorData(limit)

# 从数据库中获取未被推送的传感器数据
# 从数据库中获取未被推送的传感器数据
@catchDbException
def getUnPostSensorData(limit):
    """Return up to *limit* rows with is_post == False, newest first.

    BUG FIX: the is_post filter had been commented out (debug leftover),
    so already-pushed rows were re-sent on every sync cycle.
    """
    return SensorData.select().where(SensorData.is_post == False).order_by(SensorData.id.desc()).limit(limit).execute()


# 构建传感器采集数据的 推送结构
# 构建传感器采集数据的 推送结构
def buildPostData(data=None):
    """Build the push payload plus the matching list of row ids.

    :param data: iterable of readings with param_name/val/created_time/
                 sensor_no/id attributes; falsy means no data.
    :return: (payload list of dicts, list of row ids)
    """
    payload = []
    row_ids = []
    for record in (data or []):
        payload.append({
            "param": record.param_name,
            "val": record.val,
            "collect_time": record.created_time,
            "sensor_no": record.sensor_no
        })
        row_ids.append(record.id)
    return payload, row_ids

# 执行远程command
# return
# command={}
#
#
#

def executeCommand(command=None):
    """Stub: parse and execute a remote command.

    BUG FIX: guard against the default None, which would crash json.loads.
    The original also overwrote the parsed command with an empty template
    dict (dead code); that template is kept below as documentation only.

    :param command: JSON text of the command; expected shape is
                    {"id", "type", "sensor_no", "action"} — TODO confirm.
    :return: None (execution not implemented yet)
    """
    if command is None:
        return None
    command_dict = json.loads(command)
    # TODO: dispatch on command_dict["type"] / ["action"]
    return None


# 根据传感器编号从数据库中获取昨天的每个传感器的数据
# 根据传感器编号从数据库中获取昨天的每个传感器的数据
@catchDbException
def getSensorDataFromSqlLastDay(sensorNo=None):
    """Return readings collected yesterday between 00:00 and 23:59.

    :param sensorNo: restrict to this sensor; None means all sensors.
    """
    # Yesterday's window as unix timestamps.
    # BUG FIX: removed the hard-coded debug timestamps (1505441416 /
    # 1505442945) that clobbered the computed window.
    lastDayBegin = commonUtil.getLastDayUnixBegin()
    lastDayEnd = commonUtil.getLastDayUnixEnd()
    # BUG FIX: peewee conditions must be combined with "&", not the Python
    # "and" keyword (which evaluates to only the last condition), and
    # desc must be called — the original passed the bound method.
    query = SensorData.select().where(
        (SensorData.created_time >= lastDayBegin) &
        (SensorData.created_time <= lastDayEnd))
    if sensorNo is not None:
        query = query.where(SensorData.sensor_no == sensorNo)
    return query.order_by(SensorData.created_time.desc()).execute()



# 传感器采集数据推送完成之后 修改推送状态 避免二次推送 同时开启数据推送服务 关闭链接请求服务器
# {
#   "sensor_no1":1505460815,
#   "sensor_no2":1505460816
# }
# 传感器采集数据推送完成之后 修改推送状态 避免二次推送 同时开启数据推送服务 关闭链接请求服务器
# {
#   "sensor_no1":1505460815,
#   "sensor_no2":1505460816
# }
def updateSensorDataStatusByTime(timestamps={}):
    """Mark each sensor's rows up to its timestamp as posted, later rows not.

    NOTE(review): the mutable default argument is kept only for interface
    compatibility; callers should always pass their own dict.

    :param timestamps: {sensor_no: unix_timestamp} offsets from the cloud.
    """
    # BUG FIX: iterate .items() — iterating the dict directly yields keys
    # only, so "for key, val in timestamps" mis-unpacked. Conditions are
    # combined with "&" (peewee), not the Python "and" keyword.
    for sensor_no, offset in timestamps.items():
        SensorData.update(is_post=False).where(
            (SensorData.sensor_no == sensor_no) & (SensorData.created_time > offset)).execute()
        # BUG FIX: "<<" is peewee's IN operator and expects an iterable;
        # a scalar timestamp cutoff needs "<=".
        SensorData.update(is_post=True).where(
            (SensorData.sensor_no == sensor_no) & (SensorData.created_time <= offset)).execute()
    # Resume the push job and stop the network re-check job (the original
    # for/else always ran this — there is no break in the loop).
    # NOTE(review): job names are still empty placeholders.
    quartz.stopAndStartJob(startJob="", stopJob="")
    return

# 传感器采集数据推送完成之后 修改推送状态 避免二次推送
# 根据数据id
# 传感器采集数据推送完成之后 修改推送状态 避免二次推送
# 根据数据id
def updateSensorDataStatusByIds(id_array=None):
    """Set is_post=True for the given row ids; return the updated row count.

    BUG FIX: guard against a None/empty id list, which would make the
    peewee IN clause ("<<") fail.
    """
    if not id_array:
        return 0
    return SensorData.update(is_post=True).where(SensorData.id << id_array).execute()


# 处理请求返回的command命令
# return None/True/False
def handleCommand(command=None):
    """Persist a command returned by the server into RemoteCommand.

    :param command: raw command payload from the server; None is a no-op.
    :return: None when there is no command, True on successful insert,
             False when the insert fails.
    """
    if command is None:
        return
    insert_row = {
        "json_text": command,
        # BUG FIX: getNowStr() was undefined in this module (NameError at
        # runtime); use the toolkit helper already used at the top of the
        # file. NOTE(review): confirm the expected created_time format.
        "created_time": commonUtil.getNowFormat("%Y%m%d%H%M%S"),
        "is_executed": False
    }
    try:
        RemoteCommand.insert(insert_row).execute()
        res = True
    except Exception:
        # Best-effort: a failed insert is reported via the return value.
        res = False
    return res





if __name__ == '__main__':
    # Module is meant to be imported by the scheduler; nothing runs directly.
    pass