#!/usr/bin/python
# -*-coding:utf-8-*-
# pip install ddddocr 图片识别
"""传感器组管理"""
import asyncio
import json
import os
import time
from datetime import datetime,timedelta
from time import sleep
import requests
from dotenv import load_dotenv
import common
import db
import time
from threading import Thread
import threading

# Shared DB handle (also used by Egas below).
mysqldb = db.DbManager()
# Load credentials and config from a .env file before any os.getenv call.
load_dotenv(verbose=True)
# Today's date string (format produced by common.TodayTime()).
today_time = common.TodayTime()
# Same date with '-' replaced by '.' — appears unused in this chunk; TODO confirm.
today_time_new = today_time.replace('-', '.')
# Account credentials from the environment (.env).
username = os.getenv('ADMIN')
passworld = os.getenv('PWD')

# 获取登录后cookie
async def get_cookie(page):
    """Return the page's cookies flattened into one "name=value;…" string.

    page -- a browser page object exposing an awaitable ``cookies()`` that
    yields dicts with 'name' and 'value' keys (e.g. pyppeteer).
    """
    jar = await page.cookies()
    cookies = ''.join(
        '{0}={1};'.format(item.get('name'), item.get('value')) for item in jar
    )
    print(cookies)
    return cookies


def list_split(items, n):
    """Split *items* into consecutive slices of length *n* (last may be shorter)."""
    chunks = []
    for start in range(0, len(items), n):
        chunks.append(items[start:start + n])
    return chunks


def curl(gate_id):
    """Fetch the last hour of history data for one sensor and store each record.

    gate_id -- remote sensor id appended to the getHistoryData URL.

    Records are handed to curl_data() in batches of 5, one worker thread per
    record, with a short pause between batches to avoid hammering the server.
    """
    start_time = (datetime.now() + timedelta(minutes=-60)).strftime('%Y-%m-%d %H:%M:%S')
    now_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # NOTE(review): session id is hard-coded and will expire; it should come
    # from a real login (see get_cookie) instead of being embedded here.
    cookie_str = "JSESSIONID=C800668DDA71827B670F50DD8D216AFE;"
    # Parse "name=value;name=value;" into a dict.  The original split on '; '
    # (semicolon + space), which never occurs here, so the trailing ';' ended
    # up inside the cookie value; splitting on ';' and skipping empty
    # fragments fixes that.
    cookies = {}
    for fragment in cookie_str.split(';'):
        fragment = fragment.strip()
        if fragment:
            name, _, value = fragment.partition('=')
            cookies[name] = value
    data = {
        'startTime': start_time,
        'endTime': now_time,
    }
    headers = {
        'referer': 'http://210.12.220.207:8003/adp/home/sensor/sensordata.jsp?sensorid'
                   '=40289e4f83b52f2c0184190752fd76d6&sn=SNS00187763',
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_16_0) AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/71.0.3542.0 Safari/537.36',
    }
    resp = requests.get(
        "http://210.12.220.207:8003/adp/sensorservice/getHistoryData/" + gate_id,
        headers=headers, cookies=cookies, params=data, timeout=20)
    print(resp)
    json_text = json.loads(resp.text)
    print(json_text)
    list_html = json_text['datalist']
    print(len(list_html))
    time.sleep(1)

    # Process records in batches of 5; each record gets its own worker thread
    # (these are threads, not processes — the original comment was misleading).
    for batch in list_split(list_html, 5):
        workers = []
        for record in batch:
            worker = Thread(target=curl_data, args=(record,))
            workers.append(worker)
            worker.start()
        for worker in workers:
            worker.join()
        sleep(0.5)


def curl_data(list_html_list):
    """Map one sensor record onto device_log columns and insert it (dedup by sn+time).

    list_html_list -- dict with 'sn', 'sensorfield' (Chinese field names) and
    'valuelist', whose first entry carries the parallel 'fieldvalue' list and
    the reading 'time'.  Unknown field names are appended to error_sn.txt for
    later handling instead of aborting the record.
    """
    print(list_html_list)
    # Chinese sensor-field name -> device_log column name.
    field = {'空气温度': 'data1', '空气湿度': 'data2', '光照强度': 'data3', '二氧化碳浓度': 'data4', '土壤温度': 'data5', '土壤湿度': 'data6',
             '土壤PH值':'data7','电导率': 'data8', '土壤氮含量': 'data9', '土壤磷含量': 'data10', '土壤钾含量': 'data11', '氨气浓度': 'data16',
             '硫化氢浓度': 'data17',
             '风速': 'data18', '风向': 'data19', '降雨量': 'data20', 'PM2.5': 'data21', 'PM10': 'data22', 'VOC': 'data23',
             '甲烷': 'data24', '大气压': 'data25','PM2.5浓度':'data21','光合有效辐射':'data26'}
    info = {
        'sn': list_html_list['sn'],
    }
    remark = {}
    values = list_html_list['valuelist'][0]['fieldvalue']
    for k, name in enumerate(list_html_list['sensorfield']):
        # Raw name/value pairs are always preserved in the remark column.
        remark[name] = values[k]
        # The original looked the name up (for a debug print) BEFORE the try,
        # so an unknown field crashed the worker instead of being logged;
        # the lookup now happens inside the narrow except KeyError.
        try:
            info[field[name]] = values[k]
        except KeyError:
            # Unknown field name: record it in a file for later processing.
            with open("error_sn.txt", 'a') as file:
                file.write(list_html_list['sn'] + "_" + name + "\n")
    info['create_date'] = list_html_list['valuelist'][0]['time']
    info['remark'] = json.dumps(remark, ensure_ascii=False)
    print(info)
    # Fresh connection per worker thread: the module-level handle is not
    # shared across threads here.
    mysqldb = db.DbManager()
    find_info = mysqldb.table_select_one(table='device_log', field="id",
                                         where={"sn": list_html_list['sn'],
                                                'create_date': list_html_list['valuelist'][0]['time']})
    # Skip rows already stored for this sn + timestamp.
    if find_info:
        print("已存在数据 跳过")
    else:
        mysqldb.table_insert(table='device_log', data=info)

class Egas:
    """Fetch history data for every sensor updated within the last hour.

    Instantiating the class runs the whole job: query candidate sensors from
    device_gate, then pull each one's data via curl().
    """

    def __init__(self):
        # Sensors in normal state (state=0) whose last update is at most
        # 60 minutes old (previously 30 — kept at 60).
        cutoff = (datetime.now() + timedelta(minutes=-60)).strftime('%Y-%m-%d %H:%M:%S')
        sql = 'SELECT sn FROM device_gate where state=0 and update_old>="' + cutoff + '" order by id asc limit 1000'
        mysqldb.execute(sql)
        self.org_ids = mysqldb.cur.fetchall()
        asyncio.get_event_loop().run_until_complete(self.main())

    async def main(self):
        # Chunk size 1: sensors are processed strictly one at a time.
        chunks = list_split(self.org_ids, 1)
        print(chunks)
        for chunk in chunks:
            for row in chunk:
                curl(row['sn'])

# Entry point: running the module kicks off the whole scrape immediately.
# NOTE(review): consider guarding with `if __name__ == "__main__":` so the
# module can be imported without side effects.
Egas()
