# -*- coding:utf-8 -*-
# @Author: Gaiting
# @Time: 2023/6/2 13:54
# @Function:
import datetime
import json
import platform
import time

import pandas
import pymysql
import redis

from retry import retry


class DailyAlarmPointRule2:
    """Rule-2 daily alarm detection.

    Finds 15-minute slots where a production (yield) device is running at a
    significant load while its paired treatment device is effectively idle
    (see the WHERE clause of the query in ``insert_new_alarm_list``), and
    persists the resulting alarm points with REPLACE INTO semantics.

    NOTE(review): database credentials are hard-coded in ``_db_util`` and
    ``_df_res_to_db`` -- they should move to configuration / environment.
    """

    def run(self, data=""):
        """Entry point.

        Parameters
        ----------
        data : str
            Comma-separated ``"project_id,delta,data_time"``. A ``data_time``
            of ``'0'`` means "derive it from the wall clock": the start of the
            current 15-minute bucket, shifted back ``(delta + 1) * 15`` minutes.
            Otherwise ``data_time`` is used as-is (``'%Y-%m-%d %H:%M:%S'``).
        """
        self.cur, self.conn = self._db_util()
        self.project_id, delta, self.data_time = str(data).split(',')
        cur_time = datetime.datetime.now()
        # Floor the current minute down to its 15-minute bucket boundary.
        cur_min = 15 * (cur_time.minute // 15)
        if self.data_time == '0':
            bucket_start = datetime.datetime(cur_time.year,
                                             cur_time.month,
                                             cur_time.day,
                                             cur_time.hour,
                                             cur_min, 0)
            self.data_time = (bucket_start
                              - datetime.timedelta(minutes=(int(delta) + 1) * 15)
                              ).strftime('%Y-%m-%d %H:%M:00')
            print(self.data_time, "aaa")
        # Insert the alarm points found for the resolved time slot.
        self.insert_new_alarm_list()

    def insert_new_alarm_list(self):
        """Query alarm candidates for ``self.data_time`` and persist them.

        One output row is produced per (data_time, treatment device); all
        production devices paired with that treatment device are folded into
        the JSON column ``yield_dev_id_list`` as ``[[id, name], ...]``.
        """
        sql = f'''
                select t1.data_time, t1.ent_id, t1.yield_dev_id, t1.dev_name as yield_dev_name, t1.status as status1, t1.value as yield_value, t1.rated_power as yield_rated_power,
               t2.treat_dev_id as dev_id, t2.dev_name, t2.status as status2, t2.value as treat_value, t2.rated_power as treat_rated_power,
               t1.project_id, t2.ent_name, t2.industry_type_id,
               t2.industry_type, t2.county_id, t2.county_name, t2.town_id, t2.town_name,
                4 as warn_type from
                    (select distinct a.id as ent_id,
                            c.yield_dev_id,
                           c.treat_dev_id,
                           b.dev_name,
                            d.status,
                            d.data_time,
                            a.project_id,
                                     ifnull(b.median_rated_power, b.rated_power) as rated_power,
                                     f.value
                    from elec_enterprise_info as a
                        left join rel_yield_treat as c on c.ent_id=a.id
                        left join elec_dev_info as b on b.ent_id = c.ent_id and b.id=c.yield_dev_id
                        left join (select guid, devid,project_id from elecdata_basic_info where name='总正向有功电度' and is_deleted=0) as e on b.id = e.devid
                        left join elecdata_dev_status as d on b.guid=d.guid
                        left join elecdata_convert_data as f on f.guid = e.guid and f.data_time=d.data_time and f.time_type=1
                        where d.data_time='{self.data_time}' and a.is_deleted=0 and b.is_deleted=0 and c.is_deleted=0 and d.is_deleted=0 and f.is_deleted=0
                        and b.dev_name not like '%数码印刷机%'
                        ) as t1
                inner join
                    (select distinct a.id as ent_id,
                    a.ent_name, a.industry_type_id, e.industry_type, a.county_id, a.county_name, a.town_id, a.town_name,
                           c.treat_dev_id,
                           b.dev_name,
                            d.status,
                            d.data_time,
                            a.project_id,
                                     ifnull(b.median_rated_power, b.rated_power) as rated_power,
                            g.value
                    from elec_enterprise_info as a
                    left join dict_industry_type as e on a.industry_type_id=e.id
                        left join rel_yield_treat as c on c.ent_id=a.id
                        left join elec_dev_info as b on b.ent_id = c.ent_id and b.id=c.treat_dev_id
                        left join (select guid, devid,project_id from elecdata_basic_info where name='总正向有功电度' and is_deleted=0) as f on b.id = f.devid
                        left join elecdata_dev_status as d on b.guid=d.guid
                        left join elecdata_convert_data as g on g.guid = f.guid and g.data_time=d.data_time and g.time_type=1
                        where d.data_time='{self.data_time}' and a.is_deleted=0 and b.is_deleted=0 and c.is_deleted=0 and d.is_deleted=0 and g.is_deleted=0
                        ) as t2
                on t1.treat_dev_id=t2.treat_dev_id and t1.ent_id=t2.ent_id and t1.data_time=t2.data_time
                where t1.value>=0.2 and t1.value>=(t1.rated_power*0.2) and t2.value<(t2.rated_power*0.1)
                '''

        print(sql, "aaa")
        res_pd = self._db_res_to_df(sql)
        if res_pd.empty:
            return

        # Build the output rows in a plain list: DataFrame.append() was
        # removed in pandas 2.0, and one DataFrame(rows) call is O(n) anyway.
        rows = []
        for (slot_time, dev_id), grp in res_pd.groupby(by=['data_time', 'dev_id']):
            # All (id, name) pairs of yield devices tied to this treat device.
            yield_pairs = list(zip(grp['yield_dev_id'].tolist(),
                                   grp['yield_dev_name'].tolist()))
            rows.append({'data_time': slot_time,
                         'ent_id': grp['ent_id'].tolist()[0],
                         'dev_id': dev_id,
                         'dev_name': grp['dev_name'].tolist()[0],
                         'ent_name': grp['ent_name'].tolist()[0],
                         'industry_type_id': grp['industry_type_id'].tolist()[0],
                         'industry_type': grp['industry_type'].tolist()[0],
                         'county_id': grp['county_id'].tolist()[0],
                         'county_name': grp['county_name'].tolist()[0],
                         'town_id': grp['town_id'].tolist()[0],
                         'town_name': grp['town_name'].tolist()[0],
                         'yield_dev_id_list': json.dumps(yield_pairs),
                         })

        new_pd = pandas.DataFrame(rows)
        new_pd = new_pd[['data_time', 'ent_id', 'dev_id', 'yield_dev_id_list',
                         'dev_name', 'ent_name', 'industry_type_id', 'industry_type',
                         'county_id', 'county_name', 'town_id', 'town_name',
                         ]].drop_duplicates()

        # NaN would break the integer casts below; -1 is the sentinel value.
        new_pd.fillna(-1, inplace=True)
        new_pd = new_pd.astype({'county_id': 'int32', 'ent_id': 'int64',
                                'dev_id': 'int64'}, copy=True)

        # Write out in batches of 2000 rows.
        for start in range(0, len(new_pd), 2000):
            self._df_res_to_db(new_pd[start:start + 2000], 'elec_daily_alarm_rule2')

    def _db_util(self):
        """Open the read-side MySQL connection.

        Returns
        -------
        tuple
            ``(cursor, connection)`` on the electricity_data database.
        """
        # NOTE(review): hard-coded credentials -- move to env/config.
        database_config = {
            'db_name': 'electricity_data',
            'username': 'electricity_api_service',
            'password': 'GJlfh7&#jg',
            'host': 'mmservice-05.mysql.hotgrid.cn',
            'port': 3306
        }

        conn = pymysql.connect(host=database_config['host'], user=database_config['username'],
                               password=database_config['password'], database=database_config['db_name'])
        return conn.cursor(), conn

    def _db_res_to_df(self, sql):
        """Execute *sql* on ``self.conn`` and return the result as a DataFrame.

        Closes ``self.conn`` afterwards (even on failure), so each ``run()``
        opens a fresh connection via ``_db_util``.
        """
        self.conn.ping()  # reconnect if the connection has dropped
        try:
            return pandas.read_sql(sql, con=self.conn)
        finally:
            self.conn.close()

    @retry(tries=3, delay=3000)
    def _df_res_to_db(self, pd, table_name):
        """Best-effort write of DataFrame *pd* into *table_name* on both
        target databases, using REPLACE INTO semantics.

        A failure on one target is logged and does not prevent the write to
        the other target.

        NOTE(review): ``retry(delay=3000)`` is *seconds* (~50 min) in the
        retry package -- confirm whether milliseconds were intended. Since
        write failures are caught below, the decorator currently never fires.
        """
        if platform.system() == 'Linux':
            conn1 = 'mysql+pymysql://electricity_data:EF2zUl1GHss3yqay@192.168.195.201:3317/electricity_data'
        else:
            conn1 = 'mysql+pymysql://electricity_data:EF2zUl1GHss3yqay@10.20.7.227:33176/electricity_data'
        conn2 = 'mysql+pymysql://electricity_api_service:GJlfh7&#jg@mmservice-05.mysql.hotgrid.cn:3306/electricity_data'

        def mysql_replace_into(table, conn, keys, data_iter):
            # pandas.to_sql "method" hook: rewrite INSERT INTO -> REPLACE INTO
            # by installing a custom compiler for SQLAlchemy Insert constructs.
            from sqlalchemy.ext.compiler import compiles
            from sqlalchemy.sql.expression import Insert

            @compiles(Insert)
            def replace_string(insert, compiler, **kw):
                s = compiler.visit_insert(insert, **kw)
                return s.replace("INSERT INTO", "REPLACE INTO")

            data = [dict(zip(keys, row)) for row in data_iter]
            print(data, "aaa")
            # BUG FIX: insert(replace_string="") passed an invalid kwarg to
            # SQLAlchemy, raising a TypeError that the former bare excepts
            # silently swallowed -- so nothing was ever written.
            conn.execute(table.table.insert(), data)

        for target in (conn1, conn2):
            try:
                pd.to_sql(
                    name=table_name,
                    con=target,
                    index=False,
                    chunksize=500,
                    if_exists='append', method=mysql_replace_into)
            except Exception as exc:
                # Best-effort dual write: log instead of silently dropping
                # the error, then continue to the other target.
                print(f"write to {table_name} via {target.split('@')[-1]} failed: {exc!r}")


if __name__ == '__main__':

    da = DailyAlarmPointRule2()
    import platform
    if platform.system() == 'Windows':
        # Dev/local run: data_time='0' makes run() derive the slot from the
        # wall clock, stepping the delta down (9, 8).
        for i in range(2):
            da.run(f"225,{9 - i},0")
    else:
        import pandas as pd

        # Backfill run over an explicit date range.
        start_date = "2024-01-15"
        end_date = "2024-01-16"
        # Full 15-minute grid across the range (inclusive of both endpoints).
        grid = pd.date_range(start=start_date, end=end_date, freq='15min')

        # Keep only slots from 17:00 onward each day.
        # NOTE(review): `dt.minute >= 0` is always true, and the original
        # comments mentioned 06:00/06:30 while the code uses hour >= 17 --
        # confirm which cutoff is intended.
        selected = [dt for dt in grid if dt.hour >= 17 and dt.minute >= 0]

        # Format each slot and replay it through run() with a descending delta.
        stamps = [dt.strftime('%Y-%m-%d %H:%M:%S') for dt in selected]
        for i, stamp in enumerate(stamps):
            da.run(f"225,{9 - i},{stamp}")