from django.conf import settings 


import json
# import psycopg2


'''
    PostgreSQL health/status queries (implementation currently disabled;
    see the commented-out psycopg2-backed code below).
'''


class DbQuery:
    """Stub for the PostgreSQL health/status query helper.

    The psycopg2-backed implementation (connection setup, lock/shard/job
    status queries) is kept commented out below; this empty class only
    preserves the import surface so existing ``DbQuery`` references keep
    working while the feature is disabled.
    """
#     db_health_sql = '''select * from (   (select query as "long_query_conn",      count(query) over(partition by query) as "long_query_conn_count",     now()-xact_start as long_query_time  from pg_stat_activity  where now()-xact_start>interval '1 hour'  and query !~ '^COPY' and state<>'idle'  order by xact_start limit 1) union  select '无' as "long_query_conn",      0 as "long_query_conn_count",     interval '0 days' as long_query_time from pg_stat_activity where (select count(*) from pg_stat_activity  where now()-xact_start>interval '1 hour'  and query !~ '^COPY' and state<>'idle')=0 ) a   cross join   (  (select  query as "long_lock_query",  count(*) over(partition by query) as " lock_count",      now()-xact_start as " long_lock_time"   from pg_stat_activity where pid in (   select pid from pg_locks where granted='t' and (objid, objsubid) in (    select objid, objsubid from pg_locks where granted='f' group by objid,objsubid)   ) order by query_start limit 1 ) union   select  '无' as "long_lock_query", 0 as " lock_count",  interval '0 days' as "long_lock_time"   from pg_stat_activity where (select count(*) from pg_stat_activity where pid in (   select pid from pg_locks where granted='t' and (objid, objsubid) in (    select objid, objsubid from pg_locks where granted='f' group by objid,objsubid)  ))=0 ) b   cross join  (  (SELECT  first_value(locker_act.query) over (order by locker_act.xact_start desc)  as "lock_object_query", count(*) over (partition by locker_act.xact_start) as "lock_object_count",      current_timestamp - least(query_start,xact_start) as "lock_object_time"  FROM pg_locks locker,pg_stat_activity locker_act, pg_class pc WHERE locker.pid=locker_act.pid  AND NOT locker.pid=pg_backend_pid()     AND application_name<>'pg_statsinfod'     AND locker.relation = pc.oid     AND pc.reltype <> 0  AND current_timestamp - least(query_start,xact_start) > '10 min'::interval  AND locker_act.query NOT ILIKE '%VACUUM%' limit 1 )union  SELECT  '无' as 
"lock_object_query",0 as "lock_object_count",  interval '0 days' as "lock_object_time" from pg_stat_activity where (select count(*) FROM pg_locks locker,pg_stat_activity locker_act, pg_class pc WHERE locker.pid=locker_act.pid  AND NOT locker.pid=pg_backend_pid()     AND application_name<>'pg_statsinfod'     AND locker.relation = pc.oid     AND pc.reltype <> 0  AND current_timestamp - least(query_start,xact_start) > '10 min'::interval  AND locker_act.query NOT ILIKE '%VACUUM%')=0 ) c'''
#     job_status_sql = '''with channel_type as  (  select distinct type from crm_job_status_trace where type not in ('tags','RET_ATK','wechat','tomirallm','VM_TMEP','tocamtotal','tocamincre','order-cg','order','operation','EC-ARVATO','member','order','point','test')), dates as ( select current_date - s.a as dates from pg_catalog.generate_series(1, 1) as s(a) ), job_status as (  select dates, type, status, last_time, cnt  from  (  select d.dates, c.type, j.status,   to_char(max(to_timestamp(j.create_date||j.start_time, 'yyyyMMddhh24miss')) over(partition by d.dates, c.type, j.status), 'yyyy-MM-dd hh24:mi:ss') last_time,  count(j.status) over(partition by d.dates, c.type, j.status) cnt,   row_number() over(partition by d.dates, c.type, j.status) rn  from channel_type c cross join dates d  left outer join crm_job_status_trace j on j.type = c.type and j.create_date::date = dates  ) t   where t.rn = 1 ), job_success as (  select j.dates, c.type, j.last_time, j.cnt  from channel_type c inner join job_status j on c.type = j.type and j.status = '1' and j.cnt > 0 ), job_unexecuted as (  select j.dates, c.type, coalesce(j.last_time, '无') last_time, j.cnt  from channel_type c left join job_status j on c.type = j.type   where j.cnt = 0 ), job_failure as (  select dates, type, last_time, status, cnt  from job_status  where status <> '1' ) select d.*,s.job_success job_success, u.job_unexecuted job_unexecuted, f.job_failure job_failure from dates d left join ( select dates, json_object_agg(type, last_time) job_success from job_success  group by dates ) s on d.dates=s.dates left join  ( select dates, json_object_agg(type, last_time) job_unexecuted from job_unexecuted group by dates ) u on d.dates=u.dates  left join ( select dates, json_object_agg(type||'_status_'||status,last_time) job_failure   from job_failure  group by dates ) f on d.dates=f.dates'''
#     db_shard_sql = '''select count(*) as shard_count from pg_dist_shard_placement where shardid in (  select shardid from pg_dist_shard_placement   where shardstate = 3   group by shardid   having count(shardid) = 2 );'''

#     def __init__(self, db_name):
       
#         params = settings.DATABASES[db_name]
        
#         conn = psycopg2.connect(database=params['NAME'],
#             user=params['USER'],
#             password=params['PASSWORD'],
#             host=params['HOST'],
#             port=params['PORT'])

#         self.conn = conn
    
#         cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
#         self.cur = cur


#     def query_lock_status(self):
#         self.cur.execute(self.db_health_sql)
#         rows = self.cur.fetchall()
         
#         return  self.query_to_dict(rows)
#         self.close()
#         # return [{"long_query_conn_count":10, "lock_count": 1,"lock_object_count":10}]


#     def query_shard_status(self):
#         self.cur.execute(self.db_shard_sql)
#         rows = self.cur.fetchall()
         
#         return  self.query_to_dict(rows)
#         self.close()
#         # return [{"shard_count":10}]


#     def query_job_status(self): 
#         self.cur.execute(self.job_status_sql)
#         rows = self.cur.fetchall()
         
#         return self.query_to_dict(rows)
#         # a = json.dumps({ "ba" : "2021-03-04 23:27:08", "base" : "2021-03-04 22:56:59", "MARS" : "2021-03-04 23:30:29", "tomirdbm" : "2021-03-04 23:21:56" })
#         # return [{"job_success": a, "job_unexecuted": a, "job_failed":a}]
#         self.close()


#     def close(self):
#         self.cur.close()
#         self.conn.close()


#     def query_to_dict(self, rows):
        
#         result = []
#         for i in rows:
#             row = {}
#             for j in i.items():
#                 row[j[0]] =  j[1]
#             result.append(row)
#         return result


# if __name__ == "__main__":
#     query = DbQuery('default')
#     query.query_db_health()