# encoding: utf-8  
'''
Created on 2018-06-15
description: metrics collector for PostgreSQL
@author: zhujianhua
'''
import requests
import time
import json
import sys
sys.path.append('./../helper')
# import InfoMonConstants
# from ConnectionHelper import ConnectionHelper
from helper import InfoMonConstants
from helper.ConnectionHelper import ConnectionHelper
from threading import Thread, Condition

class Collector:
    """Collect PostgreSQL runtime metrics and ship them to Open-Falcon.

    Each ``get*`` method runs one monitoring query against the target
    cluster and appends Open-Falcon data points to ``self.uploadData``.
    ``collect()`` fans all queries out over threads, ``send()`` POSTs the
    accumulated points as JSON to the falcon transfer, and ``run()`` does
    both in sequence.
    """

    def __init__(self):
        # Per-instance buffer.  The original class-level list was shared by
        # every instance and was never cleared between runs, so repeated
        # run() calls re-sent stale points and grew memory without bound.
        self.uploadData = []
        # Timestamp stamped onto every data point; refreshed in collect()
        # so it reflects collection time, not module-import time.
        self.ts = int(time.time())

    def run(self):
        """Collect all metrics once, then upload them."""
        self.collect()
        self.send()

    def _append_metric(self, metric, value, tags):
        """Append one Open-Falcon data point (GAUGE counter, 60s step)."""
        self.uploadData.append({
            "endpoint": InfoMonConstants.ENDPOINT,
            "metric": metric,
            "timestamp": self.ts,
            "step": 60,
            "value": value,
            "counterType": "GAUGE",
            "tags": tags,
        })

    def getSessionCount(self, conn):
        """Session count per state, excluding this monitoring backend."""
        results = conn.getValue(
            "SELECT state,count(*) "
            "FROM pg_stat_activity WHERE pid!=pg_backend_pid() "
            "group by state")
        for state, count in results:
            self._append_metric("sessionCount_" + state, count,
                                "name=session_count")

    def getSessionType(self, conn):
        """Session count per state under a second metric family.

        NOTE(review): runs the same query as getSessionCount; kept because
        both metric series may be consumed downstream.
        """
        results = conn.getValue(
            "SELECT state,count(*) "
            "FROM pg_stat_activity "
            "WHERE pid!=pg_backend_pid() "
            "group by state")
        for state, count in results:
            self._append_metric("sessiontypeCount_" + state, count,
                                "name=session_type")

    def getDatabaseStatus(self, conn):
        """Per-database size, cache hit ratio and commit ratio."""
        results = conn.getValue(
            "SELECT a.datname, "
            "pg_size_pretty(pg_database_size(a.datid)) AS size, "
            "CAST(xact_commit / (xact_rollback + xact_commit + 0.000001) * 100.0 AS NUMERIC(5, 2)) AS success, "
            "CAST(blks_hit / (blks_read + blks_hit + 0.000001) * 100.0 AS NUMERIC(5, 2)) AS cache "
            "FROM pg_stat_database a "
            "WHERE datname not in ('template0','template1') "
            "ORDER BY pg_database_size(a.datid) DESC")
        for datname, size, commit_ratio, cache_hit in results:
            # NOTE(review): value is pg_size_pretty() text (e.g. '7453 kB'),
            # not a number — confirm the transfer accepts string gauges.
            self._append_metric("database_" + datname + "_size", size,
                                "name=database_size")
            # NOTE(review): "_hint" looks like a typo for "_hit"; kept so
            # the existing metric series name is not broken.
            self._append_metric("database_" + datname + "_hint", cache_hit,
                                "name=database_cache")
            # Tag fixed: the original carried a copy-pasted
            # "name=database_cache" on the commit-ratio point.
            self._append_metric("database_" + datname + "_commit_ratio",
                                commit_ratio, "name=database_commit_ratio")

    def getXactDiff(self, conn):
        """Per-second commit/rollback/transaction/blocks-read deltas.

        Samples pg_stat_database twice, one second apart, and reports the
        difference per database.
        """
        sql = ("select t.datname,sum(xact_commit),sum(xact_rollback),"
               "sum(xact_rollback)+sum(xact_commit),sum(blks_read) "
               "from pg_stat_database t "
               "where datname not in ('template0','template1') "
               "group by datname")
        old_results = conn.getValue(sql)
        time.sleep(1)
        new_results = conn.getValue(sql)
        # Key the old sample by datname: without an ORDER BY the two result
        # sets are not guaranteed to come back in the same row order, so the
        # original index-paired subtraction could mix databases up.
        old_by_db = {row[0]: row for row in old_results}
        for row in new_results:
            datname = row[0]
            old = old_by_db.get(datname)
            if old is None:
                continue  # database created between the two samples
            self._append_metric("XactCommit_" + datname + "_per_seconds",
                                row[1] - old[1], "name=database_per_commit")
            self._append_metric("XactRollback_" + datname + "_per_seconds",
                                row[2] - old[2], "name=database_per_rollback")
            self._append_metric("XactTransactions_" + datname + "_per_seconds",
                                row[3] - old[3],
                                "name=database_per_transactions")
            self._append_metric("XactPhyBlocks_" + datname + "_per_seconds",
                                row[4] - old[4],
                                "name=database_PhyRead_per_blocks")

    def getSeq(self, helper):
        """Remaining capacity of every non-cycling sequence, per database."""
        for db_row in helper.getDbName():
            myConn = helper.getConnection(db_row[0])
            try:
                getSeqNameSql = (
                    "select rolname,nspname,relname "
                    "from pg_authid t1 , pg_class t2 , pg_namespace t3 "
                    "where t1.oid=t2.relowner and t2.relnamespace=t3.oid "
                    "and t2.relkind='S'")
                for owner, schema, seqname in helper.getResult(myConn, getSeqNameSql):
                    # Remaining increments before the sequence runs out.
                    seqSql = ("select (max_value-last_value)/increment_by "
                              "from {0}.{1} where not is_cycled"
                              ).format(schema, seqname)
                    for available in helper.getResult(myConn, seqSql):
                        # NOTE(review): "SequenceAvailale" typo kept so the
                        # existing metric series name is not broken.
                        self._append_metric(
                            "SequenceAvailale_" + owner + "_" + seqname,
                            available[0], "name=SequenceAvailable")
            finally:
                # Close even when a query raises (the original leaked here).
                myConn.close()

    def getLongTransaction(self, conn):
        """Oldest transaction running longer than LONGTIME seconds."""
        transactionsql = (
            "SELECT datname, usename, query, xact_start, "
            "now() - xact_start xact_duration, query_start, "
            "now() - query_start query_duration, STATE "
            "FROM pg_stat_activity WHERE STATE <>'idle' "
            "and now()-xact_start > interval '{0} SECOND' "
            "ORDER BY xact_start limit 1").format(InfoMonConstants.LONGTIME)
        for line in conn.getValue(transactionsql):
            # line[4] is the xact_duration interval.
            self._append_metric(
                "LongTransaction_" + line[0] + "_" + line[1],
                round(line[4].total_seconds()),
                "name=longTransaction_seconds")

    def getIndexUsage(self, helper):
        """Size of the ten largest indexes in each database."""
        getIndexUsageSql = (
            "SELECT current_database(),A.rolname,nspname,relname, "
            "round(100 * pg_relation_size ( indexrelid ) / pg_relation_size ( indrelid )) / 100 AS index_ratio, "
            "pg_size_pretty (pg_relation_size ( indexrelid )) AS index_size, "
            "pg_size_pretty (pg_relation_size ( indrelid )) AS table_size "
            "FROM pg_index I "
            "LEFT JOIN pg_class C ON ( C.oid = I.indexrelid ) "
            "LEFT JOIN pg_namespace N ON ( N.oid = C.relnamespace ) "
            "LEFT JOIN pg_authid A ON ( C.relowner = A.oid ) "
            "WHERE nspname NOT IN ( 'pg_catalog', 'information_schema', 'pg_toast' ) "
            "AND C.relkind = 'i' "
            "AND pg_relation_size ( indrelid ) > 0 "
            "order by pg_relation_size(indexrelid) desc limit 10")
        for db_row in helper.getDbName():
            myConn = helper.getConnection(db_row[0])
            try:
                for row in helper.getResult(myConn, getIndexUsageSql):
                    # row[5] is pg_size_pretty() text of the index size.
                    self._append_metric(
                        "IndexUsage_" + row[0] + "_" + row[3],
                        row[5], "name=IndexUsage")
            finally:
                # The original never closed this connection — leak fixed.
                myConn.close()

    def getDatabaseUsage(self, conn):
        """Total on-disk size of each non-template database."""
        databaseUsagesql = (
            "select datname,pg_size_pretty(pg_database_size(datname)) "
            "from pg_database "
            "where datname not in ('template0','template1') "
            "group by datname;")
        for datname, size in conn.getValue(databaseUsagesql):
            # Tag fixed: original carried a copy-pasted "name=IndexUsage".
            # (Stray per-row debug print removed as well.)
            self._append_metric("DatabaseUsage_" + datname, size,
                                "name=DatabaseUsage")

    def getTableSize(self, helper):
        """Size of the ten largest tables in each database."""
        getTableSizeSql = (
            "select current_database(),table_name,"
            "pg_size_pretty(pg_relation_size(table_schema||'.'||table_name)) as size_bytes "
            "from information_schema.tables "
            "where table_schema not in ('pg_catalog', 'information_schema') "
            "order by pg_relation_size(table_schema||'.'||table_name) desc "
            "LIMIT 10")
        for db_row in helper.getDbName():
            myConn = helper.getConnection(db_row[0])
            try:
                for dbname, table, size in helper.getResult(myConn, getTableSizeSql):
                    # Tag fixed: original carried a copy-pasted
                    # "name=IndexUsage".
                    self._append_metric(
                        "TableSizeUsage_" + dbname + "_" + table,
                        size, "name=TableSize")
            finally:
                # The original never closed this connection — leak fixed.
                myConn.close()

    def getLockStatus(self, conn):
        """Blocking/blocked session pairs and how long they have waited."""
        LockStatusSql = (
            "SELECT distinct locked_act.datname, "
            "locked.pid AS locked_pid, "
            "locker.pid AS locker_pid, "
            "locked_act.usename AS locked_user, "
            "locker_act.usename AS locker_user, "
            "locked.virtualtransaction, "
            "locked.transactionid, "
            "locked.locktype, "
            "now() - locker_act.xact_start "
            "FROM pg_locks locked, pg_locks locker, "
            "pg_stat_activity locked_act, pg_stat_activity locker_act "
            "WHERE locker.GRANTED = TRUE "
            "AND locked.GRANTED = FALSE "
            "AND locked.pid = locked_act.pid "
            "AND locker.pid = locker_act.pid "
            "AND ( locked.virtualtransaction = locker.virtualtransaction "
            "OR locked.transactionid = locker.transactionid )")
        for line in conn.getValue(LockStatusSql):
            # line[8] is the blocker's transaction age (interval).
            duration = round(line[8].total_seconds())
            if line[1] != line[2]:
                # Distinct pids: report the blocking (locker) side.
                self._append_metric(
                    "Locker_" + line[0] + "_" + line[3] + "_pid_" + str(line[2]),
                    duration, "name=Locker_info_seconds")
            else:
                # Same pid on both sides of the self-join: report it as
                # the blocked side, preserving the original's behavior.
                self._append_metric(
                    "Locked_" + line[0] + "_" + line[3] + "_pid_" + str(line[1]),
                    duration, "name=Locked_info_seconds")

    def collect(self):
        """Run every metric query on its own thread and print the points."""
        self.ts = int(time.time())   # stamp this run, not module-import time
        self.uploadData = []         # drop points from any previous run
        connHelper = ConnectionHelper(InfoMonConstants.DBNAME)
        try:
            jobs = [
                self.getSessionCount,
                self.getSessionType,
                self.getDatabaseStatus,
                self.getXactDiff,
                self.getSeq,
                self.getLongTransaction,
                self.getIndexUsage,
                self.getDatabaseUsage,
                self.getTableSize,
                self.getLockStatus,
            ]
            # NOTE(review): all threads share one ConnectionHelper; this
            # assumes it is safe for concurrent use — confirm.
            threads = [Thread(target=job, args=(connHelper,)) for job in jobs]
            for t in threads:
                t.start()
            for t in threads:
                t.join()
        finally:
            connHelper.closeConn()

        for line in self.uploadData:
            print(line)

    def send(self):
        """POST the collected points to the falcon transfer as JSON."""
        if self.uploadData:
            # FALCON_CLINENT: project constant name kept as declared.
            r = requests.post(InfoMonConstants.FALCON_CLINENT,
                              data=json.dumps(self.uploadData))
            print(r.text)



