# -*- coding: utf-8 -*-

from datetime import datetime
from datascrapy.mysqldbutil import DbConnect

import copy


class DatascrapyPipeline(object):
    """Default no-op pipeline: passes every item through untouched."""

    def process_item(self, item, spider):
        """Return *item* unchanged for the next pipeline stage."""
        return item


class DbrowScrapyPipeline(object):
    """Persist items from the 'dbrow' spider into the dbms_row_show table."""

    def process_item(self, item, spider):
        """Insert a deep copy of *item* when it comes from the 'dbrow' spider.

        The original item is always returned unchanged so downstream
        pipelines still receive it.
        """
        if spider.name == 'dbrow':
            # Deep-copy so the DB write cannot be affected by later
            # pipeline stages mutating the same item object.
            item_copy = copy.deepcopy(item)
            self.do_insert(item_copy)
        return item

    @staticmethod
    def do_insert(item_copy):
        """Best-effort insert of one row into dbms_row_show.

        Returns *item_copy* on success, None when the insert fails
        (the error is printed, never raised, so a DB hiccup does not
        abort the whole crawl).
        """
        day = datetime.now()
        try:
            # Parameterized query: values are bound by the driver,
            # never interpolated into the SQL string.
            insersql = "insert  into dbms_row_show (sys_name,db_name,table_name,row_num,data_size,fragment_rate,ip,gmt_create) values (%s,%s,%s,%s,%s,%s,%s,%s)"
            data = (
                item_copy['sysName'], item_copy['dbName'],
                item_copy['tableName'], item_copy['rowNums'],
                item_copy['dataSize'], item_copy['fragmentRate'],
                item_copy['ip'], day)
            db_connect = DbConnect()
            db_connect.execute(insersql, data)
            db_connect.commit_connect()
            return item_copy
        except Exception as ex:
            # Fix: the original `print` / `ex` on separate lines was a broken
            # Py2->Py3 artifact that silently discarded the error.
            print(ex)


class DbSlowSqlScrapyPipeline(object):
    """Persist items from the 'slowsql' spider into the dbms_slowsql_show table."""

    def process_item(self, item, spider):
        """Insert a deep copy of *item* when it comes from the 'slowsql' spider.

        The original item is always returned unchanged so downstream
        pipelines still receive it.
        """
        if spider.name == 'slowsql':
            # Deep-copy so the DB write cannot be affected by later
            # pipeline stages mutating the same item object.
            item_copy = copy.deepcopy(item)
            self.do_insert(item_copy)
        return item

    @staticmethod
    def do_insert(item_copy):
        """Best-effort insert of one row into dbms_slowsql_show.

        Errors are printed, never raised, so a DB hiccup does not abort
        the whole crawl. Returns None.
        """
        day = datetime.now()
        try:
            # Parameterized query: values are bound by the driver,
            # never interpolated into the SQL string.
            insersql = "insert  into dbms_slowsql_show (sys_name,sql_id,start_time,newest_start_time,exe_num,sel_sum_time,sel_the_shortest,sel_max_row,sel_the_longest,sql_context,ip,gmt_create) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
            data = (
                item_copy['sysName'], item_copy['sqlId'],
                item_copy['startTime'],
                item_copy['newestStartTime'], item_copy['exeNum'],
                item_copy['selSumTime'],
                item_copy['selTheShortest'], item_copy['selMaxRow'],
                item_copy['selTheLongest'],
                item_copy['sqlContext'],
                item_copy['ip'], day)
            db_connect = DbConnect()
            db_connect.execute(insersql, data)
            db_connect.commit_connect()
        except Exception as ex:
            # Fix: the original `print` / `ex` on separate lines was a broken
            # Py2->Py3 artifact that silently discarded the error.
            print(ex)


class TableInfoScrapyPipeline(object):
    """Persist items from the 'tableinfo' spider into the dbms_table_info table."""

    def process_item(self, item, spider):
        """Insert a deep copy of *item* when it comes from the 'tableinfo' spider.

        The original item is always returned unchanged so downstream
        pipelines still receive it.
        """
        if spider.name == 'tableinfo':
            # Deep-copy so the DB write cannot be affected by later
            # pipeline stages mutating the same item object.
            item_copy = copy.deepcopy(item)
            self.do_insert(item_copy)
        return item

    @staticmethod
    def do_insert(item_copy):
        """Best-effort insert of one row into dbms_table_info.

        Errors are printed, never raised, so a DB hiccup does not abort
        the whole crawl. Returns None.
        """
        day = datetime.now()
        try:
            # Parameterized query: values are bound by the driver,
            # never interpolated into the SQL string.
            insersql = "insert  into dbms_table_info (db_name,table_name,engine_name,table_row,table_size,gmt_create) values (%s,%s,%s,%s,%s,%s)"
            data = (
                item_copy['dbName'], item_copy['tableName'],
                item_copy['engineName'],
                item_copy['tableRow'], item_copy['tableSize'], day)
            db_connect = DbConnect()
            db_connect.execute(insersql, data)
            db_connect.commit_connect()
        except Exception as ex:
            # Fix: the original `print` / `ex` on separate lines was a broken
            # Py2->Py3 artifact that silently discarded the error.
            print(ex)


class DbFileInfoScrapyPipeline(object):
    """Persist items from the 'dbfileinfo' spider (DB file/disk usage rows)."""

    def process_item(self, item, spider):
        """Insert a deep copy of *item* when it comes from the 'dbfileinfo' spider.

        The original item is always returned unchanged so downstream
        pipelines still receive it.
        """
        if spider.name == 'dbfileinfo':
            # Deep-copy so the DB write cannot be affected by later
            # pipeline stages mutating the same item object.
            item_copy = copy.deepcopy(item)
            self.do_insert(item_copy)
        return item

    @staticmethod
    def do_insert(item_copy):
        """Best-effort insert of one DB-file-info row.

        Returns *item_copy* on success, None when the insert fails
        (the error is printed, never raised, so a DB hiccup does not
        abort the whole crawl).
        """
        day = datetime.now()
        try:
            # NOTE(review): the table name 'dbms_slowsql_show' looks like a
            # copy-paste from DbSlowSqlScrapyPipeline -- the column list
            # (db_name, ip, db_directory, total_size, ...) is file-info data,
            # not slow-SQL data. Confirm the intended target table before
            # relying on this insert. Also note item key 'userSize' feeds
            # column 'use_size'.
            insersql = "insert  into dbms_slowsql_show  (db_name,ip,db_directory,total_size,use_size,free_size,user_age,gmt_create) values (%s,%s,%s,%s,%s,%s,%s,%s)"
            data = (
                item_copy['dbName'], item_copy['ip'], item_copy['directory'],
                item_copy['totalSize'],
                item_copy['userSize'], item_copy['freeSize'],
                item_copy['userAge'], day)
            db_connect = DbConnect()
            db_connect.execute(insersql, data)
            db_connect.commit_connect()
            return item_copy
        except Exception as ex:
            # Fix: the original `print` / `ex` on separate lines was a broken
            # Py2->Py3 artifact that silently discarded the error.
            print(ex)
