#!/usr/bin/env python2
# -*- coding: utf-8 -*-

import os
import csv
from Ump import utils
from Ump.objs.db import models
from Ump.objs.session_wrapper import enable_log_and_session, _sw
from Ump.schedule.jobs.base import JobBase
from Ump.common import exception
from Ump.schedule.utils import log_info_run

class ReportIopsJob(JobBase):
    """Scheduled job that exports IOPS/throughput history to a CSV report.

    Depending on ``report_type`` the samples are pulled from the volume,
    pool, host or (default) cluster-wide IOPS tables, down-sampled to the
    requested interval, summarized (max / min / average) and written to
    ``<install_path>/tmp/ump/report/<report_name>.csv``.
    """

    def __init__(self):
        # Directory containing this module (kept for parity with other jobs).
        self.abs_path = os.path.dirname(os.path.realpath(__file__))
        # Every generated CSV report is placed under this directory.
        self.report_path = '%s/tmp/ump/report/' % utils.install_path
        super(ReportIopsJob, self).__init__()

    @log_info_run()
    # @enable_log_and_session(resource='statistics', event='create')
    def run(self, context):
        """Generate one IOPS report described by ``context``.

        :param context: dict of the form::

               {'report_name': 'user-defined name',
                'report_type': 'volume' | 'pool' | 'host' | anything else
                               (anything else yields a cluster-wide report),
                'type_id': id of the volume/pool/host (when applicable),
                'start_time': begin of the reported period,
                'end_time': end of the reported period,
                'interval_time': sampling interval in seconds (rows are
                                 persisted every 5 seconds),
                'schedule_job_id': id of the owning ScheduleJob,
                'owner': report owner name}

        :raises exception.ScheduleJobFailed: when no data exists in the
            requested window (the schedule job is marked failed first).
        """
        report_type = context.get('report_type')
        from_time = context.get('start_time')
        end_time = context.get('end_time')
        interval_time = context.get('interval_time')
        schedule_job_id = context.get('schedule_job_id')
        owner = context.get('owner')
        report_name = 'job_id_%s_%s' % (schedule_job_id,
                                        context.get('report_name'))
        schedule_job = _sw.get_one(models.ScheduleJob,
                                   id_or_spec=schedule_job_id)

        # NOTE(review): the SQL below is assembled by string interpolation.
        # The values come from the internal scheduler rather than from end
        # users, but parameterized queries would still be safer.
        time_filter = "created_at > '%s' and created_at < '%s'" % \
                      (from_time, end_time)

        # Resolve the model class, the statement and the report title for
        # the requested report type; the rest of the pipeline is shared.
        if report_type == 'volume':
            volume = models.Volume.query.get(context.get('type_id'))
            model_cls = models.VolumeIops
            stmt = "select * from volume_iops where %s and volume_id = %s" % \
                   (time_filter, volume.id)
            record_title = 'IOPS and Throughput Report table for volume [%s] from %s to %s' % \
                       (volume.path, from_time, end_time)
        elif report_type == 'pool':
            pool = _sw.db_pool(context.get('type_id'))
            model_cls = models.PoolIops
            stmt = "select * from pool_iops where %s and pool_id = %s" % \
                   (time_filter, pool.id)
            record_title = 'IOPS and Throughput Report table for pool [%s] from %s to %s' % \
                       (pool.path, from_time, end_time)
        elif report_type == 'host':
            host = _sw.db_host(context.get('type_id'))
            model_cls = models.HostIops
            stmt = "select * from host_iops where %s and host_id = %s" % \
                   (time_filter, host.id)
            record_title = 'IOPS and Throughput Report table for host [%s %s] from %s to %s' % \
                       (host.name, host.ip, from_time, end_time)
        else:
            # Unknown/absent type: report over every volume in the cluster.
            model_cls = models.VolumeIops
            stmt = "select * from volume_iops where %s" % time_filter
            record_title = 'IOPS and Throughput Report table for the cluster from %s to %s' % \
                           (from_time, end_time)

        rows = model_cls.query.from_statement(stmt).all()
        # Samples are stored every 5 seconds, so keep one row per requested
        # interval.  max(..., 1) guards against a zero slice step (the old
        # code raised "slice step cannot be zero" for intervals below 5).
        step = max(int(interval_time) // 5, 1)
        iops_list = [[x.created_at, x._iops, x._read, x._write,
                      x._in_out, x._in, x._out] for x in rows][::step]

        if not iops_list:
            schedule_job.update({'is_finished': True, 'status': 'failed'})
            raise exception.ScheduleJobFailed('No data can be reported!')

        statistics_result = self._do_statistics(iops_list)
        self._write_csvfile(
            report_name,
            record_title,
            iops_list,
            statistics_result,
        )

        models.ScheduleJobReport({
            'name': report_name,
            'schedule_job_id': schedule_job_id,
            'owner': owner,
            'type': 'iops',
        }).save()
        schedule_job.update({'is_finished': True, 'status': 'success'})

    def _write_csvfile(self, file_name, record_title, records,
                       statistics_result):
        """Write the report to ``<report_path>/<file_name>.csv``.

        Layout: a title row, the column-header row, the max/min/average
        summary rows, then one row per sample.
        """
        # os.makedirs instead of shelling out to ``mkdir`` -- the old code
        # was shell-injectable through the path and could not create
        # missing parent directories.
        if not os.path.exists(self.report_path):
            try:
                os.makedirs(self.report_path)
            except OSError:
                # Directory appeared concurrently; any real problem will
                # surface on open() below.
                pass
        export_file = os.path.join(self.report_path, '%s.csv' % file_name)
        # 'wb' is the correct csv mode on Python 2 (see the shebang).
        with open(export_file, 'wb') as csv_file:
            writer = csv.writer(csv_file)
            writer.writerow([record_title])
            writer.writerow(('Date_and_time', 'iops', 'read', 'write', 'throughput(MB/s)', 'in(MB/s)', 'out(MB/s)'))
            for summary_row in statistics_result:
                writer.writerow(summary_row)
            for record in records:
                writer.writerow(record)

    def _do_statistics(self, content):
        """Summarize the sampled rows column by column.

        :param content: non-empty list of rows ``[created_at, iops, read,
            write, throughput, in, out]`` as produced by :meth:`run`.
        :returns: tuple ``(max_row, min_row, average_row)``; each row is a
            list whose first element is the label.
        """
        # Transpose rows -> columns and drop the timestamp column.
        metric_columns = zip(*content)[1:7]
        sample_count = len(content)

        statistics_max = ['Max'] + [max(col) for col in metric_columns]
        statistics_min = ['Min'] + [min(col) for col in metric_columns]

        # The first three counters (iops/read/write) keep the original
        # division (integer on Python 2); the MB/s columns are rounded
        # floats.  The old code applied float() AFTER the division --
        # ``round(float(sum(col) / n), 3)`` -- which silently truncated
        # integer columns; divide by a float instead.
        statistics_average = ['Average']
        for idx, col in enumerate(metric_columns):
            if idx < 3:  # iops / read / write
                statistics_average.append(sum(col) / sample_count)
            else:        # throughput / in / out, reported in MB/s
                statistics_average.append(
                    round(sum(col) / float(sample_count), 3))

        return (statistics_max, statistics_min, statistics_average)


if __name__ == "__main__":
    # Ad-hoc manual run: report pool 1's IOPS between the two timestamps.
    demo_context = {
        'report_name': '1709',
        'start_time': '2017-3-12 14:30',
        'end_time': '2017-3-15 17:09',
        'interval_time': 5,
        'report_type': 'pool',
        'type_id': 1,
        'owner': 'cinder',
        'schedule_job_id': 25,
    }
    ReportIopsJob().run(context=demo_context)

