#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
    log_uploader.py
    ~~~~~~~

    A LogUploader class to upload log to oss.

    :author: Kenneth Teng
    :copyright: (c) 2016, Tungee
    :date created: 2017-02-18
    :python version: 2.7
"""

import datetime
import json
import logging
import subprocess
import os
import re
import time
import copy

import requests
from oss_upload import upload_to_oss
from apscheduler.schedulers.blocking import BlockingScheduler
from config import config

'''
This file is the entry point we ultimately run.
config.py holds the configuration, and
oss_upload.py can be treated as a black box that needs no changes.
'''


class LogUploader(object):
    """Upload spider result/log files to OSS and trigger parse callbacks.

    Scans ``log_output_path`` for files named ``*.log.*``, uploads each one
    to OSS, then POSTs a callback to the operations API so a parse task is
    created for the uploaded file.  Files that complete the whole cycle are
    deleted locally.
    """

    # Files eligible for upload, e.g. "spider_a.log.20170218".
    # Compiled once (raw string) instead of re-matching a plain string
    # pattern on every call.
    LOG_FILE_RE = re.compile(r'^.+\.log\..+$')

    def __init__(
        self, log_output_path, op_conf
    ):
        """
        :param log_output_path: directory holding the spider log files
        :param op_conf: dict with 'host', 'port' and 'spider_id' keys,
                        used to build the callback API URL
        """
        self.log_output_path = log_output_path
        self.op_conf = op_conf
        # NOTE(review): this is the bare `logging` module, not a Logger
        # instance.  Kept as-is: under Python 2.7, module-level logging.info()
        # auto-configures a root handler, which a fresh getLogger() would not.
        self.logger = logging
        self.TMP_FAILED_STATS_PATH = 'tmp_failed_stats.json'
        self.TMP_FAILED_CALLBACK_PATH = 'tmp_failed_callback.json'
        self.init_log_file()

    def lock_file(self, file_path):
        """Create an empty ``<file_path>.lock`` marker file."""
        lock_file_path = file_path + '.lock'
        with open(lock_file_path, 'w+') as f:
            f.write('')

    def unlock_file(self, file_path):
        """Remove the ``<file_path>.lock`` marker file if present."""
        lock_file_path = file_path + '.lock'
        if os.path.exists(lock_file_path):
            os.remove(lock_file_path)

    def is_file_locked(self, file_path):
        """Return True when a ``<file_path>.lock`` marker file exists."""
        # Docstring fixed: the original said "Unlock a file" (copy-paste).
        return os.path.exists(file_path + '.lock')

    def get_log_files(self):
        """Return absolute paths of log files pending upload.

        A file qualifies when its name matches ``*.log.*`` and it is a
        regular file directly inside ``self.log_output_path``.
        """
        result = []
        for name in os.listdir(self.log_output_path):
            path = os.path.join(self.log_output_path, name)
            if self.LOG_FILE_RE.match(name) and os.path.isfile(path):
                result.append(path)
                self.logger.info('Found Log: ' + name)
                self.record_log_f.write('Found Log: {}\n'.format(name))
        return result

    def do_some_tj(self, log_file_path):
        """Append a "sent" statistics line for one uploaded data file.

        NOTE(review): this writes to the relative path ./data/ok/send.log,
        while init_log_file() derives its path from config['log_output_path'];
        the two only coincide when the process CWD matches — confirm.
        """
        tj_log_path = './data/ok/send.log'
        # `with` guarantees the handle is closed even if write() raises.
        with open(tj_log_path, 'a') as w_f:
            w_f.write('{} send {}\n'.format(log_file_path, datetime.datetime.now()))

    def init_log_file(self):
        """Open the send/record log files under <log_output_path>/ok.

        Raises Exception when the configured output directory is missing.
        The two handles stay open for the uploader's lifetime.
        """
        log_output_path = config['log_output_path']
        if not os.path.exists(log_output_path):
            raise Exception('{} not exists'.format(log_output_path))
        send_log_dir = os.path.join(log_output_path, 'ok')
        if not os.path.exists(send_log_dir):
            os.makedirs(send_log_dir)
        self.send_log_f = open(os.path.join(send_log_dir, 'send.log'), 'a')
        self.record_log_f = open(os.path.join(send_log_dir, 'record.log'), 'a')

    def _callback_with_retries(self, api, log_file_name, max_tries=3):
        """POST the parse-task callback; True when it answers HTTP 200.

        Retries up to ``max_tries`` times; network errors and non-200
        responses are logged and counted as a failed attempt.
        """
        for _ in range(max_tries):
            try:
                res = requests.post(
                    url=api,
                    data={
                        'log_oss_url': 'log/{}'.format(log_file_name),
                    },
                    timeout=30
                )
                if res.status_code == 200:
                    return True
                self.logger.warning(res.text)
                self.record_log_f.write(
                    'error_info: {}\t{}\n'.format(res.text, datetime.datetime.now()))
            except Exception as e:
                self.logger.exception(e)
                self.record_log_f.write(
                    'exception_info: {}\t{}\n'.format(e, datetime.datetime.now()))
        return False

    def run(self):
        """Upload every pending log file to OSS and fire parse callbacks.

        For each file found by get_log_files():
          1. upload it to OSS under ``log/<file name>``;
          2. POST the callback API (up to 3 attempts) to create a parse task;
          3. on success, record stats and delete the local file.
        Sleeps 4 minutes between files to throttle the loop.
        """
        self.logger.info('Load Dates Finished')
        self.record_log_f.write('start one loop send...{}\n'.format(datetime.datetime.now()))
        # The callback URL is loop-invariant: build it once, not per file.
        api = 'http://{}:{}/api/spider/{}/log/callback'.format(
            self.op_conf['host'], self.op_conf['port'], self.op_conf['spider_id']
        )
        for log in self.get_log_files():
            log_file_name = os.path.basename(log)
            self.record_log_f.write('log_file_name: {}\t{}\n'.format(log_file_name, datetime.datetime.now()))
            # Upload the data file to OSS first.
            if upload_to_oss(log, log_file_name):
                print('upload success: {}\t{}'.format(log, datetime.datetime.now()))
                if self._callback_with_retries(api, log_file_name):
                    if os.path.exists(log):
                        self.do_some_tj(log_file_name)
                        os.remove(log)
                    self.logger.info('Callback Success')
                    self.record_log_f.write(
                        'Callback Success: {}\t{}\n'.format(log_file_name, datetime.datetime.now()))
                else:
                    self.logger.info('Callback Failed')
                    self.record_log_f.write(
                        'Callback Failed: {}\t{}\n'.format(log_file_name, datetime.datetime.now()))
            else:
                print('upload failed: {}'.format(log))
                self.record_log_f.write(
                    'upload Failed: {}\t{}\n'.format(log_file_name, datetime.datetime.now()))
            # Throttle: wait 4 minutes before handling the next file.
            time.sleep(60 * 4)

        self.logger.info('Uploader Ended')




def create_parse_task():
    """Build a LogUploader from the global config and run one upload pass."""
    uploader = LogUploader(
        log_output_path=config['log_output_path'],
        op_conf=config['op'],
    )
    uploader.run()




if __name__ == '__main__':
    # Run a single upload pass.  To run periodically instead, use:
    #   scheduler = BlockingScheduler()
    #   scheduler.add_job(func=create_parse_task, trigger='cron', minute='*/3', id='dd_run')
    #   scheduler.start()
    create_parse_task()
