
# ref: https://www.cnblogs.com/ccxikka/p/9637545.html
from cffi import FFI
import os
import sys
import queue
import threading
import logging
import time
import re
import json

from unqlite import UnQLite 
from cfg_reader import cfg_reader
import utility

ffi = FFI()

# C interface exported by the curl-based FTP upload DLL (LIB_NAME below).
# global_init/global_cleanup bracket the whole process; easy_init/easy_cleanup
# bracket one worker thread's curl "easy" handle; upload_file2 returns 0 on
# success, non-zero on failure.
ffi.cdef("""
    void global_init(const char* ip, const char* username, const char* passwd);
    int easy_init();
    void easy_cleanup();
    void global_cleanup();
    int upload_file(const char *remotepath, const char *localpath);
    int upload_file2(const char *remotepath, const char *localpath, int timeout, int tries, int overlap);
""")

LIB_NAME = 'curl_ftp_upload.dll'   # native upload library, searched for at startup
PD_UPLOAD_DB = 'fd_upload.db'      # UnQLite database recording per-file upload status
TRAVERSE_INTERVAL = 60*60  # poll the root directories once per hour

###########################
class PDUpload():
    """Uploads local files to an FTP server via a native curl wrapper DLL.

    A pool of worker threads consumes file descriptions from ``file_queue``;
    every attempt is recorded in an UnQLite database (status '1' = success,
    '0' = failure) keyed by local path.  Failed uploads are re-queued on
    ``retry_queue`` for a dedicated retry thread.  A traversal thread
    periodically rescans the configured root folders and enqueues new,
    modified, or previously-failed files.
    """

    def __init__(self):
        self.ip = None        # cdata buffers kept alive for the C library
        self.username = None
        self.passwd = None
        self.db = UnQLite(PD_UPLOAD_DB)
        self.threads = [None]*10  # upload worker thread pool

        self.retry_queue = queue.Queue()  # retry queue: holds file info plus retry count
        self.file_queue = queue.Queue()   # main upload work queue
        self.retry_thread_started = False
        self.traverse_timer = None
        self.retry_thread = None
        self.is_running = None     # becomes True once the system is up and working

        # FTP-related settings
        self.lib = None
        self.timeout = 60  # per-file upload timeout (seconds) in the ftp library
        self.tries = 3     # reconnect attempts inside the ftp library
        self.overlap = 0   # whether to overwrite the remote file after a failed upload

    def load_library(self):
        """Locate and dlopen the curl FTP DLL; return True on success."""
        lib_path = LIB_NAME
        if not os.path.exists(lib_path):
            # Not next to the script: search recursively below the cwd.
            lib_path = utility.search_file(".", LIB_NAME)
        if lib_path:
            self.lib = ffi.dlopen(lib_path)
            return True
        return False

    # The retry upload runs in its own dedicated thread.
    def start_retry_upload(self):
        self.retry_thread = threading.Thread(target=self.retry_upload, daemon=True)
        self.retry_thread.start()

    # Periodic folder-scan thread.
    def start_traverse_rootfolder(self):
        self.traverse_rootfolder_thread = threading.Thread(
            target=self.traverse_rootfolder, daemon=True)
        self.traverse_rootfolder_thread.start()

    def retry_upload(self):
        """Retry-thread body: re-upload failed files up to self.tries times."""
        ret = self.lib.easy_init()
        if ret != 0:
            return
        self.retry_thread_started = True

        while True:
            f = self.retry_queue.get()
            if f is None:  # shutdown sentinel from stop()
                break

            try_count = f["try_count"]
            localpath = f["localpath"]
            remotepath = f["remotepath"]
            if not os.path.exists(localpath):
                continue

            if try_count == self.tries:
                # Retry budget exhausted; give up on this file.
                logging.info('file: %s retry upload failed!', localpath)
                continue

            remote_path = ffi.new("char[]", remotepath.encode())
            local_path = ffi.new("char[]", localpath.encode("gbk"))  # gbk avoids mojibake in Chinese paths
            ret = self.lib.upload_file2(remote_path, local_path,
                                        self.timeout, self.tries, self.overlap)
            if ret == 0:
                m_time = os.path.getmtime(localpath)
                m_time = utility.timestamp_to_time(m_time)
                with self.db.transaction():
                    self.db[localpath] = json.dumps({'status':'1', 'mtime':m_time})  # '1' marks success
                logging.info('file: %s retry upload succeed!', localpath)
            else:
                f["try_count"] = try_count + 1
                self.retry_queue.put(f)
            time.sleep(1)  # throttle to reduce CPU usage

        # BUG FIX: in the original these teardown statements sat INSIDE the
        # while-loop, so after the very first item the retry queue was
        # cleared and the curl easy handle destroyed.  They belong here,
        # after the loop exits.
        self.retry_thread_started = False
        with self.retry_queue.mutex:
            self.retry_queue.queue.clear()
        self.lib.easy_cleanup()

    def upload_file(self):
        """Worker-thread body: upload queued files and record status in db."""
        ret = self.lib.easy_init()
        if ret != 0:
            logging.error('lib.easy_init failed')
            return

        while True:
            f = self.file_queue.get()
            if f is None:  # shutdown sentinel from stop()
                break

            localpath = f["localpath"]
            remotepath = f["remotepath"]
            if not os.path.exists(localpath):
                continue

            m_time = os.path.getmtime(localpath)
            m_time = utility.timestamp_to_time(m_time)

            remote_path = ffi.new("char[]", remotepath.encode())
            local_path = ffi.new("char[]", localpath.encode("gbk"))  # gbk avoids mojibake in Chinese paths
            ret = self.lib.upload_file2(remote_path, local_path,
                                        self.timeout, self.tries, self.overlap)
            if ret != 0:
                with self.db.transaction():
                    self.db[localpath] = json.dumps({'status':'0', 'mtime':m_time})  # '0' marks failure
                if self.retry_thread_started:
                    self.retry_queue.put({"try_count":0, "localpath":localpath, "remotepath":remotepath})
                logging.error('file: %s upload failed!', localpath)
            else:
                with self.db.transaction():
                    self.db[localpath] = json.dumps({'status':'1', 'mtime':m_time})  # '1' marks success
                logging.info('file: %s upload succeed!', localpath)

        self.lib.easy_cleanup()

    def traverse_folder(self, path, localpath, remotepath):
        """Recursively scan one folder and enqueue files that need uploading.

        Args:
            path: folder currently being scanned (initially == localpath).
            localpath: local root folder; stripped off to build remote paths.
            remotepath: FTP server base path.
        """
        project_name_rule = cfg_reader.project_name_rule or "/id_"
        project_check = cfg_reader.project_check

        files = os.listdir(path)  # every entry in the folder

        for f in files:
            if path.endswith("/"):
                filepath = path + f
            else:
                filepath = path + "/" + f

            if not os.path.isdir(filepath):
                # Remote path = ftp base + path relative to the local root.
                re_path = remotepath + filepath[len(localpath):]
                logging.debug("++++file: %s", filepath)

                # Only sync files matching the project naming rule (when enabled).
                if not project_check or re.search(project_name_rule, filepath) is not None:
                    if filepath not in self.db:  # brand-new file
                        self.file_queue.put({"localpath":filepath, "remotepath":re_path})
                    else:
                        info = json.loads(self.db[filepath].decode())
                        m_time = utility.timestamp_to_time(os.path.getmtime(filepath))
                        # Re-upload when the last attempt failed or the file changed.
                        if info['status'] == "0" or info['mtime'] != m_time:
                            self.file_queue.put({"localpath":filepath, "remotepath":re_path})
            else:
                logging.debug("++++dir: %s", filepath)
                time.sleep(0.01)  # brief yield so deep trees don't peg the CPU
                self.traverse_folder(filepath, localpath, remotepath)

    def traverse_rootfolder(self):
        """Periodic-thread body: rescan every configured root folder.

        There may be several root folders (cfg_reader.localpaths).
        """
        traverse_interval = cfg_reader.traverse_interval or TRAVERSE_INTERVAL
        time_plan_enable = cfg_reader.time_plan_enable

        while True:
            # Drop any stale work before a fresh scan.
            with self.file_queue.mutex:
                self.file_queue.queue.clear()
            with self.retry_queue.mutex:
                self.retry_queue.queue.clear()
            remotepath = cfg_reader.remotepath.strip()

            if time_plan_enable:  # honour the configured time plan
                if in_the_time_plan():
                    logging.info("In the time plan!")
                else:
                    logging.info("Not in the time plan!")
                    time.sleep(int(traverse_interval))
                    continue

            for localpath in cfg_reader.localpaths:
                localpath = localpath.strip()
                if localpath == '':
                    continue
                self.traverse_folder(localpath, localpath, remotepath)
            time.sleep(int(traverse_interval))

    def start(self):
        """Load the native library and config, then launch all threads."""
        try:
            logging.debug("Start upload file!!!!")

            # Load the native upload library.
            if not self.load_library():
                logging.error("load ftp library %s failed", LIB_NAME)
                self.is_running = False
                return

            ret = cfg_reader.config_prase("pd_upload.cfg")
            if not ret:
                logging.error("read pd_upload.cfg failed!")
                self.is_running = False
                return
            logging.info("read pd_upload.cfg success")

            # Keep the cdata buffers on self so they outlive global_init.
            self.ip = ffi.new("char[]", cfg_reader.ip.encode())
            self.username = ffi.new("char[]", cfg_reader.username.encode())
            self.passwd = ffi.new("char[]", cfg_reader.passwd.encode())
            self.lib.global_init(self.ip, self.username, self.passwd)  # global init

            self.start_retry_upload()         # retry thread (config-dependent)
            self.start_traverse_rootfolder()  # periodic traversal thread

            # Start the upload worker pool.
            for i in range(len(self.threads)):
                self.threads[i] = threading.Thread(target=self.upload_file, daemon=True)
                self.threads[i].start()
            self.is_running = True
        except Exception:
            # BUG FIX: the original bare `except:` silently swallowed every
            # error (including KeyboardInterrupt) and could call
            # global_cleanup() on a still-None lib.
            logging.exception("PDUpload.start failed")
            if self.lib is not None:
                self.lib.global_cleanup()
            self.is_running = False

    def stop(self):
        """Ask all worker threads to exit and release native resources.

        BUG FIX: the original called ``self.traverse_timer.stop()`` although
        ``traverse_timer`` is only ever None (immediate AttributeError), and
        then joined the infinite-loop worker threads without any shutdown
        signal, hanging forever.  We now push one None sentinel per consumer
        and join with a bounded timeout; the traversal thread is a daemon and
        dies with the process.
        """
        if self.retry_thread_started:
            self.retry_queue.put(None)
        for _ in self.threads:
            self.file_queue.put(None)

        if self.retry_thread is not None:
            self.retry_thread.join(timeout=self.timeout)
        for t in self.threads:
            if not t:
                continue
            t.join(timeout=self.timeout)
        if self.lib is not None:
            self.lib.global_cleanup()
        self.is_running = False

def in_the_time_plan(time_plan=None):
    """Return True when the current local hour falls inside the time plan.

    Args:
        time_plan: optional list of "[min-max]" hour-range strings (e.g.
            "[8-18]").  When None, ``cfg_reader.time_plan`` is used, which
            keeps the original zero-argument call backward compatible.

    An empty or missing plan means "always allowed".  Range bounds are
    inclusive on both ends.
    """
    if time_plan is None:
        time_plan = cfg_reader.time_plan
    current_hour = int(time.strftime("%H", time.localtime()))

    if not time_plan:
        return True
    for time_period in time_plan:
        if time_period == '':
            continue
        r = re.search(r"\[(.*)\-(.*)\]", time_period)
        if r is None:
            # BUG FIX: a malformed entry used to raise AttributeError on
            # r.group(); now it is logged and skipped.
            logging.warning("ignoring malformed time plan entry: %s", time_period)
            continue
        min_hour = int(r.group(1))
        max_hour = int(r.group(2))
        if min_hour <= current_hour <= max_hour:
            return True
    return False
    
if __name__ == "__main__":
    utility.initLogging("pd_upload.log")
    pd_upload = PDUpload()
    pd_upload.start()
    while pd_upload.is_running != False:
        time.sleep(10)
    pd_upload.stop()
    sys.exit(0)
    logging.error("app exit!")  
