#!/usr/local/python/bin/python

import os
import time
import glob
import sys
import posixpath
import logging
import re
import datetime
import xml.etree.ElementTree as ET
from time import mktime
from subprocess import call

import UnRAR2
import ftputil

import sfv

# from M2Crypto import ftpslib
from ftplib import FTP_TLS

class Release:
    """Plain value object describing one release found on the remote site."""

    def __init__(self, remote_path, local_path, name, category, date):
        # Absolute path of the release directory on the FTP server.
        self.remote_path = remote_path
        # Directory where the release is (or will be) stored locally.
        self.local_path = local_path
        self.category = category
        self.name = name
        # datetime of the release (remote mtime or date parsed from a listing).
        self.date = date

def extract_date(input_line):
    """Scan *input_line* for a space-separated YYYY-MM-DD token.

    Returns the first token that parses as a date, as a
    ``datetime.datetime``.  When no token parses, returns
    ``datetime.datetime.min`` so callers comparing against a cut-off
    (``release_date <= from_date``) treat date-less lines as "too old"
    and skip them.  (The original returned the ``datetime.date`` class
    object itself, which is not a usable date value.)
    """
    for token in input_line.split(" "):
        try:
            return datetime.datetime.strptime(token, "%Y-%m-%d")
        except ValueError:
            # Not a date token; keep scanning.
            continue
    return datetime.datetime.min

def extract_path(input_line):
    """Return the first '/'-rooted path token in *input_line*, normalised.

    The path starts at the first '/' and runs to the next space, or to
    the end of the line when there is no trailing space.  Returns ""
    when the line contains no '/'.

    Bug fixed: when no space followed the path, ``find`` returned -1 and
    the slice ``input_line[start:-1]`` silently dropped the last
    character of the path.
    """
    start = input_line.find('/')
    if start == -1:
        return ""
    end = input_line.find(' ', start)
    if end == -1:
        # No space after the path: it runs to the end of the line.
        end = len(input_line)
    return posixpath.normpath(input_line[start:end])

def _search_releases(remote_host, search_term, category, from_date):
    """Run the server-side "site search" command and collect matches.

    Fixes two defects: the original called the undefined name
    ``get_releases_from_search2`` (NameError on first use) and used a
    Python-2-only ``print`` statement.

    ``category`` is currently unused; kept for signature compatibility.
    NOTE(review): reaches into ftputil's private ``_session`` to send a
    raw site command — confirm this is still supported by ftputil.
    """
    matching_releases = []
    search_output = remote_host._session.sendcmd("site search " + search_term)
    for rel in _get_releases_from_search(search_output, from_date):
        print(rel.name)
        matching_releases.append(rel)
    return matching_releases
    
def _get_releases_from_search(search_output, from_date):
    """Parse "site search" output into Release objects.

    Lines without a path are skipped, as are releases dated on or before
    *from_date*.

    Bug fixed: ``Release(...)`` was called with 4 positional arguments
    while ``Release.__init__`` takes 5 (remote_path, local_path, name,
    category, date) — a guaranteed TypeError.  The bare release name is
    now passed as both the local path and the name, matching how the
    second argument was evidently intended.
    """
    releases = []
    for line in search_output.split('\n'):
        full_release_name = extract_path(line)
        if not full_release_name:
            continue
        release_date = extract_date(line)
        if release_date <= from_date:
            continue
        release_name = posixpath.basename(full_release_name)
        # Category is the first path component, e.g. "/tv/Show" -> "tv".
        category = full_release_name[1:full_release_name.index('/', 1)]
        releases.append(Release(full_release_name, release_name, release_name, category, release_date))
    return releases
def get_releases_from_search(remote_host, search_pattern, category, from_date, local_destination, log_location):
    """List the remote *category* directory and return new matching releases.

    An entry qualifies when its name matches *search_pattern* (regex,
    anchored at the start via re.match), its remote mtime is strictly
    after *from_date*, and no "[OK]-<name>.log" marker exists yet in
    *log_location*.  Each match becomes a Release whose local_path is
    "<local_destination>/<YYYY-MM-DD>_<name>".
    """
    releases = []
    remote_host.chdir("/")
    # Move into the category folder on the server.
    remote_host.chdir(category)

    entries = remote_host.listdir(remote_host.curdir)
    main_logger.info("Looking for pattern '%s'" % (search_pattern))
    for entry in entries:
        if not re.match(search_pattern, entry):
            continue
        # Too old?
        release_date = datetime.datetime.fromtimestamp(remote_host.stat(entry).st_mtime)
        if release_date <= from_date:
            continue
        # Already downloaded? (an [OK] marker log exists)
        if os.path.exists(os.path.join(log_location, "[OK]-%s.log" % (entry))):
            continue
        local_dir = os.path.join(local_destination, release_date.strftime("%Y-%m-%d") + "_" + entry)
        remote_dir = posixpath.join(remote_host.getcwd(), entry)
        releases.append(Release(remote_dir, local_dir, entry, category, release_date))
        main_logger.info("Found match for '%s'" % (entry))
    return releases

SIZE_TRANSFERED = 0
def test_callback(chunk):
    global SIZE_TRANSFERED
    SIZE_TRANSFERED += len(chunk)
    
class SSLFTPSession(FTP_TLS):
    # ftputil session factory: an explicit-FTPS session that secures the
    # data channel (PROT P) immediately after a plaintext-over-TLS login.

    def __init__(self, host, userid, password, port):
        # Connect to host:port, authenticate, then switch the data
        # connection to TLS so file transfers are encrypted.
        FTP_TLS.__init__(self)
        self.connect(host,port)
        self.login(userid, password)           # authenticate with the given credentials
        self.prot_p()                          # encrypt the data channel from here on
        
def get_release_name(release_full_path):
    """Return the last path component of *release_full_path*, ignoring any trailing slash."""
    normalized = posixpath.normpath(release_full_path)
    return posixpath.basename(normalized)

def get_file_for_ext(files, ext):
    """Return the first name in *files* whose extension equals *ext*
    (e.g. ".sfv"), or "" when none matches."""
    matches = (name for name in files if os.path.splitext(name)[1] == ext)
    return next(matches, "")

# Running byte counter shared with download_file's logging.
transferred_size = 0
def transfer_callback(chunk):
    """ftputil download callback: add this chunk's size to the running total."""
    global transferred_size
    transferred_size = transferred_size + len(chunk)
    
def download_file(remote_host, remote_source, local_target, logger):
    """Download one file via *remote_host* and log size, time and speed.

    Returns the number of bytes transferred.

    Bugs fixed: the original zeroed the module-global counter *before*
    returning it, so it always returned 0; it used time.clock(), which
    was removed in Python 3.8; and the shared global made nested or
    concurrent downloads miscount.  A closure-local counter avoids all
    three.
    """
    byte_count = [0]  # one-element list so the nested callback can mutate it

    def _on_chunk(chunk):
        byte_count[0] += len(chunk)

    start = time.time()
    remote_host.download(remote_source, local_target, 'b', callback=_on_chunk)
    elapsed = time.time() - start
    if elapsed <= 0:
        elapsed = 1e-6  # guard against division by zero on sub-resolution transfers
    size = byte_count[0]
    mb = size / (1024.0 * 1024.0)
    logger.info("downloaded '%s' (%4.2fMB) in %4.2f seconds (avg %4.2f MB/s)" % (local_target, mb, elapsed, mb / elapsed))
    return size

def extract_file(file_name, logger):
    """Extract RAR archive *file_name* into the current directory.

    Bugs fixed: the original ignored its *logger* parameter and wrote to
    the module-global main_logger; it logged the raw byte count under a
    "MB" label; and it used time.clock(), removed in Python 3.8.
    """
    start = time.time()
    logger.info("Extracting '%s'" % (file_name))
    UnRAR2.RarFile(file_name).extract()
    elapsed = time.time() - start
    size_mb = os.path.getsize(file_name) / (1024.0 * 1024.0)
    logger.info("Extracted '%s' (%4.2fMB) in %4.2f seconds" % (file_name, size_mb, elapsed))
    
def open_log(log_file_name):
    """Return a DEBUG-level logger that appends to *log_file_name*.

    The logger is keyed by the file name so each log file gets its own
    logger object.  Bug fixed: the original used the fixed name
    'simple_example' for every call, so the main log and every
    per-release log shared one logger — handlers accumulated (duplicate
    lines in every file) and close_log() on a release log also detached
    the main log's handler.
    """
    logger = logging.getLogger(log_file_name)
    logger.setLevel(logging.DEBUG)
    handler = logging.FileHandler(log_file_name)
    handler.setFormatter(logging.Formatter('%(asctime)s %(message)s'))
    logger.addHandler(handler)
    return logger
    
def close_log(logger):
    """Detach, flush and close every handler attached to *logger*."""
    # Iterate over a snapshot: removeHandler mutates logger.handlers.
    for handler in list(logger.handlers):
        logger.removeHandler(handler)
        handler.flush()
        handler.close()
        
def extraction_completed(release):
    """Post-extraction hook: ask Synology's media indexer to index the release.

    Bug fixed: the original passed a single argv element "-A <path>".
    Without a shell there is no word splitting, so synoindex received
    one unknown option instead of a flag and a path; they must be
    separate argv entries.
    """
    call(["synoindex", "-A", release.local_path])
    
def download_release(remote_host, log_location, release, local_destination, on_extraction_completed=None):
    """Download, verify (via .sfv) and extract one release.

    Builds <local_destination>/<YYYY-MM-DD>_<name>, chdirs into it,
    downloads the release's .nfo/.sfv, then downloads every file the
    .sfv check reports as missing/bad, extracts the first .rar, and
    finally renames the "[DOWNLOADING]-<name>.log" marker to
    "[OK]-<name>.log" so the release is skipped on later runs.
    Always returns True when it completes.

    NOTE(review): release.local_path is ignored; the local directory is
    recomputed here from release.date and release.name.
    """
    main_logger.info("Downloading %s..." % (release.name))
    #ensure log location exists
    if not os.path.exists(log_location):
        os.makedirs(log_location)

    #ensure local path exists
    full_local_path = os.path.join(local_destination, release.date.strftime("%Y-%m-%d") + "_" + release.name)
    if not os.path.exists(full_local_path):
        os.makedirs(full_local_path)

    # Work from the release directory: downloads, sfv checks and
    # extraction below all use bare file names relative to the cwd.
    os.chdir(full_local_path)

    #create log file; the [DOWNLOADING] prefix marks an in-progress run
    log_file_name = os.path.join(log_location, "[DOWNLOADING]-%s.log" % (release.name))
    logger = open_log(log_file_name)

    #Move to the remote folder
    remote_host.chdir(release.remote_path)

    #Get .nfo and .sfv files (skipped if already present locally)
    names = remote_host.listdir(remote_host.curdir)
    for extension in [".sfv", ".nfo"]:
        file_name = get_file_for_ext(names, extension)
        if file_name and not os.path.exists(file_name):
            download_file(remote_host, file_name, file_name, logger)

    # Get files that need to be transferred based on the sfv file
    files_to_transfer = []
    local_sfv_file = get_file_for_ext(names, ".sfv")

    #check if a .sfv has been found
    if local_sfv_file:
        # Download-and-recheck loop: presumably get_files_to_transfer
        # returns the files whose checksum does not yet match, so failed
        # transfers are retried until the set is empty.  NOTE(review):
        # a file that can never pass its checksum would loop forever —
        # confirm against the sfv module.
        files_to_transfer = sfv.get_files_to_transfer(local_sfv_file)
        while len(files_to_transfer) > 0:
            for file_name in files_to_transfer:
                download_file(remote_host, file_name, file_name, logger)
            files_to_transfer = sfv.get_files_to_transfer(local_sfv_file)

        #Extract only the first .rar: multi-part archives extract from
        #the first volume, so one call handles the whole set.
        for file_name in glob.glob("*.rar"):
            extract_file(file_name, logger)
            break

        if on_extraction_completed is not None:
            on_extraction_completed(release)
    else:
        logger.info("no .sfv file found")

    #Close log file
    close_log(logger)

    # Completion marker: get_releases_from_search checks for the [OK]
    # log to skip already-downloaded releases.
    os.rename(log_file_name, log_file_name.replace("[DOWNLOADING]", "[OK]"))
    return True

####################################################################################
#    MAIN
####################################################################################
# NOTE(review): credentials are hard-coded in plain text in the script;
# consider moving them to a config file or environment variables.
userid="sc0ut"
password="tf2r0ck5"
host_name="leech.darksta.in"
port=65322

#local_download_location = "D:/tester/test"#sys.argv[1]
#log_location = "D:/tester/test/logs"#sys.argv[2]
#xml_file = "D:/Auto-dl-py/releases.xml"#sys.argv[3]

# Usage: script.py <download_dir> <log_dir> <releases_xml>
local_download_location = sys.argv[1]
log_location = sys.argv[2]
xml_file = sys.argv[3]

# One dated main log per day.
main_logger = open_log(os.path.join(log_location, "%s.log" % (datetime.date.today().strftime("%Y-%m-%d") )))
main_logger.info("Log initialized")
if os.path.exists(xml_file):
    # releases.xml holds <search pattern=... category=... from_date=...>
    # elements; from_date is advanced and persisted after each download.
    tree = ET.parse(xml_file)
    root = tree.getroot()
    main_logger.info("Connecting to %s" % (host_name))
    with ftputil.FTPHost(host_name, userid, password, port,
                         session_factory=SSLFTPSession) as remote_host:
        main_logger.info("Connected to %s" % (host_name))
        for search in root.findall("./search"):
            for release in get_releases_from_search(remote_host, search.get('pattern'), search.get('category'), datetime.datetime.strptime(search.get('from_date'), "%Y-%m-%d"), local_download_location, log_location):
                if download_release(remote_host, log_location, release, local_download_location, on_extraction_completed=extraction_completed):
                    # Persist progress immediately so an interrupted run
                    # does not re-download completed releases.
                    search.set('from_date', release.date.strftime("%Y-%m-%d"))
                    tree.write(xml_file)





