'''
Created on 13.02.2011

@author: public
'''

from struct import SyncStats
import auth
import const
import hashlib
import logging.config
import os
import sys
import time
import urllib
import util
import threading

__logger = logging.getLogger('sync')

def __readWebUrls(urlsFile):
    """Read the web URLs file and return its lines as a list.

    urlsFile -- path to a text file with one URL per line.
    Returns the raw lines (trailing newlines included), exactly as
    readlines() yields them.
    """
    __logger.debug('reading URLs list from [%s]' % urlsFile)

    # 'with' guarantees the handle is closed; the original leaked it
    with open(urlsFile) as urls:
        return urls.readlines()
    

def __readFilesMD5(dir):
    """Compute the MD5 hex digest of every file in *dir*.

    dir -- directory whose immediate entries are all hashed.
    Returns a list of hex-digest strings, one per file, in
    os.listdir() order.
    """
    __logger.debug('reading MD5s of files from [%s]' % dir)

    resList = []

    for filename in os.listdir(dir):
        # 'rb': torrent files are binary, and text mode would corrupt
        # the digest on platforms with newline translation.
        # 'with' closes the handle even if read() raises.
        path = os.path.join(dir, filename)
        with open(path, 'rb') as tmpFile:
            resList.append(hashlib.md5(tmpFile.read()).hexdigest())

    __logger.debug('there are [%s] MD5s read' % len(resList))

    return resList

def __download(opener, url, loggedOutToken):
    """Download a torrent file from *url* through *opener*.

    Returns the raw torrent contents, or None when the response is not
    a valid torrent or the download fails.  If the response contains
    *loggedOutToken* the session has expired: opener.needsReauth is set
    to 1 and None is returned so the caller can re-authenticate.
    """
    __logger.debug('downloading [%s]' % url)

    # the file id is the last component of the '...=<id>' query string
    webFileId = url.split('=')[-1]
    __logger.debug('web file id [%s]' % webFileId)

    data = urllib.urlencode({'bb_dl': webFileId})

    try:
        webFile = opener.open(url, data)
        try:
            contents = webFile.read()
        finally:
            # close even when read() raises (the original leaked here)
            webFile.close()

        if contents.startswith('d8:announce'):  # typical torrent file start
            __logger.debug('downloaded web file is the valid torrent')
            return contents
        elif contents.rfind(loggedOutToken) != -1:
            __logger.debug('@@@ logged out token found. Seems the re-authorization is needed')
            opener.needsReauth = 1
        else:
            #__logger.debug('not torrent contents:\n%s' % contents)
            __logger.debug('*** the downloaded file is not a torrent. Just wait a little :) ')
    except Exception:
        # narrowed from the original bare 'except:' so KeyboardInterrupt
        # and SystemExit are no longer swallowed
        __logger.debug('*** downloading problem: %s' % sys.exc_info()[1])
    
def __checkingDuplicates(lst):
    '''Remove duplicated elements from *lst*.

    Unlike the original list(set(...)) version, this keeps the first
    occurrence of each element in its original position, so the URL
    processing order stays stable instead of being scrambled by the
    set's arbitrary iteration order.
    '''
    __logger.debug('checking duplicates in MD5 list')

    seen = set()
    resList = []
    for item in lst:
        if item not in seen:  # O(1) membership test, O(n) overall
            seen.add(item)
            resList.append(item)

    duplicatesCount = len(lst) - len(resList)
    if duplicatesCount > 0:
        __logger.debug("%d duplicates are found and removed from list" % duplicatesCount)

    return resList

def __stripName(content, defaultRes='not found'):
    '''Extract the torrent's "name" field from raw torrent *content*.

    content    -- raw bencoded torrent data as a string.
    defaultRes -- value returned when no 'name' token is present.

    Relies on the bencoded layout '...name<len>:<title>...': takes the
    text after the last 'name' token, then the piece between the first
    two ':' separators, dropping the trailing two characters.
    '''
    fileTitle = defaultRes

    split1 = content.split('name')
    # str.split() always yields at least one element, so the original
    # 'len(split1) > 0' test was always true and content without any
    # 'name' token got mangled instead of falling back to defaultRes;
    # only 2+ parts mean the token was actually found
    if len(split1) > 1:
        fileTitle = split1[-1]
        split2 = fileTitle.split(':')
        if len(split2) > 2:
            fileTitle = split2[1]
            fileTitle = fileTitle[:-2]

    return fileTitle

def __processItem(stats, opener, url, loggedOutToken, md5List):
    '''Processing single web file item on URL with opener.
    MD5 of web file checked on existence in md5List.
    If file not found in MD5 list - saving to target folder.

    NOTE(review): stats counters are incremented from multiple worker
    threads without a lock - appears to rely on the GIL; confirm.

    results
     2 - web file MD5 NOT exists in md5 list (file saved)
     1 - web file MD5 exists in md5 list (no changes)
    -1 - just downloading error
    -2 - reauthorization needed
    '''

    webContent = __download(opener, url, loggedOutToken)

    if webContent is None:
        stats.errorsThrown += 1

        if opener.needsReauth:
            __logger.debug('exiting for re-authorization')
            return -2

        return -1

    __logger.debug('calculating MD5. content length [%d]' % len(webContent))
    md_5 = hashlib.md5(webContent).hexdigest()

    __logger.debug('MD5: [%s]' % md_5)
    if md_5 in md5List:
        fndPos = md5List.index(md_5)
        __logger.debug('web file has no changes and found at [%s] position of source list' % fndPos)
        stats.filesWithoutChanges += 1
        return 1
    else:
        fileName = url.split('=')[-1]

        targetFolder = util.readConfigProperty('folders', 'target')
        targetPath = "%s/[webFileSync]%s.torrent" % (targetFolder, fileName)

        sourceFolder = util.readConfigProperty('folders', 'source')
        sourcePath = "%s/[webFileSync]%s.torrent" % (sourceFolder, fileName)

        fileTitle = __stripName(webContent)

        __logger.debug('^^^^^^ web file is updated. Saving to %s' % targetPath)
        __logger.info('^^^^^^ "%s" is updated. Saving...' % fileTitle)

        # 'wb': torrents are binary data, text mode would corrupt them
        # on newline-translating platforms; 'with' guarantees the
        # handles are closed even if write() fails
        for path in (targetPath, sourcePath):
            with open(path, 'wb') as localFile:
                localFile.write(webContent)

        stats.filesUpdated += 1

        return 2

def runSync(opener):
    '''Main sync task. Reads config and the URLs list to sync with,
    downloads each web file in its own thread and compares its MD5
    against the MD5s of the already-saved local files; changed files
    get re-saved by the workers.
    '''
    __logger.debug('--------- starting sync ---------')

    stats = SyncStats()

    loggedOutToken = util.readConfigProperty('auth', 'logged_out_token')

    # MD5s of everything already present in the source folder
    sourceFolder = util.readConfigProperty('folders', 'source')
    md5List = __readFilesMD5(sourceFolder)

    urlsList = __checkingDuplicates(__readWebUrls('../' + const.URLS_LST_FILE))
    stats.urlsTotal = len(urlsList)

    __logger.debug("%d URLs fetched" % stats.urlsTotal)
    __logger.debug('loading web files')

    workers = []

    for idx, rawLine in enumerate(urlsList):
        url = rawLine.rstrip()

        __logger.debug('start checking %s of %s: %s' % (idx + 1, stats.urlsTotal, url))

        worker = threading.Thread(
            target=__processItem,
            args=(stats, opener, url, loggedOutToken, md5List))
        worker.start()

        # pace the downloads: one new worker per second
        time.sleep(1)

        workers.append(worker)

    __logger.debug("waiting for all the threads")
    for worker in workers:
        worker.join()

    __logger.debug('--------- ending sync ---------')
    __logger.info(stats)
    
    
if __name__ == '__main__':
    # configure logging first so everything below can log;
    # the config file lives one directory level up
    logging.config.fileConfig('../' + const.LOGGING_CFG_FILE)
    
    # credentials and login URL come from the shared config file
    user = util.readConfigProperty('auth', 'user')
    psw = util.readConfigProperty('auth', 'psw')
    url = util.readConfigProperty('auth', 'url')
    
    # presumably returns a URL opener bound to the authenticated
    # session (it is used via opener.open() in __download) - confirm
    opener = auth.authorize(url, user, psw)
    runSync(opener)
