#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os
import sys
import urllib
import logging
import ctypes
import shutil
import time
import codecs
import time
import getpass

import gdata.docs.data
import gdata.docs.client
import atom

# Constants
# File extensions eligible for upload/synchronization with Google Docs.
extensions = ['doc', 'docx', 'xls', 'xlsx', 'odt', 'ods', 'pdf', 'html', 'htm', 'txt', 'csv']
# Root of the local directory tree to mirror remotely (hard-coded, Windows path).
root_dir = u"C:\\Users\\Shona\\dev\\src\\python\\GoogleDataApi\\GoogleDocSync\\Prép cours"
# Hidden per-tree application state lives in <root>\.gdocsync, caches in \cache.
application_dir = root_dir + '\\.gdocsync'
cache_dir = application_dir + '\\cache'
# strftime/strptime format shared by all cache timestamps.
time_format_string = "%Y-%m-%d,%H:%M:%S"

# Helpers
def filterExtensions(filename):
    """Return True when *filename* has an extension we know how to sync.

    A filename qualifies when its extension is in the `extensions`
    whitelist AND the gdata MIME-type table knows it (keys are
    upper-case, e.g. 'DOC'). Used as a predicate for filter().
    """
    # os.path.splitext splits on the LAST dot, so "report.v2.doc" yields
    # ".doc". The original first-dot scan mis-handled multi-dot names and
    # its `ext == None` check was unreachable (slicing never returns None).
    ext = os.path.splitext(filename)[1][1:]
    if not ext:
        # No extension at all -> not syncable.
        return False
    ext = ext.lower()
    return ext in extensions and ext.upper() in gdata.docs.data.MIMETYPES

def urlToId(url):
    return urllib.unquote(url[url.find("folder"):])

# Win32 file attribute flag (winnt.h): FILE_ATTRIBUTE_HIDDEN.
FILE_ATTRIBUTE_HIDDEN = 2

def hideFileOrDir(path):
    """Mark *path* as hidden in Windows Explorer.

    Windows-only: goes straight to kernel32 via ctypes. The W suffix
    means the wide-char API, matching the unicode paths used here.
    """
    ctypes.windll.kernel32.SetFileAttributesW(path, FILE_ATTRIBUTE_HIDDEN)


class GCacheInfos:
    """Persistent per-directory cache of synchronization timestamps.

    Maps filename -> (local_mtime, remote_mtime), both formatted with
    time_format_string. Backed by a utf-8 text file holding one
    "filename/local_time/remote_time" line per entry; the file is
    rewritten from the in-memory dict when the object is destroyed.

    NOTE(review): the '/' separator assumes filenames never contain a
    slash — true for basenames on Windows, where this script runs.
    """

    def __init__(self, file_path):
        # In-memory view of the cache file.
        self.cache = {}
        if os.path.exists(file_path):
            # r+ so the very same handle can be rewritten in __del__.
            self.cache_file = codecs.open(file_path, encoding='utf-8', mode='r+')
            for line in self.cache_file:
                filename, local_time, remote_time = line.split('/')
                remote_time = remote_time.replace('\n','')
                self.cache[filename] = (local_time, remote_time)
        else:
            self.cache_file = codecs.open(file_path, encoding='utf-8', mode='w')

    def __del__(self):
        # Persist the cache back to disk. truncate() is essential: without
        # it, a cache that shrank would leave stale lines after the
        # rewritten content (bug in the original version, which only did
        # seek(0) before writing).
        self.cache_file.seek(0)
        for key, value in self.cache.iteritems():
            self.cache_file.write(key+"/"+value[0]+"/"+value[1]+"\n")
        self.cache_file.truncate()
        self.cache_file.close()

    def updateLocalInfos(self, file_path):
        """Record the current on-disk mtime of *file_path* as the local timestamp."""
        filename = os.path.basename(file_path)
        m_time = time.gmtime(os.path.getmtime(file_path))
        m_time_string = unicode(time.strftime(time_format_string, m_time), 'utf-8')

        if filename in self.cache:
            self.cache[filename] = (m_time_string, self.cache[filename][1])
        else:
            self.cache[filename] = (m_time_string, '')

    def updateRemoteInfos(self, gFile):
        """Record the remote document's 'updated' time as the remote timestamp."""
        filename = gFile.title.text
        m_time_string = gFile.updated.text
        # The feed yields e.g. '2011-05-03T12:00:00.123Z'. Appending the
        # fractional-seconds tail to the format string makes strptime
        # match it as a literal, effectively ignoring it.
        m_time = time.strptime(m_time_string, "%Y-%m-%dT%H:%M:%S"+m_time_string[m_time_string.find('.'):])
        m_time_string = unicode(time.strftime(time_format_string, m_time), 'utf-8')

        if filename in self.cache:
            self.cache[filename] = (self.cache[filename][0], m_time_string)
        else:
            self.cache[filename] = ('', m_time_string)

    def isRemoteModified(self, gFile):
        """True when the remote 'updated' time is later than the cached one.

        Unknown files are considered unmodified (False).
        """
        filename = gFile.title.text
        if filename not in self.cache:
            return False
        m_time_string = gFile.updated.text
        m_time = time.strptime(m_time_string, "%Y-%m-%dT%H:%M:%S"+m_time_string[m_time_string.find('.'):])
        m_cache_time = time.strptime(self.cache[filename][1], time_format_string)
        # Compare the (year..second) prefixes lexicographically as tuples.
        # The original field-by-field loop returned True whenever ANY field
        # was larger, wrongly flagging e.g. cached 2012-01 vs new 2011-06.
        return m_time[0:6] > m_cache_time[0:6]

    def isLocalModified(self, file_path):
        """True when the on-disk mtime is later than the cached local timestamp.

        Unknown files are considered unmodified (False).
        """
        filename = os.path.basename(file_path)
        if filename not in self.cache:
            return False
        m_time = time.gmtime(os.path.getmtime(file_path))
        m_cache_time = time.strptime(self.cache[filename][0], time_format_string)
        return m_time[0:6] > m_cache_time[0:6]
        



class GDirectory:
    """Thin convenience wrapper around a gdata folder entry."""

    def __init__(self, entry):
        self.entry = entry

    def name(self):
        """Title of the folder."""
        return self.entry.title.text

    def id(self):
        """Google resource id of the folder."""
        return self.entry.resource_id.text

    def __parent(self):
        """First parent-folder link of the entry, or None at root level."""
        parents = self.entry.InFolders()
        return parents[0] if parents else None

    def parentName(self):
        """Name of the parent folder, or None when the folder is at the root."""
        parent = self.__parent()
        return None if parent is None else parent.text

    def parentId(self):
        """Resource id of the parent folder, or None when at the root."""
        parent = self.__parent()
        return None if parent is None else urlToId(parent.href)

    def resumableLink(self):
        """Href of the 'resumable-create-media' link used for uploads, or None."""
        for link in self.entry.link:
            if 'resumable-create-media' in link.rel:
                return link.href
        return None


def FetchDirectories():
  print "Fetching remote directories informations..."
  feed = client.GetDocList(uri='/feeds/default/private/full/-/folder')
  directories = []
  for entry in feed.entry:
      directories.append(GDirectory(entry))
  return directories

def GetGFile(folder, filename):
    """Return the document entry named *filename* inside *folder*, or None."""
    feed = client.GetDocList(uri=folder.entry.content.src)
    return next((child for child in feed.entry if child.title.text == filename), None)

def UpdateFile(gFile, path, filename, cache):
    print "Updating "+filename+"..."
    name, ext = os.path.splitext(filename)
    file_path = os.path.join(path, filename)
    new_gFile = None
    #if ext == ".pdf":
    if True:
        f = open(file_path, 'rb')
        file_size = os.path.getsize(f.name)
        uploader = gdata.client.ResumableUploader(client, f, gdata.docs.data.MIMETYPES[ext[1:].upper()], file_size, desired_class=gdata.docs.data.DocsEntry)
        new_gFile = uploader.UpdateFile(gFile, force=True)
        f.close()
    else:
        ms = gdata.data.MediaSource(file_path=file_path, content_type=gdata.docs.data.MIMETYPES[ext[1:].upper()])
        new_gFile = client.Update(gFile, media_source=ms)

    cache.updateLocalInfos(file_path)
    cache.updateRemoteInfos(new_gFile)

def CreateAndUploadFile(folder, path, filename, cache):
    print "Creating and uploading "+filename+"..."
    name, ext = os.path.splitext(filename)
    file_path = os.path.join(path, filename)
    new_gFile = None
    #if ext == ".pdf":
    if True:
        f = open(file_path, 'rb')
        file_size = os.path.getsize(f.name)
        uploader = gdata.client.ResumableUploader(client, f, gdata.docs.data.MIMETYPES[ext[1:].upper()], file_size, desired_class=gdata.docs.data.DocsEntry)
        entry = gdata.docs.data.DocsEntry(title=atom.data.Title(text=filename))
        new_gFile = uploader.UploadFile(folder.resumableLink() + '?convert=false', entry=entry)
        f.close();
    else:
        new_gFile = client.Upload(file_path, filename, content_type=gdata.docs.data.MIMETYPES[ext[1:].upper()], folder_or_uri=folder.entry)

    cache.updateLocalInfos(file_path)
    cache.updateRemoteInfos(new_gFile)

def DeleteGFile(gFile, cache):
    if cache.isRemoteModified(gFile):
        print "Attempt to delete "+gFile.title.text+" remote file because it doesn't exist anymore on the local disk. Delete it manually if it's really what you wanted"
    else:
        client.Delete(gFile.GetEditLink().href + '?delete=true', force=True)

def UploadFiles(folder, path, files, cache):
    filtered_files = filter(filterExtensions, files)

    feed = client.GetDocList(uri=folder.entry.content.src)
    for child in feed.entry:
        if child.GetDocumentType() == 'folder':
            continue
        found = False
        for filename in filtered_files:
            if child.title.text == filename:
                filtered_files.remove(filename)
                found = True
                break;
        if found:
            # Check here cache information
            remote_modified = cache.isRemoteModified(child)
            local_modified = cache.isLocalModified(os.path.join(path, child.title.text))
            if local_modified:
                if remote_modified:
                    print "Conflict for file "+child.title.text+"! You have to resolve it manually!"
                else:
                    UpdateFile(child, path, child.title.text, cache)
            elif remote_modified:
                print "Need to update local file "+child.title.text+"! This isn't done automatically right now."
            else:
                print child.title.text+" is up to date !"
        else:
            DeleteGFile(child, cache)

    for filename in filtered_files:
        CreateAndUploadFile(folder, path, filename, cache)

def GetGDirectory(parent, name):
    """Find, in the module-level `directories` list, the folder called *name*
    whose parent matches *parent* (None means a root-level folder). Returns
    the GDirectory or None."""
    wanted_parent_id = None if parent is None else parent.id()
    for candidate in directories:
        if candidate.name() == name and candidate.parentId() == wanted_parent_id:
            return candidate
    return None

def GetGDirectoryChilds(gDir):
    """List the immediate sub-folders of *gDir* as GDirectory wrappers."""
    feed = client.GetDocList(uri=gDir.entry.content.src+'/-/folder')
    return [GDirectory(child) for child in feed.entry]
            
def CreateGDirectory(parent, name):
    """Create a remote folder *name* (under *parent* when given), register it
    in the module-level `directories` list, and return its GDirectory."""
    if parent is None:
        entry = client.Create(gdata.docs.data.FOLDER_LABEL, name)
    else:
        entry = client.Create(gdata.docs.data.FOLDER_LABEL, name, folder_or_id=parent.id())
    gDir = GDirectory(entry)
    directories.append(gDir)
    return gDir

def DeleteGDirectory(gDir):
    """Permanently delete a remote folder and everything inside it.

    Recurses depth-first: sub-folders first, then each document, then the
    folder itself. '?delete=true' bypasses the trash.
    """
    # (Removed the unused `gChilds = []` local from the original.)
    feed = client.GetDocList(uri=gDir.entry.content.src)
    for child in feed.entry:
        if child.GetDocumentType() == 'folder':
            DeleteGDirectory(GDirectory(child))
        else:
            client.Delete(child.GetEditLink().href + '?delete=true', force=True)
    client.Delete(gDir.entry.GetEditLink().href + '?delete=true', force=True)
    
def SynchDirectory(parent, path):
    """Recursively synchronize local directory *path* into remote folder *parent*.

    os.walk is used in a peculiar way: `dirs` is emptied at the end of the
    first iteration (`del dirs[:]`), which stops the walk from descending,
    so the loop body runs exactly once for the top level of *path*.
    Sub-directories are handled by explicit recursion instead.
    """
    for current_dir, dirs, files in os.walk(path):
        print "Sync '"+path+"' directory..."
        # Check if a cache file exists. The cache file name flattens the
        # path relative to root_dir by replacing separators with '#'
        # (e.g. "root#sub#subsub") so each directory gets one cache file.
        root_dir_name = os.path.basename(root_dir)
        cache_file_name = root_dir_name + path.replace(root_dir,'')
        cache_file_name = cache_file_name.replace(os.sep,'#')
        cache_file_path = os.path.join(cache_dir, cache_file_name)
        cache = GCacheInfos(cache_file_path)

        # Find the matching remote folder, creating it on first sync.
        name = os.path.split(path)[1]
        gDir = GetGDirectory(parent, name)
        if gDir == None:
            gDir = CreateGDirectory(parent, name)
        else:
            # Remote sub-folders with no local counterpart are deleted.
            gChilds = GetGDirectoryChilds(gDir)
            for gChild in gChilds:
                found = False
                for subDir in dirs:
                    if gChild.name() == subDir:
                        found = True
                        break
                if found == False:
                    DeleteGDirectory(gChild)
                        
        # Sync the files of this level, then recurse manually into each
        # sub-directory, skipping the application cache directory itself.
        UploadFiles(gDir, path, files, cache)

        for subDir in dirs:
            if subDir != os.path.split(cache_dir)[1]:
                SynchDirectory(gDir, os.path.join(path, subDir))
        
        # Emptying `dirs` prevents os.walk from descending further; see
        # the docstring above.
        del dirs[:]

#help(gdata.docs.data.DocsEntry)

class ScopeProfiler:
    def __init__(self):
        self.start = time.time()

    def __del__(self):
        time_elapsed = time.gmtime(time.time() - self.start)
        print "Time elapsed: "+time.strftime("%H:%M:%S", time_elapsed)

# Program identity used in the welcome banner.
program_name = "GoogleDocSync"
version = 0.1
# Number of interactive login attempts Authenticate() allows before giving up.
max_authentication_attempt = 3

def Authenticate(client):
    file_path = os.path.join(application_dir,'authentication')
    if os.path.exists(file_path):
        file = codecs.open(file_path, encoding='utf-8', mode='r')
        token = file.readline()
        client.auth_token = gdata.gauth.ClientLoginToken(token)
        return True
    else:
        print "Can't retrieve authentication token locally."
        attempt = 0
        while attempt < max_authentication_attempt:
            attempt = attempt + 1
            try:
                login = raw_input("Enter your email address: ")
                password = getpass.getpass("Enter your password:")
                print "Authentication ..."
                client.ClientLogin(login, password, client.source)
                file = codecs.open(file_path, encoding='utf-8', mode='w')
                file.write(client.auth_token.token_string)
                return True
            except Exception:
                print "Bad login or password, try again. " + str(max_authentication_attempt - attempt) + " attempts remaining."

        return False

# Overall run timer: ScopeProfiler prints the elapsed time from its
# __del__ when the interpreter tears the module down.
profiler = ScopeProfiler()
print "Welcome To " + program_name + " v" + str(version) + " " + getpass.getuser() + "."

# Setting up the GDocs client
client = gdata.docs.client.DocsClient(source='AlexandreChassany-GoogleDocDirSync-v1')
client.ssl = True  # Force all API requests through HTTPS
client.http_client.debug = False  # Set to True for debugging HTTP requests
client.api_version = '3.0'

# First run: create the (hidden) .gdocsync application directory and its
# cache sub-directory next to the synced tree.
if not os.path.isdir(application_dir):
    os.makedirs(application_dir)
    os.makedirs(cache_dir)
    hideFileOrDir(application_dir)

# Authentication process
if not Authenticate(client):
    print "Authentication failed. Exiting the program"
    # NOTE(review): exiting with status 0 on failure hides the error from
    # shell callers — consider exit(1).
    exit(0)
else:
    print "Authentication succeeded."

# Fetch the complete remote folder list once; GetGDirectory and
# CreateGDirectory work against this module-level list.
directories = FetchDirectories()

# Synchronize the whole local tree starting at root_dir.
SynchDirectory(None,root_dir)



