#!/usr/bin/env python

"""
 Simple sync for Google Docs.

Deps:
 python2.4 or so
 gdata api v16:  http://gdata-python-client.googlecode.com/files/gdata-2.0.16.zip
"""

__author__ = 'Nathaniel Fairfield <nathanielfairfield@gmail.com>'

import calendar
import errno
import gdata.docs.client
import gdata.docs.data
import getopt
import getpass
import os
import shutil
import sys
import tempfile
import time


# Timestamps are 'Z' (UTC).
# Force the process timezone to UTC so that local-time functions (notably
# time.mktime in TimeStringToUnixTime) agree with the cloud's UTC timestamps.
# NOTE(review): 'Z' is not a standard TZ name; most libcs fall back to UTC
# for unrecognized values, but 'UTC' would be more portable -- confirm.
os.environ['TZ'] = 'Z'
time.tzset()


def safe_mkdir(newdir):
    """Create directory path(s), ignoring "already exists" errors.

    Behaves like `mkdir -p`: intermediate directories are created as
    needed, and an already-existing target directory is not an error.

    Args:
        newdir: directory path to create (relative or absolute).

    Raises:
        OSError: any failure other than the directory already existing
            (e.g. permissions, or the path exists but is a regular file).
    """
    result_dir = os.path.abspath(newdir)
    try:
        os.makedirs(result_dir)
    except OSError as info:
        # EEXIST (was the magic number 17) is expected when the directory
        # is already present; re-raise everything else, including EEXIST
        # for a non-directory path component.
        if info.errno == errno.EEXIST and os.path.isdir(result_dir):
            pass
        else:
            raise


def TimeStringToUnixTime(s):
    """Convert an RFC 3339 UTC timestamp string to integer Unix time.

    Args:
        s: string of the form '2010-12-04T22:42:09.612Z'.  Fractional
            seconds and the trailing 'Z' are ignored.

    Returns:
        Integer seconds since the Unix epoch.
    """
    year, month, day = map(int, s[:10].split('-'))
    hour, minute, second = map(int, s[11:19].split(':'))
    # calendar.timegm interprets the tuple as UTC, so the result is correct
    # regardless of the process TZ setting.  (The original used time.mktime,
    # which reads the tuple as *local* time and only worked because the
    # module sets TZ to UTC at import time.)
    return int(calendar.timegm((year, month, day, hour, minute, second, 0, 0, 0)))


class SyncClient(object):
    """Sync helper between a Google Docs account and a local directory.

    Maintains two in-memory views of the remote account:
      self.resource_map: {resource_id: resource entry}; each entry is
          patched with a synthetic .parent_id attribute ('' = top level).
      self.file_tree: nested dicts keyed by title; a folder is stored as
          name -> [resource_id, sub_dict], a plain file as
          name -> resource_id (a string).
    """

    def __init__(self, email, root_path):
        """Authenticate against Google Docs and build the remote views.

        Args:
            email: Google account email address.
            root_path: local directory that mirrors the remote tree.
        """
        print 'Creating client'
        self.root_path = root_path
        self.client = gdata.docs.client.DocsClient()
        self.client.ssl = True  # Force all API requests through HTTPS
        self.client.http_client.debug = False  # True for debugging HTTP requests
        # Reuse the auth token cached in ./.gdfs_token when possible;
        # otherwise prompt for the password and cache the new token.
        # NOTE(review): the bare except also swallows token-decode errors,
        # silently falling back to an interactive login.
        try:
            token_string = open('.gdfs_token').read()
            self.client.auth_token = gdata.gauth.token_from_blob(token_string)
        except:
            token = self.client.RequestClientLoginToken(
                email, getpass.getpass(), 'gdfs')
            self.client.auth_token = token
            open('.gdfs_token', 'w').write(gdata.gauth.token_to_blob(token))
        self.BuildResourceMap()
        self.BuildFileTree()

    def BuildResourceMap(self):
        """Fetch every remote resource (folders included) into resource_map."""
        print 'Building resource map'
        RESOURCE_FEED_URI = '/feeds/default/private/full' + '?showfolders=true'
        self.resource_map = {}
        for resource in self.client.GetAllResources(RESOURCE_FEED_URI):
            # Hand-coded method to extract parent id.
            # The parent link href ends in the URL-escaped parent resource
            # id; '' means top level.  If a resource has several parent
            # links, the last one wins.
            parent_id = ''
            for link in resource.link:
                if link.rel == gdata.docs.data.PARENT_LINK_REL:
                    parent_id = link.href[(link.href.rfind('/') + 1):].replace('%3A', ':')
            resource.parent_id = parent_id
            self.resource_map[resource.resource_id.text] = resource

    def BuildFileTree(self):
        """Assemble self.file_tree (nested dicts) from self.resource_map.

        Folders become name -> [resource_id, sub_dict]; leaves become
        name -> resource_id.  A resource counts as a folder when its id
        appears as some other resource's parent_id, so an empty folder is
        stored as a leaf.
        """
        print 'Building file tree'
        # Build a map from directory to content ids.
        id_map = {}
        for key, value in self.resource_map.iteritems():
            id_map.setdefault(value.parent_id, []).append(value.resource_id.text)

        def AddResource(resource_id, sub_tree):
            # Recursively assemble the file tree.
            if resource_id == '':
                resource_name = ''
            else:
                resource_name = self.resource_map[resource_id].title.text
            for sub_resource_id in id_map[resource_id]:
                sub_resource_name = self.resource_map[sub_resource_id].title.text
                if sub_resource_id in id_map:
                    # Has children, so it is a folder: recurse into it.
                    sub = sub_tree.setdefault(sub_resource_name, [sub_resource_id, {}])
                    AddResource(sub_resource_id, sub[1])
                else:
                    sub_tree[sub_resource_name] = sub_resource_id

        self.file_tree = {}
        # There may be no resources!
        if '' in id_map:
            AddResource('', self.file_tree)

    def FilterFileTree(self, folder_id, force_include=False):
        """Rebuild file_tree, keeping only content under folder_id.

        A modified BuildFileTree(): folders are always added (so the path
        down to the target folder survives), but plain files are added only
        once the recursion is at or below the folder named by folder_id.

        Args:
            folder_id: folder resource id, with or without 'folder:' prefix.
            force_include: unused at this level; the inner AddResource
                carries its own flag down the recursion.
        """
        print 'Filtering file tree'
        if not folder_id.startswith('folder:'):
            folder_id = 'folder:%s' % folder_id

        # Build a map from directory to content ids.
        id_map = {}
        for key, value in self.resource_map.iteritems():
            id_map.setdefault(value.parent_id, []).append(value.resource_id.text)

        def AddResource(resource_id, sub_tree, force_include=False):
            # Recursively assemble the file tree.
            if resource_id == '':
                resource_name = ''
            else:
                resource_name = self.resource_map[resource_id].title.text
            for sub_resource_id in id_map[resource_id]:
                sub_resource_name = self.resource_map[sub_resource_id].title.text
                if sub_resource_id in id_map:
                    # Entering the target folder: include all descendants.
                    if resource_id == folder_id:
                        force_include = True
                    sub = sub_tree.setdefault(sub_resource_name, [sub_resource_id, {}])
                    AddResource(sub_resource_id, sub[1], force_include=force_include)
                else:
                    # Plain files are kept only inside the target folder.
                    if force_include or resource_id == folder_id:
                        sub_tree[sub_resource_name] = sub_resource_id
        self.file_tree = {}
        # There may be no resources!
        if '' in id_map:
            AddResource('', self.file_tree)

    def _SyncDown(self, path, sub_tree):
        """Recursively download files in sub_tree into local `path`.

        Spreadsheets are skipped.  A file is downloaded only when the cloud
        mtime is newer than the local one; the download goes to a temp file
        first, then is copied into place and stamped with the cloud mtime.
        """
        for key, value in sub_tree.iteritems():
            sub_path = os.path.join(path, key)
            if type(value) is list:
                # Folder entry: [resource_id, sub_tree] -- recurse.
                self._SyncDown(sub_path, value[1])
            else:
                if value.startswith('spreadsheet'):
                    print 'IGNORING spreadsheet %r' % key
                else:
                    resource = self.resource_map[value]
                    cloud_mtime = TimeStringToUnixTime(resource.updated.text)
                    if os.path.isfile(sub_path):
                        local_mtime = os.path.getmtime(sub_path)
                        # Local copy is at least as new: nothing to do.
                        if (cloud_mtime <= local_mtime):
                            continue
                    print 'syncdown', sub_path
                    # Download to a temp file so a failed transfer cannot
                    # clobber the existing local copy.
                    f, tmp_path = tempfile.mkstemp()
                    os.close(f)
                    self.client.DownloadResource(self.resource_map[value], tmp_path)
                    parent_sub_path = os.path.dirname(sub_path)
                    if not os.path.exists(parent_sub_path):
                        safe_mkdir(parent_sub_path)
                    shutil.copy(tmp_path, sub_path)
                    os.remove(tmp_path)
                    # Stamp the local file with the cloud mtime so the
                    # freshness check above is stable on the next run.
                    os.utime(sub_path, (cloud_mtime, cloud_mtime))

    def SyncDown(self):
        """Download remote files that are newer than their local copies."""
        print 'Syncing from remote down to local'
        self._SyncDown(self.root_path, self.file_tree)

    def FindResource(self, path):
        """Look up a resource by slash-separated path relative to the root.

        Args:
            path: e.g. 'folder/subfolder/file', using '/' separators.

        Returns:
            The entry from resource_map, or None when the path (or the
            resource id it resolves to) is unknown.
        """
        s = path.split('/')  # TODO more portable
        i = 0
        max_i = len(s)
        sub_tree = self.file_tree
        while i < max_i:
            if s[i] in sub_tree:
                if i == (max_i - 1):
                    # Last component: a folder is [id, subtree], a file is
                    # a bare resource id string.
                    if type(sub_tree[s[i]]) == list:
                        resource_id = sub_tree[s[i]][0]
                    else:
                        resource_id = sub_tree[s[i]]
                    if resource_id in self.resource_map:
                        return self.resource_map[resource_id]
                    else:
                        return None
                else:
                    sub_tree = sub_tree[s[i]][1]
                i += 1
            else:
                return None
            #print s, i, max_i, type(sub_tree)

    def SyncUp(self):
        """Upload local dirs/files that are missing or newer than remote.

        Walks the local tree; creates missing remote folders and documents,
        and re-uploads any file whose local mtime is newer than the cloud's.
        """
        print 'Syncing from local up to remote'
        skip_path = len(self.root_path) + 1  # strip the '<root>/' prefix
        # Crawl the local file tree
        for root, dirs, files in os.walk(self.root_path):
            remote_root = root[skip_path:]
            for d in dirs:
                remote_path = os.path.join(remote_root, d)
                resource = self.FindResource(remote_path)
                if resource == None:
                    # Create the remote directory
                    print 'creating remote dir', remote_path
                    folder = gdata.docs.data.Resource(type='folder', title=d)
                    parent_resource = self.FindResource(os.path.dirname(remote_path))
                    if parent_resource != None:
                        # Non-top-level resources get the 'hidden' label.
                        folder.AddLabel('hidden')
                    resource = self.client.CreateResource(folder, collection=parent_resource)
                    # Add to the resource map
                    if parent_resource != None:
                        resource.parent_id = parent_resource.resource_id.text
                    else:
                        resource.parent_id = ''
                    self.resource_map[resource.resource_id.text] = resource
                elif resource.GetResourceType() != 'folder':
                    print 'Error: remote', remote_path, 'is not a folder!'
            for f in files:
                remote_path = os.path.join(remote_root, f)
                local_path = os.path.join(root, f)
                resource = self.FindResource(remote_path)
                if (resource == None):
                    print 'creating remote file', remote_path
                    media = gdata.data.MediaSource()
                    media.SetFileHandle(local_path, 'application/octet-stream')
                    doc = gdata.docs.data.Resource(type='document', title=f)
                    parent_resource = self.FindResource(os.path.dirname(remote_path))
                    if parent_resource != None:
                        doc.AddLabel('hidden')
                    # NOTE(review): bare except hides the real upload error;
                    # it should at least catch Exception and log the cause.
                    try:
                        resource = self.client.CreateResource(doc, media=media,
                                                              collection=parent_resource)
                        if parent_resource != None:
                            resource.parent_id = parent_resource.resource_id.text
                        else:
                            resource.parent_id = ''
                        self.resource_map[resource.resource_id.text] = resource
                    except:
                        print 'Error creating', remote_path
                    continue
                cloud_mtime = TimeStringToUnixTime(resource.updated.text)
                local_mtime = os.path.getmtime(local_path)
                if local_mtime > cloud_mtime:
                    print 'syncup', remote_path
                    media = gdata.data.MediaSource()
                    media.SetFileHandle(local_path, 'application/octet-stream')
                    resource = self.client.UpdateResource(resource, media=media, force=True)
            # TODO: don't rebuild the tree all the time!  But for now, its necessary
            # so we can find the new folders.
            # NOTE(review): this rebuild also discards any FilterFileTree()
            # restriction applied before SyncUp() -- confirm intended.
            self.BuildFileTree()

    def CleanLocal(self):
        """Delete local dirs/files that have no corresponding remote resource."""
        print 'Cleaning local files'
        skip_path = len(self.root_path) + 1  # strip the '<root>/' prefix
        for root, dirs, files in os.walk(self.root_path):
            remote_root = root[skip_path:]
            for d in dirs:
                remote_path = os.path.join(remote_root, d)
                resource = self.FindResource(remote_path)
                if resource == None:
                    # Delete the local dir
                    local_path = os.path.join(root, d)
                    print 'deleting local', local_path
                    # ignore_errors also covers the walk descending into the
                    # directory we just removed.
                    shutil.rmtree(local_path, ignore_errors=True)
            for f in files:
                remote_path = os.path.join(remote_root, f)
                resource = self.FindResource(remote_path)
                if resource == None:
                    # Delete the local file
                    local_path = os.path.join(root, f)
                    print 'deleting local', local_path
                    os.remove(local_path)

    def _CleanRemote(self, path, sub_tree):
        """Recursively delete remote resources with no local counterpart.

        Deletions are permanent (no trash) and forced.
        """
        for key, value in sub_tree.iteritems():
            sub_path = os.path.join(path, key)
            if type(value) is list:
                # Check that the directory exists locally
                if not os.path.isdir(sub_path):
                    # Delete the remote dir
                    resource = self.resource_map[value[0]]
                    print 'deleting remote', sub_path
                    self.client.DeleteResource(resource, permanent=True, force=True)
                else:
                    self._CleanRemote(sub_path, value[1])
            else:
                # Check that the file exists locally
                if not os.path.isfile(sub_path):
                    # Delete the remote file
                    resource = self.resource_map[value]
                    print 'deleting remote', sub_path
                    self.client.DeleteResource(resource, permanent=True, force=True)

    def CleanRemote(self):
        """Permanently delete remote resources missing from the local tree."""
        print 'Cleaning remote files'
        self._CleanRemote(self.root_path, self.file_tree)


def usage():
    """Write the command-line help text to stdout."""
    help_text = """gd_sync.py --email email --local_path local_path [--sync_up] [--sync_down] [--clean_remote] [--clean_local]

   --email: your google docs account email address
   --local_path: path to your local sync directory
   --filter_id: path to Gdrive directory key to restrict to (see below)

  Multiple of the following options can be specified, they will be executed in order:
   --sync_up: sync from local to remote
   --sync_down: sync from remote to local
   --clean_remote: delete remote files that are not present locally
   --clean_local: delete local files that are not present remotely

Example filter_id:
    
    https://docs.google.com/folder/d/abcdefghijklmnopqrstuvwxyz12/
    
    would be:
        --filter_id=abcdefghijklmnopqrstuvwxyz12
  """
    # Equivalent to the print statement: the text plus a trailing newline.
    sys.stdout.write(help_text + '\n')


if __name__ == '__main__':
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'h',
                                   ['help', 'email=', 'local_path=',
                                    'filter_id=',
                                    'sync_up', 'sync_down',
                                    'clean_remote', 'clean_local'])
    except getopt.GetoptError, err:
        print str(err)
        usage()
        sys.exit(2)
    
    filter_id = None
    for o, a in opts:
        if o == '--email':
            email = a
        if o == '--local_path':
            local_path = a
        if o == '--filter_id':
            filter_id = a
        elif o in ("-h", "--help"):
            usage()
            sys.exit(0)
    if email == '' or local_path == '':
        usage()
        sys.exit(2)

    client = SyncClient(email, local_path)
    if filter_id:
        print 'filter_id %r' % filter_id
        client.FilterFileTree(filter_id)

    for o, a in opts:
        if o == '--sync_up':
            client.SyncUp()
        if o == '--sync_down':
            client.SyncDown()
        if o == '--clean_remote':
            client.CleanRemote()
        if o == '--clean_local':
            client.CleanLocal()
