#!/usr/bin/env python
# -*- coding: utf8 -*-
#
# kate: space-indent on; indent-width 4; mixedindent off; indent-mode python
#
# $Id: picasapush.py 13 2007-07-19 01:12:02Z havard.gulldahl $
#
# Copyright 2007-2008 Håvard Gulldahl
#
#
# Licensed under the General Public License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""A Fuse application of gdata.photo services."""

__author__ = u'havard.gulldahl@gmail.com (Håvard Gulldahl)'
__version__ = '0.10'

import pdb # debugger; only used via the commented-out pdb.set_trace() calls below

# statvfs()-style file system geometry advertised to the kernel:
F_BSIZE = 1024*10 #- Preferred file system block size. (int)
F_FRSIZE = F_BSIZE #- Fundamental file system block size. (int)
F_BLOCKS = 100000 #- Total number of blocks in the filesystem. (long)
F_BFREE = 25000 #- Total number of free blocks. (long)
F_BAVAIL = 15000 #- Free blocks available to non-super user. (long)
F_FILES = 0L #- Total number of file nodes. (long)
F_FFREE = 0L #- Total number of free file nodes. (long)
F_FAVAIL = 0L #- Free nodes available to non-super user. (long)
#F_FLAG #- Flags. System dependent: see statvfs() man page. (int)
F_NAMEMAX = 64 #- Maximum file name length. (int)

CACHE_TIMEOUT = 100 ## seconds
CONFIG_THREADS = 4 ## no. of threads

# Mount option values that count as boolean true.
TRUE = ('yes', 'YES', 'Yes', 'True', 'true', 'TRUE', '1', 'enabled', 'Enabled')

import os, os.path, sys, time, StringIO, codecs, logging, logging.handlers, re
import errno, stat, string, ConfigParser, urllib, threading, Queue

# Debug log goes to a per-process file in /tmp; picasafs.__init__ adds a
# second handler mirroring the log into an in-memory stream (the .LOG file).
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
                    datefmt='%m-%d %H:%M',
                    filename='/tmp/picasafs.%s.log' % os.getpid(),
                    filemode='w')

# Names of the special (virtual) directories and files exposed by the fs.
TAG_DIR = ()#('TAGS',)
UPLOAD_DIRS = ('.UPLOAD',)
QUICK_DIR = '.QUICKLIST'
LOG_FILES = ('.LOG',) #'.error')
HELP_FILE = '.HELP'

# Virtual per-photo file name -> the photo property it exposes.
METADATA_FILES = { 'description.txt': 'description',
                   'tags.txt': 'keywords',
                   'date.txt': 'date',
                   'rotation.txt': 'rotation',
                   'checksum.txt': 'checksum',
                   'position.txt': 'where',
                   'client.txt': 'client',
                   'shortname.txt': 'title'
                 }

# Mime type -> file name extension used for photo files.
FILETYPE2EXT = { 'image/jpeg':'jpg',
                 'image/gif':'gif',
                 'image/png':'png',
                 'image/tiff':'tif' }



import fuse, mimetypes
fuse.fuse_python_api = (0, 1)
# Optional: when PIL is available, extra sized upload dirs are exposed.
try:
    from PIL import Image, ImageFile
    USE_IMAGING=True
    UPLOAD_DIRS += ('.UPLOAD_640x480', '.UPLOAD_800x600', '.UPLOAD_1024x768', '.UPLOAD_1600x1200', )
except ImportError:
    USE_IMAGING=False

# Optional: python-magic support flag.
try:
    import magic
    USE_MAGIC=True
except ImportError:
    USE_MAGIC=False

import atom
atom.XML_STRING_ENCODING=None # workaround bug

import gdata.photos
import gdata.photos.service

# Restore default SIGINT handling so Ctrl-C terminates the process.
import signal
signal.signal(signal.SIGINT, signal.SIG_DFL)

class Inode(object):
    """Common base class for all file system objects
    """

    def __init__(self, path=None, id='', mode=None, size=0L, mtime=None, ctime=None):
        self.nlink = 1
        self.size = size
        self.id = id
        self.mode = mode
        self.ino = long(time.time())
        self.dev = 409089L
        self.uid = int(os.getuid())
        self.gid = int(os.getgid())
        now = int(time.time())
        self.atime = now
        if mtime is None: self.mtime = now
        else: self.mtime = int(mtime)
        if ctime is None: self.ctime = now
        else: self.ctime = int(ctime)
        self.blocksize = F_BSIZE


class DirInode(Inode):

    def __init__(self, path=None, id="", mode=None, mtime=None, ctime=None):
        if mode is None: mode = 0755
        super(DirInode, self).__init__(path, id, mode, 0L, mtime, ctime)
        self.mode = stat.S_IFDIR | self.mode
        self.nlink += 1
        self.dirfile = ""
        self.setId = self.id

class AlbumInode(DirInode):
    def __init__(self, album=None, path=None):
        self.album = album
        timestamp = int(album.timestamp.text)/1000
        if album.access.text == 'public': _mode = 0755
        else: _mode = 0700
        DirInode.__init__(self, path=path, mode=_mode, mtime=timestamp, ctime=timestamp)

class PhotoInode(DirInode):
    def __init__(self, photo=None, album=None, path=None):
        self.photo = photo
        self.album = album
        timestamp = int(photo.timestamp.text)/1000
        if album.access.text == 'public': _mode = 0755
        else: _mode = 0700
        DirInode.__init__(self, path=path, mode=_mode, mtime=timestamp, ctime=timestamp)

class FileInode(Inode):

    def __init__(self, path=None, id="", mode=None, comm_meta="", size=1L, mtime=None, ctime=None):
        if mode is None: mode = 0644
        super(FileInode, self).__init__(path, id, mode, size, mtime, ctime)
        self.mode = stat.S_IFREG | self.mode
        self.buf = ""
        self.queryId = self.id
        self.comm_meta = comm_meta

class PhotoAttrInode(FileInode):
    """File inode for a virtual metadata/attribute file of a photo."""

    def __init__(self, photo, path=None, mode=None, size=None):
        self.photo = photo
        # gphoto:timestamp is in milliseconds since the epoch
        ts = int(photo.timestamp.text)/1000
        FileInode.__init__(self, path=path, mode=mode, size=long(size), mtime=ts, ctime=ts)

class CommentInode(FileInode):
    def __init__(self, comment, path=None):
        self.comment = comment
        mtime = gdata.photos.service.ConvertAtomTimestampToEpoch(comment.updated.text)
        ctime = gdata.photos.service.ConvertAtomTimestampToEpoch(comment.published.text)
        try: size = len(comment.content.text) ## TODO: is this right?
        except AttributeError: # Api is confused, use summary element instead of content
            size = len(comment.summary.text)
        mode = 0644
        if comment.GetEditLink() is None: #readonly -- we are not allowed to unlink()
            mode = 0444
        FileInode.__init__(self, path=path, mode=mode, size=long(size), mtime=mtime, ctime=ctime)

class request_thread(threading.Thread):
    def __init__(self, queue):
        self.queue = queue
        threading.Thread.__init__(self)
    def run(self):
        while True: # serve forever
            # get at task
            try:
                func, args, kwords = self.queue.get()
            except Queue.Empty:
                print "empty q"
                continue
            try:
                print "Running func: %s ( %s, %s )" % (func, args, kwords)
                func(*args, **kwords)
            except Exception, e:
                logging.error(e)
                print e
            else:
                self.queue.task_done()


class picasafs (fuse.Fuse):
    """Fuse file system exposing Picasa Web albums, photos, tags and
    comments as directories and files."""
    # NOTE(review): these are class-level (shared) mutable attributes;
    # prune_cache rebinds _inode_cache on the instance when clearing.
    # Caches of gdata entries; the *_last_seen values hold the epoch time
    # of the last fetch so data expires after CACHE_TIMEOUT seconds.
    _albums = {}
    _albums_last_seen = -1
    _albumFeed = {}
    _photos = {}
    _photos_last_seen = {}
    _photoFeed = {}
    _photo_metadata = {}
    _photo_desktopentry = {}
    _tags = {}
    _tags_last_seen = {}
    _tagFeed = {}
    _usertags = {}
    _usertags_last_seen = -1
    _usertagFeed = {}
    _comments = {}
    _comments_last_seen = {}
    _commentFeed = {}
    # path -> Inode and path -> StringIO buffer caches
    _inode_cache = {}
    _file_cache = {}
    # pretty comment file name -> gdata comment id (filled by getdir)
    _comment_id_to_pretty_name_map = {}
    _userFeed = None
    # options, overridable via extra_opts in __init__
    config_use_short_filename = True
    config_number_of_threads = CONFIG_THREADS

    def __init__(self, extra_opts, mount_point, *args, **kw):
        # Set up logging
        self.logstream = StringIO.StringIO()
        _FSlog = logging.StreamHandler(self.logstream)
        _FSlog.setLevel(logging.DEBUG)
        logging.getLogger('').addHandler(_FSlog)
        self.log = logging.getLogger('picasafs')
        self.log.info('__init__ing Fuse')
        self.mount_point = mount_point
        try:
            # Looking for a default username:password file
            creds = file(os.path.expanduser('~/.picasapass'))
            n = creds.readline().strip() #newline separated fields
            p = creds.readline().strip()
            creds.close()
        except IOError:
            pass
        if extra_opts.has_key('credentials_file'):
            # Looking for a given username:password file
            creds = file(os.path.expanduser(extra_opts['credentials_file']))
            n = creds.readline().strip()
            p = creds.readline().strip()
            creds.close()
        if extra_opts.has_key('username'):
            n = extra_opts['username']
        if extra_opts.has_key('password'):
            p = extra_opts['password']
        self.picasaweb = gdata.photos.service.PhotosService(
            email=n,
            password=p,
            source='lurtgjort.no-PicasaFS-%s' % __version__)
        self.picasaweb.ProgrammaticLogin()
        self.get_albums()
        fuse.Fuse.__init__ ( self, mount_point, *args, **kw )
        # Get options
        if extra_opts.has_key('short_filenames'):
            self.config_use_short_filename = extra_opts['short_filenames'] in TRUE
        if extra_opts.has_key('threads'):
            try:
                self.config_number_of_threads = int(extra_opts['threads'])
            except TypeError:
                pass
        # Start threading pool
        self.queue = Queue.Queue()
        self.threads = []
        for i in range(self.config_number_of_threads):
            t = request_thread(self.queue)
            t.setDaemon(True)
            t.start()
            self.threads.append(t)
        print self.threads

    def prune_cache(self, level=None):
        """Delete the cache of inodes and files at the given level; defaults to everything"""
        self.log.debug('Pruning cache: %s' % level)
        if level is None: # delete everything
            self._inode_cache = {}
            #self._file_cache = {}
            return
        prefix = '/' + level
        for cache in (self._inode_cache, self._file_cache):
            # .keys() returns a list copy, so popping while looping is safe
            for key in cache.keys():
                if not key.startswith(prefix):
                    continue
                try:
                    cache.pop(key)
                except KeyError:
                    self.log.error("The cache is confused. Where is '%s' in %s?" % (repr(key), repr(cache)))

    def get_albums(self, force_fetch=False):
        """Return {album title: album entry} for the logged-in user.

        The album feed is refetched when it is older than CACHE_TIMEOUT
        seconds or when force_fetch is True; refetching also prunes the
        whole inode cache.
        """
        if force_fetch or self._albums_last_seen + CACHE_TIMEOUT < time.time():
            # BUGFIX: prune_cache() was previously called twice on
            # force_fetch (once before and once inside the branch);
            # prune exactly once, right before refetching.
            self.prune_cache()
            self._userFeed = self.picasaweb.GetUserFeed(kind='album')
            for a in self._userFeed.entry:
                self._albums[a.title.text] = a
            self._albums_last_seen = time.time()
        return self._albums

    def get_photos(self, album_name, force_fetch=False):
        """Return [(filename, photo entry), ...] for album_name.

        Filenames are built from the photo summary (or from the title
        when config_use_short_filename is set) and de-duplicated by
        appending/incrementing a trailing _<n>.  Results are cached per
        album title for CACHE_TIMEOUT seconds.
        """
        album = self.get_albums(force_fetch=force_fetch)[album_name]
        if force_fetch: self.prune_cache(album_name)
        if force_fetch or not self._photos.has_key(album.title.text) or \
            self._photos_last_seen[album.title.text] + CACHE_TIMEOUT < time.time():
            _seen_filenames = []
            self._photoFeed[album.title.text] = \
                self.picasaweb.GetFeed(album.GetPhotosUri())
            _photos = []
            for p in self._photoFeed[album.title.text].entry:
                filename = make_easy_path(p.summary.text)
                if filename is None or self.config_use_short_filename:
                    filename = make_easy_path(p.title.text)
                self.log.debug('Looking at photo: %s' % filename)
                # De-duplicate: bump the numeric suffix until unique
                while filename in _seen_filenames:
                    try:
                        delim = filename.rfind('_')
                        idx = int(filename[delim+1:])
                        filename = '%s_%s' % (filename[:delim], idx+1)
                    except ValueError:
                        # no numeric suffix yet (or rfind found none): start at _1
                        filename = '%s_1' % filename

                _photos += [ (filename, p)  ]
                _seen_filenames.append(filename)
            self._photos[album.title.text] = _photos
            self._photos_last_seen[album.title.text] = time.time()

        return self._photos[album.title.text]

    def get_photo_desktopentry(self, photo_filename, album_name, force_fetch=False):
        """Build and cache a freedesktop '.directory' entry for a photo.

        Name= is the photo summary and Icon= points at the smallest
        thumbnail file inside the mounted photo dir.  Returns the
        StringIO buffer holding the utf8 text.
        """
        photo = self.get_photo(album_name, photo_filename, force_fetch=force_fetch)
        k=photo.GetFeedLink().href
        self._photo_desktopentry[k] = StringIO.StringIO()
        utf8_w = codecs.getwriter('utf8')(self._photo_desktopentry[k]) ## making sure buffer is valid utf8
        photo_repr = photo.media.content[0] # TODO: iterate and find content.type = 'jpeg'
        # NOTE(review): photo_repr is unused in this method
        thumb = get_smallest_thumbnail(photo.media.thumbnail)
        thumb_path = '%s/%s/%s/thumb_%sx%s.jpg' % (self.mount_point, album_name, photo_filename, thumb.width, thumb.height)
        utf8_w.write("""[Desktop Entry]
Encoding=UTF-8
Name=%s
Icon=%s
""" % (parse_text(photo.summary), thumb_path))
        return self._photo_desktopentry[k]

    def get_photo_metadata(self, photo_filename, album_name, force_fetch=False):
        """Build and cache the 'info.txt' ini-style metadata text for a
        photo (editable fields, EXIF data and read-only info).

        Returns the StringIO buffer holding the utf8 text, also kept in
        self._photo_metadata keyed by the photo's feed link.
        """
        photo = self.get_photo(album_name, photo_filename, force_fetch=force_fetch)
        k=photo.GetFeedLink().href
        self._photo_metadata[k] = StringIO.StringIO()
        if photo.rotation is None: rot = '0'  ## gphoto:rotation is not always set. <PWS BUG>
        else: rot = photo.rotation.text
        utf8_w = codecs.getwriter('utf8')(self._photo_metadata[k]) ## making sure buffer is valid utf8
        photo_repr = photo.media.content[0] # TODO: iterate and find content.type = 'jpeg'
        # geo.location() raises TypeError when no position is set
        try:
          where = '%s x %s' % photo.geo.location()
        except TypeError:
          where = 'lat x lon'
        #if photo.exif is None:
            #photo.exif = gdata.exif.Tags()
        utf8_w.write(""";Photo details for %s:
;
;---------
[Editable]
;A short picture name
Filename=%s
;
;A verbose description of the picture
Description=%s
;
;Keywords (a.k.a. tags) are comma-separated words
Keywords=%s
;
;Date taken, in the format YYYY-MM-DD HH:mm:SS (UTC timezone)
Date taken=%s
;
;Rotation of image on display. Degrees clockwise
Rotation=%s
;
;Unique value to prevent duplicate uploads
Checksum=%s
;
;Lat x Lon GPS coordinates of picture. WGS84 Datum
; format: lat x lon, e.g. '12.0 x -55.0'
Where=%s
;
;Are others allowed to add comments to the picture? True/False
Comments allowed=%s
;
;The name of the program, default PicasaFS
Upload program=%s
;
;---------
[Camera setup (read-only)]
Make=%s
Model=%s
Date=%s
ISO=%s
Distance=%s
Exposure=%s
Flash fired=%s
Focal length=%s
F-Stop=%s
Unique ID=%s
;---------
[Information (read-only)]
Dimensions=%sx%s (%s bytes)
Uploaded by=%s
Date uploaded=%s
Url=%s
File type=%s
No. of comments=%s
""" % (# Editable
       parse_text(photo.title), parse_text(photo.title),
       parse_text(photo.summary), parse_text(photo.media.keywords),
       make_human_date(photo.timestamp.text), rot, parse_text(photo.checksum),
       where, photo.commentingEnabled.text, parse_text(photo.client),
       # EXIF
       parse_text(photo.exif.make), parse_text(photo.exif.model),
       photo.exif.time and photo.exif.time.isoformat(), parse_text(photo.exif.iso),
       parse_text(photo.exif.distance), parse_text(photo.exif.exposure),
       parse_text(photo.exif.flash), parse_text(photo.exif.focallength),
       parse_text(photo.exif.fstop), parse_text(photo.exif.imageUniqueID),
       # Static
       photo.width.text, photo.height.text, photo.size.text,
       string.join([parse_text(a.name) for a in photo.author], ', '),
       photo.published.text.replace('T', ' '),
       photo_repr.url, photo_repr.type, photo.commentCount.text))
        return self._photo_metadata[k]

    def get_usertags(self, force_fetch=False):
        """Return [(tag title, tag entry), ...] for the whole user,
        cached for CACHE_TIMEOUT seconds."""
        expired = self._usertags_last_seen + CACHE_TIMEOUT < time.time()
        if force_fetch or expired:
            feed = self.picasaweb.GetUserFeed(kind='tag')
            self._usertagFeed = feed
            self._usertags = [ (entry.title.text, entry) for entry in feed.entry ]
            self._usertags_last_seen = time.time()
        return self._usertags

    def get_photo(self, album_name, photo_name, force_fetch=False):
        """Return the photo entry called photo_name in album_name, or None."""
        for name, entry in self.get_photos(album_name, force_fetch):
            if name == photo_name:
                return entry
        return None

    def get_album_tags(self, album_name, force_fetch=False):
        """Return [(tag title, tag entry), ...] for one album, cached
        for CACHE_TIMEOUT seconds."""
        album = self._albums[album_name]
        key = album.title.text
        stale = key not in self._tags or \
            self._tags_last_seen[key] + CACHE_TIMEOUT < time.time()
        if force_fetch or stale:
            feed = self.picasaweb.GetFeed(album.GetTagsUri())
            self._tagFeed[key] = feed
            self._tags[key] = [ (t.title.text, t) for t in feed.entry ]
            self._tags_last_seen[key] = time.time()
        return self._tags[key]

    def get_comments(self, photo, force_fetch=False):
        """Return {comment id: comment entry} for photo, cached for
        CACHE_TIMEOUT seconds (keyed by the photo's atom id)."""
        key = photo.id.text
        stale = key not in self._comments or \
            self._comments_last_seen[key] + CACHE_TIMEOUT < time.time()
        if force_fetch or stale:
            feed = self.picasaweb.GetFeed(photo.GetCommentsUri())
            self._commentFeed[key] = feed
            by_id = {}
            for entry in feed.entry:
                by_id[entry.GetCommentId()] = entry
            self._comments[key] = by_id
            self._comments_last_seen[key] = time.time()
        return self._comments[key]

    def get_inode(self, path):
        """Return the Inode for path (cached), or None when it doesn't exist."""
        self.log.debug('Looking for %s in cache: %s' % (repr(path), self._inode_cache))
        cached = self._inode_cache.get(path)
        if cached is not None:
            return cached
        inode = self._inode(path)
        if inode is None:
            self.log.debug(" no Inode for path: %s" % path)
            return None
        self._inode_cache[path] = inode
        return inode

    def _inode(self, path):
        self.log.debug("p:"+path)
        ## in cache (created but not uploaded)?
        if self._file_cache.has_key(path):
            self.log.debug("in _file_cache: %s" % path)
            return FileInode(path=path)
        _path = [ x for x in path.split(os.path.sep) if x != '' ]
        self.log.debug("_inode decoded path: %s" % _path)
        photo = None
        if len(_path) == 0: ## 'root dir'
            return DirInode(path=path)
        elif _path[-1] in LOG_FILES: ## error log / debug log
            return FileInode(path=path, mode=0444, size=self.logstream.len)
        elif len(_path) == 1: ## 'album dir' / TAGS / COMMUNITY /
            self.log.debug(' _inode album: %s' % _path[0])
            if _path[0] == 'TAGS': ## requesting all tags for user
                return DirInode(path=path, mode=0555)
            elif _path[0] == 'COMMUNITY': ## doing community search
                return DirInode(path=path, mode=0777)
            self.log.debug("Returning album: %s" % repr(self.get_albums()[_path[0]]))
            return AlbumInode(self.get_albums()[_path[0]], path)
        elif len(_path) == 2: ## 'photo dir' / UPLOADS / TAGS
            if _path[1] == 'TAGS': ## requesting all tags in album
                return DirInode(path=path, mode=0555)
            elif _path[0] == 'TAGS': ## requesting all tags for user
                return DirInode(path=path, mode=0555)
            elif _path[1] in UPLOAD_DIRS: ## doing an upload
                return DirInode(path=path, mode=0755)
            # if we've come this far, we're looking at an image
            photo = self.get_photo(_path[0], _path[1])
            if photo is None: raise KeyError # photo/file not found
            return PhotoInode(photo, album=self.get_albums()[_path[0]], path=path)
        elif len(_path) == 3: ## 'photo attributes', UPLOADS, COMMENTS or TAGS
            if _path[2] in UPLOAD_DIRS: # copying file to /album/UPLOAD_xxx/.
                return FileInode(path=path, mode=0644)
            photo = self.get_photo(_path[0], _path[1])
            base, ext = os.path.splitext(_path[1])
            #if _path[2].startswith('%s_original' % base): ## requesting image original size
            if _path[2].startswith('original'): ## requesting image original size
                return PhotoAttrInode(photo, path=path, size=long(photo.size.text))
            #elif _path[2].startswith('%s_thumbnail_' % base): ## requesting a thumbnail
            elif _path[2].startswith('thumb_'): ## requesting a thumbnail
                try:
                    w,h = os.path.splitext(_path[2][_path[2].rfind('_')+1:])[0].split('x')
                    return PhotoAttrInode(photo, path=path, size=long(int(w)*int(h)*8), mode=0444)
                except ValueError:
                    pass
            #elif _path[2] == '%s_info.txt' % base: ## requesting text file with photo metadata
            elif _path[2] == '.directory': ## requesting text file with photo metadata
                #pdb.set_trace()
                metadata = self.get_photo_desktopentry(_path[1], _path[0])
                return FileInode(path=path,mode=0444,size=long(metadata.len))
            elif _path[2] == 'info.txt': ## requesting text file with photo metadata
                #pdb.set_trace()
                metadata = self.get_photo_metadata(_path[1], _path[0])
                return PhotoAttrInode(photo, path=path,mode=0644,size=long(metadata.len))
            elif _path[2] == 'COMMENTS': ## requesting photo comments
                if self.get_albums()[_path[0]].commentingEnabled.bool: mode=0777
                else: mode=0755
                return DirInode(path=path, mode=mode)
            elif _path[2] == '%s_tags.txt' % base: ## requesting photo tags
                try: _size=len(photo.media.keywords.text)
                except TypeError: _size=0L
                return PhotoAttrInode(photo, path, size=_size)
            elif _path[2] in METADATA_FILES.keys():
                timestamp = int(photo.timestamp.text)/1000
                if METADATA_FILES[_path[-1]] == 'description':
                    _size = photo.summary.text and long(len(photo.summary.text))
                elif METADATA_FILES[_path[-1]] == 'title':
                    _size = photo.title.text and long(len(photo.title.text))
                elif METADATA_FILES[_path[-1]] == 'keywords':
                    _size = photo.media.keywords.text and long(len(photo.media.keywords.text))
                elif METADATA_FILES[_path[-1]] == 'date':
                    _size = photo.timestamp.text and long(len(make_human_date(photo.timestamp.text)))
                elif METADATA_FILES[_path[-1]] == 'rotation':
                    _size = 1L
                elif METADATA_FILES[_path[-1]] == 'checksum':
                    #pdb.set_trace()
                    _size = photo.checksum.text and long(len(photo.checksum.text))
                elif METADATA_FILES[_path[-1]] == 'where':
                    try:
                        _size = long(len('%s x %s' % photo.geo.location()))
                    except TypeError:
                        _size = long(len('lat x lon'))
                elif METADATA_FILES[_path[-1]] == 'client':
                    _size = photo.client.text and long(len(photo.client.text))
                if _size is not None:
                    _size += 1L # add newline char
                return PhotoAttrInode(photo, path, size=_size or 0L)

        elif len(_path) == 4: ## COMMENTS
            self.log.debug(" returning _inode for comment: %s" % path)
            if _path[2] == 'COMMENTS': ## requesting a photo comment, /commentid/.txt
                photo = self.get_photo(_path[0], _path[1])
                comment_id = self._comment_id_to_pretty_name_map[_path[3]]
                comment = self.get_comments(photo)[comment_id]
                return CommentInode(comment, path=path)
            else:
                log.error("Don't know what to do with path: %s" % _path)
        else:
            log.error("retr none _inode")
            return None

    def chmod ( self, path, mode ):
        """Changing permission bits is not supported."""
        return -fuse.EOPNOTSUPP

    def chown ( self, path, uid, gid ):
        """Changing ownership is not supported."""
        return -fuse.EOPNOTSUPP

    def fsync ( self, path, isFsyncFile ):
        """No-op: there is no backing store to flush locally."""
        pass

    def getattr ( self, path ):
        """Return the stat tuple for path; raise OSError(ENOENT) when absent."""
        try:
            inode = self.get_inode(path)
            self.log.debug("inode "+str(inode))
        except KeyError: # no such path
            inode = False
        if not inode:
            self.log.error('No such file: %s' % path)
            err = OSError("No such file or dir: "+path)
            err.errno = fuse.ENOENT
            raise err
        return (inode.mode, inode.ino, inode.dev, inode.nlink,
                inode.uid, inode.gid, inode.size,
                inode.atime, inode.mtime, inode.ctime)

    def getdir ( self, path ):
        """List a directory: return [(name, 0), ...] for the entries at path.

        Depth 0 lists albums, depth 1 lists photos (or user tags),
        depth 2 lists a photo's virtual files, depth 3 lists comments.
        """
        self.log.debug("getdir "+path)
        _path = [ x for x in path.split(os.path.sep) if x != '' ]
        self.log.debug("%s" % _path)
        # entries present in every directory
        everywhere = [ (n,0) for n in LOG_FILES+('.','..') ]
        special_dirs = [ (n,0) for n in UPLOAD_DIRS+TAG_DIR ] + everywhere
        if len(_path) == 0: ## 'root dir' -> listing all albums and TAGS
            return [ (x, 0) for x in self.get_albums().keys() ] + everywhere
        elif len(_path) == 1: ## 'album dir' -> listing all photos, upload targets and TAGS
            self.log.debug(' getdir album: %s' % _path[0])
            if _path[0] == 'TAGS':
                return [ (x,0) for x,tag in self.get_usertags() ]
            return [ (p[0], 0) for p in self.get_photos(_path[0])]+special_dirs
        #elif len(_path) == 2 and _path[0] == 'TAGS':## User tag feed

        elif len(_path) == 2: ## 'photo dir' -> photo attributes, UPLOADS and TAGS
            self.log.debug(' getdir photo: %s' % _path[1])
            if _path[1] == 'TAGS':
                return [ (tag_title, 0) for tag_title, tag in self.get_album_tags(_path[0]) ] + everywhere
            if _path[1] in UPLOAD_DIRS: # listing an upload dir
                return [('.',0),] # upload dirs are always empty
            photo = self.get_photo(_path[0], _path[1])
            base, ext = os.path.splitext(_path[1])
            ext = FILETYPE2EXT[photo.media.content[0].type]
            #r = [('.',0),('..',0),]
            # NOTE(review): 'r += ...' mutates the 'everywhere' list too,
            # but 'everywhere' is rebuilt per call and not read after this
            # point, so the aliasing is benign here.
            r = everywhere
            r += [('.directory', 0)]
            r += [('info.txt', 0)]
            r += [('original.%s' % ext, 0)]
            for t in photo.media.thumbnail:
                r += [('thumb_%sx%s.%s' % (t.width, t.height, ext), 0)]
            r += [('COMMENTS', 0)]
            #r += [('%s_tags.txt' % base, 0)]
            r += [ (filename, 0) for filename in METADATA_FILES.keys() ]

            return r
        elif len(_path) == 3: ## COMMENTS
            if _path[2] == 'COMMENTS':
                self.log.debug(" getdir comments")
                photo = self.get_photo(_path[0], _path[1])
                r = [('.',0),('..',0),]
                for i, comment in enumerate(self.get_comments(photo).values()):
                    # file name is <index>_<author>.txt; remember the mapping
                    # back to the real comment id for later lookups
                    pretty = '%s_%s.txt' % (i, comment.author[0].user.text.encode('utf-8'))
                    r += [(pretty, 0)]
                    self._comment_id_to_pretty_name_map[pretty] = comment.GetCommentId()
                return r

    #def link ( self, targetPath, linkPath ):
        #self.log.debug("link", path)

    def mkdir ( self, path, mode ):
        """Create a new album for a top-level mkdir.

        Anything deeper returns EOPNOTSUPP: photos, tags and comments
        are created as files, not dirs.  Album creation is queued on the
        worker pool and awaited with queue.join().
        """
        self.log.debug("mkdir "+str(mode)+str(path))
        parts = [ x for x in path.split(os.path.sep) if x != '' ]
        if len(parts) != 1:
            # photos and tags and comments are created as files, not dirs
            return -fuse.EOPNOTSUPP
        albumname = parts[0].decode('utf8')
        if albumname.startswith('.'):
            return -fuse.EOPNOTSUPP # hidden files not allowed
        self.queue.put((self.upload_album, (albumname, 'Created with PicasaFS',), {}))
        self.queue.join()
        return

    ## WE DON'T IMPLEMENT THE CREATE PROTOCOL (WE'RE USING THE OLDER MKNOD+OPEN PROTOCOL INSTEAD)
    #def create (self, *args):
        #self.log.debug("create %s -- %s (%s) -- %s " % (path, mode, oct(mode), dev))

    def mknod ( self, path, mode, dev ):
        """Create an (empty) file node: only regular files inside an
        album dir, an UPLOAD dir or a COMMENTS dir are accepted."""
        # The file type must be one of S_IFREG, S_IFCHR, S_IFBLK, S_IFIFO or S_IFSOCK to
        # specify a normal file (which will be created empty), character special file, block
        # special file, FIFO (named pipe), or Unix domain socket, respectively.
        # (Zero file type is equivalent to type S_IFREG.)
        # -- MKNOD -- Linux Programmer's Manual (2)
        self.log.debug("mknod %s -- %s (%s) -- %s " % (path, mode, oct(mode), dev))
        if not stat.S_ISREG(mode): #not a regular file
            self.log.debug("mknod: not regular file: %s -- %s" % (path, mode))
            return -fuse.EOPNOTSUPP
        _path = [ x for x in path.split(os.path.sep) if x != '' ]
        if not len(_path) > 1 or \
            not self._inode_cache.has_key(os.path.split(path)[0]) or \
            not (_path[-2] in UPLOAD_DIRS or len(_path) == 2 or _path[-2] == 'COMMENTS'): # path is not in upload dir
            # The target is neither /album/. nor /album/UPLOAD_xxx/.
            return -fuse.EOPNOTSUPP
        # NOTE(review): the buffer is closed immediately after creation --
        # presumably only a placeholder so _inode() finds the path; open()
        # replaces it with a fresh StringIO.  Verify against write path.
        self._file_cache[path] = StringIO.StringIO()
        self._file_cache[path].close()
        return 0

    #def flush( self, path, info_struct):
       #"""This function is called to let the file system clean up any data buffers and to pass any errors in the process of closing a file to the user application.

        #http://fuse.sourceforge.net/wiki/index.php/FUSE%20tutorial
       #"""
       #self.log.debug("flush")

    def open ( self, path, flags ):
        """Prepare a fresh in-memory buffer for path; always succeeds."""
        self.log.debug("open %s -- %s "% (path, oct(flags)))
        buf = StringIO.StringIO()
        self._file_cache[path] = buf
        return 0

    def read ( self, path, length, offset ):
        """Read `length` bytes at `offset` from the virtual file at `path`.

        Dispatches on the path shape: debug log, per-photo tag file,
        comment file, metadata file, .directory entry, info.txt,
        thumbnail, or the image data itself.

        Take care not to return unicode objects!
        """
        self.log.debug("read path: %s, len: %s, offset: %s " % (path, length, offset))
        _path = [ x for x in path.split(os.path.sep) if x != '' ]
        if _path[-1] in LOG_FILES: ## reading the debug log
            # bugfix: honour both offset and length -- the old slice
            # [offset:length] returned a shrinking window as offset grew
            return self.logstream.getvalue()[offset:offset+length]
        base, ext = os.path.splitext(_path[-2])
        self.log.debug("base: %s" % base)
        photo = self.get_photo(_path[0], _path[1]) # returns None if no photo found

        if _path[-1].endswith('_tags.txt'): ## reading a TAGS _file_ for a specific image
            f = StringIO.StringIO()
            f.write(photo.media.keywords.text)
            # NOTE(review): offset/length are ignored here; assumes the tag
            # list fits in a single read -- confirm with very large tag sets
            return f.getvalue()
        elif len(_path) == 4 and _path[2] == 'COMMENTS': ## reading a comment
            # the comment id and author name is used as filename and
            # internal comments key
            comment_id = self._comment_id_to_pretty_name_map[_path[3]]
            comment = self.get_comments(photo)[comment_id]
            try:
                return comment.content.text
            except AttributeError: # Api is confused, use summary element instead of content
                return comment.summary.text
        elif _path[-1] in METADATA_FILES.keys():
            _f = StringIO.StringIO()
            f = codecs.getwriter('utf8')(_f) ## making sure buffer is valid utf8
            if METADATA_FILES[_path[-1]] == 'description':
                f.write(parse_text(photo.summary))
            elif METADATA_FILES[_path[-1]] == 'title':
                f.write(parse_text(photo.title))
            elif METADATA_FILES[_path[-1]] == 'keywords':
                f.write(parse_text(photo.media.keywords))
            elif METADATA_FILES[_path[-1]] == 'date':
                f.write(make_human_date(photo.timestamp.text))
            elif METADATA_FILES[_path[-1]] == 'rotation':
                # bugfix: the fallback must be the string '0' -- writing the
                # integer 0 to a codecs utf8 writer raises TypeError
                f.write(photo.rotation.text or '0')
            elif METADATA_FILES[_path[-1]] == 'checksum':
                f.write(parse_text(photo.checksum))
            elif METADATA_FILES[_path[-1]] == 'where':
                try:
                    f.write('%s x %s' % photo.geo.location())
                except TypeError:
                    f.write('lat x lon')
            elif METADATA_FILES[_path[-1]] == 'client':
                f.write(parse_text(photo.client))
            f.write('\n')
            f.seek(offset) # seek/read are delegated to the underlying buffer
            return f.read(length)
        elif len(_path) == 3 and _path[2] == '.directory':
            self.log.debug("reading photo desktopentry")
            f = self.get_photo_desktopentry(_path[1], _path[0])
            f.seek(offset)
            return f.read(length)
        else: ## reading an image file
            if _path[-1] == 'info.txt': ## requesting text file with photo metadata
                self.log.debug("reading photo metadata")
                f = self.get_photo_metadata(_path[1], _path[0])
                f.seek(offset)
                return f.read(length)
            if _path[-1].startswith('thumb_'): ## requesting a thumbnail
                w,h = os.path.basename(_path[-1][_path[-1].rfind('_')+1:]).split('x')
                # NOTE(review): if no thumbnail width matches, mediaURL stays
                # unbound and the GetMedia call below raises NameError
                for t in photo.media.thumbnail:
                    if t.width == w: mediaURL = t.url
            else:
                mediaURL = photo.content.src
            media = self.picasaweb.GetMedia(mediaURL)
            f = StringIO.StringIO(media.file_handle.read())
            f.seek(offset)
            return f.read(length)

    def readlink ( self, path ):
        """Stub: symlinks are not supported; the request is only logged."""
        self.log.debug("readlink %s " % path)

    def release ( self, path, flags ):
        """Called when the last handle on `path` is closed.

        This is where uploads actually happen: the bytes accumulated in
        self._file_cache[path] by write() are inspected and pushed to
        Picasaweb as tags, a metadata value, a comment, an info.txt
        update or a new image, depending on the path and content type.
        Long-running server calls are dispatched through self.queue.
        """
        # maybe do upload of path
        # flags
        # 0x8000 - file is fully written
        # 0x8001 - file was copied  (e.g. `cp')
        # 0x1    - file was written (e.g. `vim')
        # 0x0    - empty file (e.g. from `touch')
        self.log.debug("release/upload %s -- %s " % (path, oct(flags)))
        # NOTE(review): the flag table above says 0x8000 means "fully
        # written", yet this bails out on 0x8000 -- confirm which is intended
        if flags == 0x8000: return # the file is not yet fully written
        _path = [ x for x in path.split(os.path.sep) if x != '' ]
        album = self.get_albums()[_path[0]]
        try: photo = self.get_photo(_path[0], _path[1])
        except IndexError: pass
        valid_image = looks_like_valid_image(path, self._file_cache[path].getvalue())
        self.log.debug(' looks like valid image: %s',valid_image)
        if not valid_image: # not an image, so it must be text
            text_encoding = get_text_encoding(self._file_cache[path])
            self.log.debug("Found encoding of text file: %s" % text_encoding)
            if not text_encoding:
                text_encoding = 'utf8' # default
            if _path[-1].endswith('_tags.txt') and len(_path) == 3: # writing tags, comma-separated text
                for tag in self._file_cache[path].getvalue().split(','):
                    if not tag.strip(): continue
                    self.log.debug("uploading tag: %s" % tag)
                    #t = self.picasaweb.InsertTag(photo, tag.strip().decode(text_encoding))
                    #self.log.debug(t)
                    self.queue.put((self.picasaweb.InsertTag, (photo, tag.strip().decode(text_encoding)), {}))
                self.queue.join()
                self.queue.put((self.get_albums, (), {'force_fetch':True})) # update
                self.queue.join()
                return
            elif _path[-1] in METADATA_FILES.keys():
                newvalue = self._file_cache[path].getvalue().decode(text_encoding)
                update_needed = False
                old_photo = photo
                # build a minimal PhotoEntry carrying only the old photo's
                # links, then attach just the changed field
                photo = gdata.photos.PhotoEntry()
                photo.link.append(old_photo.GetEditLink())
                photo.link.append(old_photo.GetPostLink())
                photo.link.append(old_photo.GetSelfLink())
                if METADATA_FILES[_path[-1]] == 'description':
                    photo.summary = atom.Summary(text=newvalue)
                    update_needed = True
                elif METADATA_FILES[_path[-1]] == 'title':
                    photo.title = atom.Title(text=newvalue)
                    update_needed = True
                elif METADATA_FILES[_path[-1]] == 'keywords':
                    return self.update_tags(old_photo, newvalue.split(','))
                elif METADATA_FILES[_path[-1]] == 'date':
                    t = str(parse_human_date_to_milliepoch(newvalue))
                    photo.timestamp = gdata.photos.Timestamp(text=t)
                    update_needed = True
                elif METADATA_FILES[_path[-1]] == 'rotation':
                    photo.rotation = gdata.photos.Rotation(text=newvalue)
                    update_needed = True
                elif METADATA_FILES[_path[-1]] == 'checksum':
                    photo.checksum = gdata.photos.Checksum(text=newvalue)
                    update_needed = True
                elif METADATA_FILES[_path[-1]] == 'where':
                    photo.geo = gdata.geo.Where()
                    try:
                        photo.geo.set_location( tuple(float(s.strip()) for s in newvalue.split('x')) )
                    except:
                        self.log.debug('Skipping unparsable GPS value: %s' % newvalue)
                    update_needed = True
                elif METADATA_FILES[_path[-1]] == 'client':
                    # NOTE(review): assigns a Client *object* to .text, unlike
                    # the other branches which assign the object itself -- verify
                    photo.client.text = gdata.photos.Client(text=newvalue)
                    update_needed = True
                if update_needed:
                    self.queue.put((self.update_photo, (photo, _path[0]), {}))
                    self.queue.join()
                        #try:
                        #updatedphoto = self.picasaweb.UpdatePhotoMetadata(photo)

                        #self.log.info('photo updated; %s' % path)
                    #except gdata.service.RequestError, e:
                        #self.log.error('Update failed: %(body)s (%(status)s %(reason)s)' % e.message)
                    #self.queue.join()
                    #self.queue.put((self.get_albums, (), {'force_fetch':True})) # update
                    #self.queue.join()               #self.get_photos(_path[0], force_fetch=True)
                return 0
            elif _path[-2] == 'COMMENTS' and len(_path) == 4: # getting a comment: text
                comment = self._file_cache[path].getvalue().strip()
                if not comment: return False
                self.log.debug("uploading comment: %s" % comment)
                self.queue.put((self.upload_comment, (photo, comment.decode(text_encoding)), {}))
                #c = self.picasaweb.InsertComment(photo, comment.decode(text_encoding))
                #self._comments_last_seen[_path[1]] = -1 # pinging comment cache
                #self.get_comments(photo)
                return True
            elif flags in (0x1, 0x8001) and _path[-1] == 'info.txt':
                ## image metadata: ConfigParser text
                config = ConfigParser.ConfigParser()
                self._file_cache[path].seek(0)
                config.readfp(self._file_cache[path])
                _tags = []
                try:
                    # copy each editable key from the info file onto the photo
                    for key, value in config.items('Editable'):
                        value = value.decode(text_encoding)
                        self.log.debug('updating meta: %s > %s' % (key, value))
                        if key == 'filename': photo.title.text = value
                        elif key == 'checksum': photo.checksum.text = value
                        elif key == 'date taken': photo.timestamp.text = str(parse_human_date_to_milliepoch(value))
                        elif key == 'comments allowed': photo.commentingEnabled.text = value
                        elif key == 'description': photo.summary.text = value
                        elif key == 'keywords': _tags = [s.strip() for s in value.split(',') if s]
                        elif key == 'upload program':
                          if not value: value = 'PicasaFS'
                          photo.client.text = value
                        elif key == 'rotation':
                          try:photo.rotation.text = value
                          except AttributeError: photo.rotation = gdata.photos.Rotation(text=value)
                        elif key == 'where':
                          try:
                            photo.geo = gdata.geo.Where()
                            photo.geo.set_location( tuple(float(s.strip()) for s in value.split('x')) )
                          except:
                            self.log.debug('Skipping unparsable GPS value: %s' % value)
                except ConfigParser.NoSectionError:
                    # NOTE(review): ConfigParser instances have no error()
                    # method -- this line would raise AttributeError; verify
                    config.error('Invalid info file! %s' % path)
                    return False
                #photo.exif = None # zap read-only data to save bandwidth
                self.queue.put((self.update_photo, (photo, _path[0]), {}))
                self.queue.put((self.update_tags, (photo, _tags), {}))
                self.queue.join()
                return 0
                #self.queue.put((self.picasaweb.UpdatePhotoMetadata, (photo,), {}))
                #self.queue.join()
                #self.queue.put((self.get_albums, (), {'force_fetch':True})) # update
                #self.queue.join()               #self.get_photos(_path[0], force_fetch=True)
                #try:
                    #photo.exif = None # zap read-only data to save bandwidth
                    #updatedphoto = self.picasaweb.UpdatePhotoMetadata(photo)

                    #self.log.info('photo updated; %s' % path)
                    #self.update_tags(photo, _tags)
                #except gdata.service.RequestError, e:
                    #self.log.error('Update failed: %(body)s (%(status)s %(reason)s)' % e.message)
                ##self.log.debug(c)
                #self.get_photos(_path[0], force_fetch=True)
                # NOTE(review): unreachable -- this branch already returned 0
                return True
            elif flags == 0x0:
                # empty file commited, e.g. from 'touch' or closing a file without changes (vim)
                self.log.info('Empty file detected and overlooked')
                return False
            else:
                self.log.error('Unknown error')
                return -fuse.EOPNOTSUPP

        ## By now we know that we're dealing with an image
        # NOTE(review): this test uses _path[1] while the resize test below
        # uses _path[-2] -- confirm which element is the UPLOAD dir
        if _path[1] in UPLOAD_DIRS and \
            not looks_like_valid_image(path, self._file_cache[path].getvalue()): # not a valid image / no file extension?
            self.log.error("This does not look like an image: %s -- %s" % (path, oct(flags)))
            return -fuse.EOPNOTSUPP
        if USE_IMAGING and _path[-2] in UPLOAD_DIRS: # maybe resize the image
            try:
                w,h = [ int(i) for i in _path[1][_path[1].rfind('_')+1:].split('x') ]
                self._file_cache[path].seek(0) # rewind buffer
                self.log.info("Downsizing %s to %s" % (path, (w,h)))
                photo, resized = downsize_if_bigger((w,h), self._file_cache[path]) # get photo from cache
                if resized: #save resized photo to cache
                    self._file_cache[path] = photo
            except ValueError:
                self.log.info('Not resizing image (%s)' % path)
        ## do the image upload
        self._file_cache[path].seek(0) # rewind buffer
        album = self.get_albums()[_path[0]]
        self.log.debug('Going for upload of %s' % path)
        self.queue.put((self.upload_photo, (_path[0], album, make_easy_path(_path[-1].decode('utf8')),
                        _path[-1].decode('utf8'), self._file_cache[path], 'PicasaFS'), {}))
        self.queue.join()
        return 0
        ##pdb.set_trace()
        #self.queue.put((self.picasaweb.InsertPhotoSimple, (album,),
            #{
                #'title':make_easy_path(_path[-1].decode('utf8')),
                #'summary':_path[-1].decode('utf8'),
                #'filename_or_handle':self._file_cache[path], # get photo from cache
                #'keywords':'PicasaFS'
            #}
            #))
        #self.queue.join()
        #self.queue.put((self.get_albums, (_path[0],), {'force_fetch':True})) # update
        #self.queue.join()               #self.get_photos(_path[0], force_fetch=True)
        #return 0
        #try:
            #self._file_cache[path].seek(0) # rewind buffer
            #album = self.get_albums()[_path[0]]
            #self.log.debug('Going for upload of %s' % path)
            ##background(self.uploadimage,
                       ##_path[0],
                       ##make_easy_path(_path[-1].decode('utf8')),
                       ##_path[-1].decode('utf8'),
                       ##self._file_cache[path],
                       ##'PicasaFS')
            ##m = self.picasaweb.InsertPhotoSimple(album,
                ##title=make_easy_path(_path[-1].decode('utf8')),
                ##summary=_path[-1].decode('utf8'),
                ##filename_or_handle=self._file_cache[path], # get photo from cache
                ##keywords='PicasaFS')
            ##self.log.debug(m)
        #except KeyError: ## no such album or file cache
            #self.log.error("Image upload failed! bummer")
            #return -fuse.EREMOTEIO
        #finally:
            #self.log.info("File successfully uploaded to %s" % path)
            #self.get_photos(_path[0], force_fetch=True) # update

    def rename ( self, oldpath, newpath ):
        """Rename a photo by rewriting its summary.

        Only /album/photo -> /album/photo renames are supported; anything
        else returns EOPNOTSUPP. The actual server update is queued and
        this call blocks until the queue drains.
        """
        self.log.debug("rename %s -> %s" % (oldpath, newpath))
        # We only allow photos to be renamed.
        _opath = [ x for x in oldpath.split(os.path.sep) if x != '' ]
        _npath = [ x for x in newpath.split(os.path.sep) if x != '' ]
        # bugfix: log.debug wants a format string first; passing two bare
        # args made the logging module choke on record formatting
        self.log.debug("%s %s" % (_opath, _npath))
        if len(_opath) != 2 or len(_npath) != 2: # we only support renaming of photo titles,
            return -fuse.EOPNOTSUPP              # not albums
        album = self.get_albums()[_opath[0]] # raises KeyError for unknown albums
        photo = self.get_photo(_opath[0], _opath[1])
        self.log.debug("%s %s" % (type(_npath[1]), repr(_npath[1])))
        photo.summary.text = make_easy_path(_npath[1])
        photo.exif = None # zap read-only data to save bandwidth
        self.queue.put((self.update_photo, (photo, _opath[0]), {}))
        self.queue.join()
        # bugfix: this log line sat after an unconditional return (dead code)
        self.log.info("Image successfully renamed from %s to %s" % (oldpath, newpath))
        return

    def rmdir ( self, path ):
        """Remove an album (one path element) or a photo (two elements).

        Special directories (TAGS, COMMENTS, UPLOAD dirs) are silently
        ignored; albums that still contain photos are refused with
        ENOTEMPTY. Deletions are queued; the local caches are patched so
        the entry disappears immediately.
        """
        self.log.debug("rmdir")
        _path = [ x for x in path.split(os.path.sep) if x != '' ]
        if _path[-1] in ('TAGS', 'COMMENTS')+UPLOAD_DIRS: ## special dirs
            return #-fuse.EOPNOTSUPP
        if len(_path) == 1: ## 'root' level -> removing album
            try:
                album = self.get_albums(force_fetch=True)[_path[0]] # always refresh
            except KeyError:
                e = OSError("No such file or dir: "+path)
                e.errno = fuse.ENOENT
                raise e

            if int(album.numphotos.text) > 0: # never touch albums with photos
                return -fuse.ENOTEMPTY
            self.queue.put((self.picasaweb.Delete, (album.GetEditLink().href,), {}))
            #self.picasaweb.Delete(album.GetEditLink().href)
            self.log.info("Album deleted (was: %s)" % path)
            del self._albums[_path[0]] #hack to quickly return
            self._albums_last_seen = -1
        elif len(_path) == 2: ## 'album level' -> removing photo
            # NOTE(review): if no name matches, _name/_photo/photo are unbound
            # below and raise NameError -- confirm callers guarantee existence
            for _name, _photo in self.get_photos(_path[0]):
                if _name == _path[1]: photo = _photo
            self.queue.put((self.picasaweb.Delete, (photo.GetEditLink().href,), {}))
            #self.picasaweb.Delete(photo.GetEditLink().href)
            self._photos[_path[0]].remove((_name, _photo)) #hack to quickly return
            self._photos_last_seen[_path[0]] = -1
            self.log.info("Image deleted (was: %s)" % path)
        else: # photos and tags and comments are created as files, not dirs
            return -fuse.EOPNOTSUPP

    def statfs ( self ):
        """
        To provide usable information (ie., you want sensible df(1)
        output, you are suggested to specify the following attributes:

            - f_bsize - preferred size of file blocks, in bytes
            - f_frsize - fundamental size of file blcoks, in bytes
                [if you have no idea, use the same as blocksize]
            - f_blocks - total number of blocks in the filesystem
            - f_bfree - number of free blocks
            - f_files - total number of file inodes
            - f_ffree - nunber of free file inodes
        Should return a tuple with the following elements in respective order:

        F_BSIZE - Preferred file system block size. (int)
        F_FRSIZE - Fundamental file system block size. (int)
        F_BLOCKS - Total number of blocks in the filesystem. (long)
        F_BFREE - Total number of free blocks. (long)
        F_BAVAIL - Free blocks available to non-super user. (long)
        F_FILES - Total number of file nodes. (long)
        F_FFREE - Total number of free file nodes. (long)
        F_FAVAIL - Free nodes available to non-super user. (long)
        F_FLAG - Flags. System dependent: see statvfs() man page. (int)
        F_NAMEMAX - Maximum file name length. (int)
        Feel free to set any of the above values to 0, which tells
        the kernel that the info is not available.
        """
        self.log.debug("statfs")
        self.get_albums(force_fetch=False) # make sure cached values are ok
        F_BSIZE = 1024
        F_FILES = 0L
        F_FFREE = 0L
        F_NAMEMAX = 255
        quotacurrent = long(self._userFeed.quotacurrent.text)
        quotalimit = long(self._userFeed.quotalimit.text)
        _blocks = quotalimit / F_BSIZE
        _bfree = (quotalimit - quotacurrent) / F_BSIZE
        _bavail = _bfree
        return (F_BSIZE, _blocks, _bfree, _bavail,  F_FILES, F_FFREE, F_NAMEMAX)

    def setattr( self, *args ):
        """Stub: attribute changes are ignored; the request is only logged.

        Bugfix: '"setattr %s" % args' raises TypeError whenever args holds
        more than one element, so the tuple is wrapped before interpolation.
        """
        self.log.debug("setattr %s" % (args,))

    def symlink ( self, targetPath, linkPath ):
        """Stub: symlink creation is unsupported; the request is only logged."""
        message = "symlink %s -> %s" % (targetPath, linkPath)
        self.log.debug(message)

    def truncate ( self, path, size ):
        """Stub: truncation is ignored; the request is only logged."""
        message = "truncate %s: %s (b)" % (path, size)
        self.log.debug(message)

    def unlink ( self, path ):
        """Refuse unlink with EOPNOTSUPP.

        Photos are exposed as directories, so rmdir() is the supported way
        to delete them.
        """
        self.log.debug("unlink %s" % path)
        self.log.error("Photos are shown as directories, so use rmdir() to remove them (you don't need unlink)")
        errcode = -fuse.EOPNOTSUPP ## photos are shown as directories, so use rmdir() to remove them (you don't need unlink)
        return errcode

    def utime ( self, path, times ):
        """Stub: timestamp updates are ignored; the request is only logged."""
        message = "utime %s" % path
        self.log.debug(message)

    def write ( self, path, buf, offset ):
        """Buffer `buf` at `offset` in the in-memory cache for `path`.

        Nothing is sent to the server here; the upload happens later, in
        release(). Returns the number of bytes accepted.
        """
        cache = self._file_cache[path]
        cache.seek(offset)
        cache.write(buf)
        return len(buf)

    def update_tags(self, photo, new_tags):
        """Synchronise `photo`'s server-side tags with the `new_tags` list.

        Tags present in `new_tags` but not on the photo are inserted; tags
        on the photo but absent from `new_tags` are deleted. All server
        calls go through self.queue and this method blocks until the
        queue drains.
        """
        old_tags = [t.strip() for t in parse_text(photo.media.keywords).split(',')]
        self.log.debug("old tags:  %s" % repr(old_tags))
        for t in new_tags:
            tag = t.strip()
            if not tag: continue
            if tag in old_tags:
                # already on the server -- keep it, and remove it from
                # old_tags so the deletion loop below won't discard it
                old_tags.pop(old_tags.index(tag))
                continue
            self.log.debug("uploading tag: %s" % tag)
            #t = self.picasaweb.InsertTag(photo, tag)
            self.queue.put((self.picasaweb.InsertTag, (photo,tag), {}))
        for tag in old_tags:
            if not tag: continue
            self.log.debug('removing discarded tag: %s' % tag)
            href = '%s/tag/%s' % (photo.GetSelfLink().href, urllib.quote(tag.encode('utf8')))
            self.queue.put((self.picasaweb.Delete, (href,), {}))
            #self.picasaweb.Delete(href)
        self.queue.join()

    def upload_photo(self, albumname, album, title, summary, file_cache, keywords):
        """Insert a photo into `album`, then refresh that album's photo cache."""
        self.picasaweb.InsertPhotoSimple(
            album,
            title=title,
            summary=summary,
            filename_or_handle=file_cache,
            keywords=keywords)
        # force a refresh so the new photo shows up straight away
        self.get_photos(albumname, force_fetch=True)
        return 0

    def upload_tag(self, photo, tag):
        """Attach a single keyword/tag to `photo` on the server."""
        self.picasaweb.InsertTag(photo, tag)

    def upload_comment(self, photo, comment):
        """Post `comment` on `photo`, then refresh the comment cache.

        The InsertComment return value was previously bound to an unused
        local; it is now discarded explicitly.
        """
        self.picasaweb.InsertComment(photo, comment)
        self.get_comments(photo, force_fetch=True)

    def update_photo(self, photo, album):
        """Push changed metadata for `photo`, then refresh `album`'s photo cache."""
        self.picasaweb.UpdatePhotoMetadata(photo)
        # re-fetch so subsequent reads see the new metadata
        self.get_photos(album, force_fetch=True)

    def upload_album(self, name, comment):
        """Create a new album called `name`, then refresh the album cache."""
        self.picasaweb.InsertAlbum(name, comment)
        # re-fetch so the new album is visible immediately
        self.get_albums(force_fetch=True) # update

def looks_like_valid_image(path, buf=None):
    """Checks to see if a path is/looks like an image we can upload. Returns bool.

    Tries, in order: libmagic mime sniffing of `buf`, PIL parsing of
    `buf`, and finally a mimetype guess from the file name.
    """
    valid_formats = ('jpeg', 'jpg', 'bmp', 'gif', 'png')
    logging.info("looks_like_valid_image")
    # TODO: Check size (max 20 MiB)
    if USE_MAGIC and buf is not None:
        logging.debug("checking magic")
        mime = magic.open(magic.MAGIC_MIME)
        mime.load()
        funkymagic = mime.buffer(buf[0:4096])
        logging.info("found magic:%s" % funkymagic)
        mime.close()
        _class, _format = funkymagic.split("/") # image/jpeg -> image, jpeg
    elif USE_IMAGING and buf is not None:
        logging.debug('trying to load PIL image')
        try:
            stream = ImageFile.Parser()
            stream.feed(buf)
            im = stream.close()
            return im.format.lower() in valid_formats
        except IOError: # PIL does not recognise this as an image
            return False
    else:
        logging.debug('guessing file path')
        try:
            content_type = mimetypes.guess_type(path)[0]
            _class, _format = content_type.split("/") # image/jpeg -> image, jpeg
        except (AttributeError, ValueError): # guess_type gave None, or no '/' in type
            # bugfix: the old `except AttributeError, IndexError` only caught
            # AttributeError (binding it to the name IndexError); unpacking a
            # split() result raises ValueError, not IndexError
            return False
    logging.debug('found format: %s / %s', _class, _format)
    return _class == 'image' and _format in valid_formats

def downsize_if_bigger(resize_tuple, image_data):
    """Scale the image in `image_data` down to fit within `resize_tuple`.

    Returns (buffer, was_resized). Images whose pixel area is already
    below the target area are passed through untouched. Aspect ratio is
    preserved; output is JPEG at quality 85.
    """
    original = Image.open(image_data)
    if original.size[0] * original.size[1] < resize_tuple[0] * resize_tuple[1]: # image is smaller than target
        logging.debug(" image is already smaller than %s: %s" % (resize_tuple, original.size))
        return image_data, False
    is_landscape = original.size[0] > original.size[1]
    long_side = max(resize_tuple)
    ratio = float(long_side) / float(max(original.size))
    short_side = int(min(original.size) * ratio)
    if is_landscape:
        resize_tuple = (long_side, short_side)
    else:
        resize_tuple = (short_side, long_side)
    shrunk = original.resize(resize_tuple, Image.ANTIALIAS)
    out = StringIO.StringIO()
    shrunk.save(out, 'JPEG', quality=85)
    logging.debug(" image is resized to %s: %s (was %s)", resize_tuple, shrunk.size, original.size)
    return out, True

def get_text_encoding(bufr):
    """Guess the character encoding of the text in `bufr` via libmagic.

    `bufr` may be a file-like object or a string/buffer. Returns the
    charset name (e.g. 'us-ascii'), or None when libmagic is unavailable
    or reports no charset.
    """
    if not USE_MAGIC:
        return None
    if hasattr(bufr, 'read'):
        # NOTE(review): this advances the stream position without rewinding
        # afterwards -- confirm callers re-seek before reusing the buffer
        chunk = bufr.read(4096)
    else:
        length = 4096
        if len(bufr) < length : length = len(bufr)
        chunk = bufr[0:length]
    logging.debug("checking magic")
    mime = magic.open(magic.MAGIC_MIME)
    mime.load()
    funkymagic = mime.buffer(chunk)
    logging.info("found magic:%s" % funkymagic)
    mime.close()
    try:
        return funkymagic.split("; charset=")[1].strip() # text/plain; charset=us-ascii
    except IndexError: # no charset component in the magic string
        return None


def make_easy_path(path):
    """Return a string with only ascii characters, for pretty url's.

    The extension is stripped and every character outside [A-Za-z0-9+_-]
    is replaced by '_'. Non-string input yields None.
    """
    if not isinstance(path, (str, unicode)):
        return None
    base = os.path.splitext(path)[0] # the extension was never used
    return re.sub(r'[^A-Za-z0-9+_-]', "_", base.strip())

def make_human_date(EpochMilliseconds):
    """Render an epoch-milliseconds value as 'YYYY-MM-DD HH:MM:SS' (UTC)."""
    seconds_since_epoch = float(EpochMilliseconds) / 1000
    utc = time.gmtime(seconds_since_epoch)
    return time.strftime('%Y-%m-%d %H:%M:%S', utc)

def parse_human_date_to_milliepoch(humandate):
    return int(time.mktime(time.strptime(humandate, '%Y-%m-%d %H:%M:%S')))*1000

def parse_text(obj):
    """Return obj.text decoded from utf8, or u'' when obj/text is unusable."""
    try:
        raw = obj.text
        return raw.decode('utf8')
    except AttributeError: # obj is None, has no .text, or .text is None
        return u''

def get_smallest_thumbnail(media_thumbnail_list):
    """Helper function to get the smallest thumbnail of a list of
        gdata.media.Thumbnail.
    Returns gdata.media.Thumbnail """
    # map pixel area -> thumbnail, then pick the smallest area.
    # sorted() replaces the old `keys = r.keys(); keys.sort()` which only
    # worked on Python 2 (dict.keys() is a view on Python 3).
    by_area = {}
    for thumb in media_thumbnail_list:
        by_area[int(thumb.width)*int(thumb.height)] = thumb
    return by_area[sorted(by_area)[0]]

def background(func, *args, **kw):
    """Run 'func' as a thread, logging any exceptions it throws.

    To run

      somefunc(arg1, arg2='value')

    as a thread, do:

      background(somefunc, arg1, arg2='value')

    Any exceptions thrown are logged as errors, and the traceback is logged.
    """
    # Bugfix: the old body called thread.start_new_thread, but only the
    # `threading` module is imported at the top of the file, and no
    # exception was ever logged despite the docstring's promise.
    def _run():
        try:
            func(*args, **kw)
        except Exception:
            logging.exception("background thread %r failed" % (func,))
    worker = threading.Thread(target=_run)
    worker.setDaemon(True) # don't block interpreter shutdown
    worker.start()

class path_chopper:
    """Split a mounted filesystem path into its PicasaFS components.

    Layouts recognised by __init__ (judging from the branches below):

      <album>/TAGS/...     -> tag browsing     (tags_path = True)
      <album>/UPLOAD*/...  -> upload staging   (upload_path = True)
      <album>/COMMENTS     -> comments         (branch is a no-op)

    NOTE(review): this class looks unfinished; see the inline notes on
    the suspicious lines.
    """
    album = None         # first path component (album directory name)
    path  = None         # never assigned by __init__ as written
    file  = None         # file name component (UPLOAD case only)
    tags_path = False    # True when 'TAGS' appears in the path
    upload_path = False  # True when a component starts with 'UPLOAD'
    tag   = None         # never assigned by __init__ as written

    def __init__(self, path):
        # Split on the OS path separator, dropping empty components
        # (leading '/', doubled separators, trailing '/').
        _path = [ x for x in path.split(os.path.sep) if x != '' ]
        if 'TAGS' in _path: ## Tag case
            self.tags_path = True
            self.album = _path[0]
            if _path.index('TAGS') > 1:
                # NOTE(review): assigns self.photo, which is not among the
                # class attributes declared above -- possibly self.file
                # was intended.  Verify against callers.
                self.photo = _path[1]
            #else:
                #if
        elif True in [ s.startswith('UPLOAD') for s in _path ]: ## UPLOADING
            self.upload_path = True
            self.album = _path[0]
            # NOTE(review): this tests the length of the album *name*
            # string; len(_path) == 3 (album/UPLOAD/file) was probably
            # meant -- confirm before changing.
            if len(_path[0]) == 3:
                self.file = _path[2]
        elif 'COMMENTS' in _path: ## COMMENTS
            # Not implemented: COMMENTS paths are recognised but ignored.
            pass

if __name__ == '__main__':
    DEBUG = False
    if '-d' in sys.argv:
        sys.argv.remove('-d')
        DEBUG = True
        import httplib
        httplib.HTTPConnection.debuglevel=1  ## verbosely output http traffic

    if not len(sys.argv) > 1:
        import tempfile
        d = tempfile.mkdtemp()
        try: sys.argv[1] = d
        except IndexError: sys.argv.append(d)
        logging.debug(d)
    print "Mounting PicasaFS at %s" % sys.argv[1]
    options = {}
    server = picasafs(options, sys.argv[1])
    server.multithreaded = 1
    server.debug = (DEBUG and 1) or 0
    server.main()


"""
Apropos open flags

The parameter flags must include one of the following access modes: O_RDONLY, O_WRONLY, or O_RDWR. These request opening the file read-only, write-only, or read/write, respectively.

In addition, zero or more file creation flags and file status flags can be bit-wise-or'd in flags. The file creation flags are O_CREAT, O_EXCL, O_NOCTTY, and O_TRUNC. The file status flags are all of the remaining flags listed below. The distinction between these two groups of flags is that the file status flags can be retrieved and (in some cases) modified using fcntl(2). The full list of file creation flags and file status flags is as follows:Flag name	description
O_APPEND	The file is opened in append mode. Before each write(), the file offset is positioned at the end of the file, as if with lseek(). O_APPEND may lead to corrupted files on NFS file systems if more than one process appends data to a file at once. This is because NFS does not support appending to a file, so the client kernel has to simulate it, which can't be done without a race condition.
O_ASYNC	Enable signal-driven I/O: generate a signal (SIGIO by default, but this can be changed via fcntl(2)) when input or output becomes possible on this file descriptor. This feature is only available for terminals, pseudo-terminals, sockets, and (since Linux 2.6) pipes and FIFOs. See fcntl(2) man page for further details.
O_CREAT	If the file does not exist it will be created. The owner (user ID) of the file is set to the effective user ID of the process. The group ownership (group ID) is set either to the effective group ID of the process or to the group ID of the parent directory (depending on filesystem type and mount options, and the mode of the parent directory, see, e.g., the mount options bsdgroups and sysvgroups of the ext2 filesystem, as described in mount(8) man page).
O_DIRECT	Try to minimize cache effects of the I/O to and from this file. In general this will degrade performance, but it is useful in special situations, such as when applications do their own caching. File I/O is done directly to/from user space buffers. The I/O is synchronous, i.e., at the completion of a read(2) or write(2), data is guaranteed to have been transferred. Under Linux 2.4 transfer sizes, and the alignment of user buffer and file offset must all be multiples of the logical block size of the file system. Under Linux 2.6 alignment to 512-byte boundaries suffices.
O_DIRECTORY	If pathname is not a directory, cause the open to fail. This flag is Linux-specific, and was added in kernel version 2.1.126, to avoid denial-of-service problems if opendir(3) is called on a FIFO or tape device, but should not be used outside of the implementation of opendir.
O_EXCL	When used with O_CREAT, if the file already exists it is an error and the open() will fail. In this context, a symbolic link exists, regardless of where it points to. O_EXCL is broken on NFS file systems; programs which rely on it for performing locking tasks will contain a race condition. The solution for performing atomic file locking using a lockfile is to create a unique file on the same file system (e.g., incorporating hostname and pid), use link(2) to make a link to the lockfile. If link() returns 0, the lock is successful. Otherwise, use stat(2) on the unique file to check if its link count has increased to 2, in which case the lock is also successful.
O_LARGEFILE	(LFS) Allow files whose sizes cannot be represented in an off_t (but can be represented in an off64_t) to be opened. The _LARGEFILE64_SOURCE macro must be defined in order to obtain this definition. Setting the _FILE_OFFSET_BITS feature test macro to 64 (rather than using O_LARGEFILE) is the preferred method of accessing large files on 32-bit systems (see feature_test_macros(7) man page).
O_NOATIME	(Since Linux 2.6.8) Do not update the file last access time (st_atime in the inode) when the file is read(2). This flag is intended for use by indexing or backup programs, where its use can significantly reduce the amount of disk activity. This flag may not be effective on all filesystems. One example is NFS, where the server maintains the access time.
O_NOCTTY	If pathname refers to a terminal device -- see tty(4) man page -- it will not become the process's controlling terminal even if the process does not have one.
O_NOFOLLOW	If pathname is a symbolic link, then the open fails. This is a FreeBSD extension, which was added to Linux in version 2.1.126. Symbolic links in earlier components of the pathname will still be followed.
O_NONBLOCK or O_NDELAY	When possible, the file is opened in non-blocking mode. Neither the open() nor any subsequent operations on the file descriptor which is returned will cause the calling process to wait. For the handling of FIFOs (named pipes), see also fifo(7) man page. For a discussion of the effect of O_NONBLOCK in conjunction with mandatory file locks and with file leases, see fcntl(2) man page.
O_SYNC	The file is opened for synchronous I/O. Any write() s on the resulting file descriptor will block the calling process until the data has been physically written to the underlying hardware.
O_TRUNC	If the file already exists and is a regular file and the open mode allows writing (i.e., is O_RDWR or O_WRONLY) it will be truncated to length 0. If the file is a FIFO or terminal device file, the O_TRUNC flag is ignored. Otherwise the effect of O_TRUNC is unspecified.
"""