from google.appengine.api.datastore_file_stub import DatastoreFileStub as DFS
from google.appengine.datastore import entity_pb
from google.appengine.api import datastore_types
from google.appengine.api import datastore

from pdb import runcall, set_trace as debug
from optparse import OptionParser
from pickletools import dis as dis_pickle
from pickle import load as load_pickle, dump as dump_pickle
from gc import collect as gc
from code import InteractiveConsole as IC
from sys import stdout

import readline

from common.timing import Timing
from my.interactive import examine

class StoredEntity(object):
    """Holds a datastore entity in native form only.

    The protobuf representations are rendered on demand from the native
    entity instead of being cached (see the commented-out assignments in
    __init__), trading CPU for a smaller resident footprint.
    """

    def __init__(self, entity):
        # `entity` is an entity_pb.EntityProto; keep only the native form.
        # self.protobuf = entity
        # self.encoded_protobuf = entity.Encode()
        self.native = datastore.Entity._FromPb(entity)

    # Todo: render these dynamically in the DFS.
    @property
    def protobuf(self):
        # Used in _Dynamic_Get and _Dynamic_GetSchema
        entity = entity_pb.EntityProto()
        entity.CopyFrom(self.native.ToPb())

        return entity

    @property
    def encoded_protobuf(self):
        # Used only in __WriteDatastore
        # Bug fix: there is no `self.entity` attribute (only self.native is
        # ever assigned), so this property always raised AttributeError.
        # Encode the protobuf rendered from the stored native entity.
        return self.protobuf.Encode()

    def __repr__(self):
        return '%s: %s' % (self.__class__.__name__,
                           dict.__repr__(self.native))

    def toData(self, key_name = None, no_key = False):
        """Return the entity as a plain dict.

        key_name: dict key under which the entity's own key is stored
                  (defaults to '__key__').
        no_key:   when True, omit the entity key entirely.
        """
        d = dict(self.native)
        if not no_key:
            # Renamed from `id` to avoid shadowing the builtin.
            key_id = self.native.key().id_or_name()

            # Numeric names/ids become ints; other names pass through.
            try: key_id = int(key_id)
            except ValueError:
                pass

            d[key_name or '__key__'] = key_id

        return d

def getStoredEntity(entity, options):
    """Wrap `entity` in a StoredEntity; reduce it to a plain dict when
    --just-data was requested."""
    stored = StoredEntity(entity)

    if not options.just_data:
        return stored

    return stored.toData(options.key_name, options.no_key)

def getAppKind(key):
    """Return the kind of the entity that `key` refers to.

    Mirrors DatastoreFileStub._AppIdNamespaceKindForKey, minus the
    app-id/namespace half (left commented out below).
    """
    # appid = datastore_types.EncodeAppIdNamespace(key.app(), key.name_space())
    elements = key.path().element_list()
    return elements[-1].type()

def getKeyId(key):
    """Return a stable identifier derived from `key`'s path.

    Joins the id/name of every path element with '-'.  A purely numeric
    result is returned as an int; otherwise the joined string itself is
    returned.  (Previously the function fell off the end and returned
    None for any name-based key, collapsing all such entities onto a
    single db[kind][None] slot.)
    """
    def getPathElement(e):
        # A path element may carry a name, an id, or (unusually) both.
        if e.has_name():
            if e.has_id():
                return '%s%s' % (str(e.name()), str(e.id()))

            return str(e.name())
        elif e.has_id():
            return str(e.id())

        return ''

    keyId = '-'.join(getPathElement(e) for e in key.path().element_list())
    if keyId.isdigit():
        return int(keyId)

    # Bug fix: fall back to the string form instead of dropping to None.
    return keyId

def clear_line(line):
    """Overwrite the current terminal line with `line` (no newline)."""
    # Blank out the old content, return the carriage, then rewrite.
    blanks = ' ' * len(line)
    stdout.write('\r%s\r%s' % (blanks, line))
    stdout.flush()

def load_pickle_stream(fl):
    """Yield successive pickled objects from `fl` until EOF."""
    while True:
        try:
            item = load_pickle(fl)
        except EOFError:
            # End of the concatenated pickle stream.
            return
        yield item

import simplejson as json
class KeyEncoder(json.JSONEncoder):
    """JSON encoder that serializes datastore Keys as their id or name."""

    def default(self, key):
        if not isinstance(key, datastore_types.Key):
            raise TypeError(repr(key) + ' is not JSON serializable')

        # Q: can we infer a general-purpose reference type
        #    to mandate .to_path()?
        id_or_name = key.id_or_name()

        try:
            return int(id_or_name)
        except ValueError:
            return id_or_name

def toJson(db, filename):
    """Write `db` to `filename` as indented JSON, using KeyEncoder for
    datastore Keys."""
    # Bug fix: the file handle was opened inline and never closed;
    # `with` closes (and flushes) it deterministically.
    with open(filename, 'w') as fl:
        json.dump(db, fl, cls = KeyEncoder, indent = 1)

def examine_pickle(path, options):
    """Inspect a pickle file of encoded datastore entities.

    Exactly one mode runs, chosen from `options` (first match wins):

      --count    unpickle a single list and print its length.
      --store    decode every entity into a nested dict
                 db[kind][key_id] -> entity (showing progress), then
                 either dump it as JSON (--jsonify FILE) or open an
                 interactive shell on it.
      --stream   copy the first --nr-records entities into a new
                 pickle-stream file named by --stream.
      (default)  disassemble the pickle(s) with pickletools.dis.

    --is-stream marks `path` as a concatenated sequence of pickles
    rather than one pickled list.
    """
    # NOTE(review): the handle is never closed; acceptable for a
    # one-shot command-line tool.
    fl = open(path)
    if options.count:
        entities = load_pickle(fl)
        print len(entities), 'pickles'

    elif options.store:
        # db maps kind -> {key id -> StoredEntity (or plain dict when
        # --just-data is set; see getStoredEntity)}.
        db = {}

        if options.is_stream:
            stream = load_pickle_stream(fl)
        else:
            stream = load_pickle(fl)
            total = len(stream)  # total is only knowable in list mode

        nr = 0
        timing = Timing()

        for encoded in stream:
            entity = entity_pb.EntityProto(encoded)
            nr += 1

            key = entity.key()
            app_kind = getAppKind(key)
            if app_kind not in db:
                db[app_kind] = {}

            # Progress display: percentage when the total is known,
            # a plain running counter for an open-ended stream.
            if not options.is_stream:
                clear_line('Read entity %-6d / %-6d (%2.2f%%)' % \
                           (nr, total, (float(nr) / total) * 100))
            else:
                clear_line('Read entity %-6d...' % nr)

            key_id = getKeyId(key)
            db[app_kind][key_id] = getStoredEntity(entity, options)

        print
        print timing

        if options.jsonify:
            print 'Writing JSON to %r...' % options.jsonify
            toJson(db, options.jsonify)
        else:
            examine(db = db, globals = globals())

    elif options.stream:
        # Copy entities one-by-one into a new pickle-stream file.
        stream = open(options.stream, 'w')

        # --nr-records is required here: it bounds the copy and scales
        # the progress display.
        nr_records = options.nr_records
        assert nr_records

        if options.is_stream:
            data_stream = load_pickle_stream(fl)
        else:
            data_stream = load_pickle(fl)

        nr = 0
        for entity in data_stream:
            nr += 1
            dump_pickle(entity, stream)

            clear_line('Read entity %-6d / %-6d (%2.2f%%)' % \
                       (nr, nr_records, (float(nr) / nr_records) * 100))

            if nr >= nr_records:
                break

        print
        stream.close()

    else:
        # No mode flag: just disassemble the raw pickle opcodes.
        if options.is_stream:
            try:
                while True:
                    dis_pickle(fl)

            except EOFError:
                pass
        else:
            dis_pickle(fl)

class TrackedDFS(DFS):
    """DatastoreFileStub subclass that traces every Read() call."""

    def __init__(self, appid, datapath, options):
        # NOTE(review): options is stored before the base __init__ runs,
        # presumably so it is available if the base constructor triggers
        # Read() -- confirm against DatastoreFileStub before reordering.
        self.options = options
        super(TrackedDFS, self).__init__(appid, datapath)

    def Read(self):
        # Delegate to the real stub, then log the returned value.
        result = super(TrackedDFS, self).Read()
        print '%s::Read => %r' % (self.__class__.__name__, result)
        return result

from google.appengine.datastore import datastore_sqlite_stub

def examine_sqlite_datastore(datapath, appid, options):
    """Open `datapath` via the SQLite datastore stub and drop into an
    interactive shell on it."""
    # Local renamed from `datastore` to avoid shadowing the module-level
    # `datastore` import.
    stub = datastore_sqlite_stub.DatastoreSqliteStub(appid, datapath)
    examine(datastore = stub,
            datapath = datapath,
            appid = appid,
            options = options)

def main(argv = None):
    """Command-line entry point for inspecting a dev_appserver datastore.

    Modes (mutually exclusive, first match wins):
      --debug     step through DatastoreFileStub construction under pdb
      --unpickle  inspect --datastore-path as a pickle file
                  (see examine_pickle for its sub-flags)
      --sqlite    open the path as a SQLite datastore stub
      --track     load the path through the Read()-logging TrackedDFS
    """
    parser = OptionParser()
    parser.add_option('--debug', action = 'store_true')
    parser.add_option('--appid', default = 'cbanis')
    parser.add_option('--datastore-path', '--datastore',
                      default = 'dev_appserver.datastore')

    parser.add_option('--sqlite', action = 'store_true')
    parser.add_option('--unpickle', action = 'store_true')
    parser.add_option('--count', action = 'store_true')
    parser.add_option('--store', action = 'store_true')

    # Bug fix: --track was consulted below but never declared, so optparse's
    # Values object had no `track` attribute and every run that reached the
    # final elif crashed with AttributeError.
    parser.add_option('--track', action = 'store_true')

    parser.add_option('--is-stream', action = 'store_true')
    parser.add_option('--stream')
    parser.add_option('--nr-records', type = int)
    parser.add_option('--jsonify')

    parser.add_option('--just-data', action = 'store_true')
    parser.add_option('--key-name')
    parser.add_option('--no-key', action = 'store_true', default = False)

    (options, args) = parser.parse_args(argv)

    appid = options.appid
    datapath = options.datastore_path

    if options.debug:
        dfs = runcall(DFS, appid, datapath)
        # examine(dfs = dfs)
    elif options.unpickle:
        examine_pickle(datapath, options)
    elif options.sqlite:
        examine_sqlite_datastore(datapath, appid, options)
    elif options.track:
        dfs = TrackedDFS(appid, datapath, options)

# Script entry point: parse CLI flags and dispatch (see main()).
if __name__ == '__main__':
    main()
