#!/usr/bin/python

import hashlib;
import store;
import os;
import re;
import time;

def ReallyMkDir(path):
    """Create directory `path`, ignoring the error if it already exists.

    Only the already-exists case is swallowed; other OS errors (permission
    denied, missing parent directory) propagate, instead of being hidden by
    the bare `except:` the original used.
    """
    try:
        os.mkdir(path)
    except FileExistsError:
        # Expected on re-runs against an existing store tree.
        print("{0} Already exists.".format(path))


class LocalStore(store.StoreInterface):
    """Chunk/metadata store backed by a local directory tree.

    Layout under `dir`:
        <dir>/localstore/data       -- chunks, one file per chunk, named by
                                       the md5 hex digest of its contents
        <dir>/localstore/snapshots  -- metadata snapshots, named
                                       'metadata-<version>-<timestamp>'
    """

    # Snapshot filenames look like 'metadata-<version>-<timestamp>'.
    # Compiled once here instead of duplicated in ListMetadata/GetMetadata.
    _METADATA_RE = re.compile(r'metadata-(\d*)-(.*)')

    def __init__(self, dir):
        self.md5_dir = dir + "/localstore/data"
        self.snapshots_dir = dir + "/localstore/snapshots"
        ReallyMkDir(dir + "/localstore")
        ReallyMkDir(self.md5_dir)
        ReallyMkDir(self.snapshots_dir)

    # Returns a map from metadata version number to timestamp
    def ListMetadata(self):
        """Return a dict mapping metadata version number -> timestamp string."""
        versions = {}
        for name in os.listdir(self.snapshots_dir):
            match = self._METADATA_RE.match(name)
            if match:
                versions[int(match.group(1))] = match.group(2)
        return versions

    # Returns a string containing the contents of the given version number
    def GetMetadata(self, version_number):
        """Return the raw bytes of metadata version `version_number`.

        Raises Exception if no snapshot with that version number exists.
        """
        for name in os.listdir(self.snapshots_dir):
            match = self._METADATA_RE.match(name)
            if match and int(match.group(1)) == version_number:
                # `with` guarantees the handle is closed; the original
                # returned out of the function with the file still open.
                with open("{0}/{1}".format(self.snapshots_dir, name), "rb") as fh:
                    return fh.read()
        raise Exception("metadata %d not found" % version_number)

    # Attempts to store 'contents' as metadata version
    # 'version_number'.  It returns false if it already exists (and also
    # forbid concurrent puts of the same new version).
    # TODO(dpeng): Is this actually implementable??
    def PutMetadata(self, version_number, contents):
        """Store `contents` as metadata version `version_number`.

        Returns False (writing nothing) if that version already exists,
        True on success -- the contract the original comment promised but
        never implemented.  NOTE(review): this check-then-write is not
        atomic, so two concurrent puts of the same new version can still
        both succeed; the original TODO about implementability stands.
        """
        if version_number in self.ListMetadata():
            return False
        filename = ('metadata-%d-%s'
                    % (version_number, time.asctime(time.gmtime())))
        # `with` flushes and closes the snapshot; the original leaked the
        # handle, so buffered contents could be lost.
        with open("{0}/{1}".format(self.snapshots_dir, filename), "wb") as fh:
            fh.write(contents)
        return True

    # Stores the filehandle's contents.
    def PutChunk(self, filehandle_factory, expected_md5, expected_length):
        """Store the factory's stream as a chunk named by its md5 digest.

        The data is copied to a temp file while hashing, validated against
        `expected_md5` and `expected_length`, then renamed into place.
        Raises Exception on a length or checksum mismatch; the temp file is
        removed on any failure instead of being left behind.
        """
        with filehandle_factory() as filehandle:
            temp_filename = self.md5_dir + "/temp"
            digest = hashlib.md5()
            bytes_read = 0
            try:
                # Copy in 1MB pieces so arbitrarily large chunks never have
                # to fit in memory.  The `with` closes (and flushes) out_fh
                # BEFORE the rename below; the original renamed while the
                # handle was still open, risking unflushed buffered data.
                with open(temp_filename, "wb") as out_fh:
                    data = filehandle.read(1048576)
                    while data:
                        bytes_read += len(data)
                        digest.update(data)
                        out_fh.write(data)
                        data = filehandle.read(1048576)

                # Validate before the data becomes visible under its md5 name.
                if bytes_read != expected_length:
                    raise Exception("wrong length, expected %d got %d"
                                    % (expected_length, bytes_read))
                if digest.hexdigest() != expected_md5:
                    raise Exception("wrong checksum, expected %s got %s"
                                    % (expected_md5, digest.hexdigest()))
            except Exception:
                # Don't leave a partial/invalid temp file in the data dir
                # (ListChunks would otherwise report it).
                if os.path.exists(temp_filename):
                    os.remove(temp_filename)
                raise

            # Rename the (closed) temporary file into place.
            os.rename(temp_filename, self.md5_dir + "/" + digest.hexdigest())

    # Fetches the file indexed by 'md5sum' and writes it to 'filehandle'.
    def GetChunk(self, md5sum, expected_length, filehandle):
        """Copy the chunk stored under `md5sum` into `filehandle`.

        `expected_length` is accepted for interface compatibility but is not
        verified here.
        """
        # `with` closes the chunk file; the original leaked the handle.
        with open(self.md5_dir + "/" + md5sum, "rb") as f:
            filehandle.write(f.read())

    # Returns a list of (md5sum, length).
    def ListChunks(self):
        """Return a list of (md5sum, length) for every stored chunk.

        NOTE(review): every entry in the data directory is assumed to be a
        chunk; an in-flight PutChunk temp file would appear here too.
        """
        chunks = []
        for md5 in os.listdir(self.md5_dir):
            filename = "%s/%s" % (self.md5_dir, md5)
            chunks.append((md5, os.stat(filename).st_size))
        return chunks
