#!/usr/bin/python

import localstore
import gdrivestore

import logging
import hashlib
import platform
import sys
import shutil
import tempfile
import StringIO
import time
import unittest2

class MyStringIO(StringIO.StringIO):
    """An in-memory file usable as a context manager.

    Python 2's StringIO.StringIO does not implement the context-manager
    protocol; this subclass adds no-op __enter__/__exit__ so the tests can
    hand ``lambda: MyStringIO(data)`` to store.PutChunk, which uses the
    returned object in a ``with`` statement.
    """

    def __init__(self, initial_value):
        StringIO.StringIO.__init__(self, initial_value)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        # Deliberately does not close() — closing a StringIO discards its
        # buffer, and callers may still need it after the with-block.
        return False

class TestBase:
    """Store-conformance tests shared by all backend implementations.

    Mixed into a concrete unittest2.TestCase whose setUp() must create
    ``self.store``.
    """

    def test_metadata(self):
        """Round-trip one metadata entry through the store."""
        # A fresh store has no metadata, and lookups on it fail loudly.
        self.assertEqual({}, self.store.ListMetadata())
        self.assertRaisesRegexp(Exception, "metadata.*not found",
                                self.store.GetMetadata, 5)
        # Store one entry and read it back, both directly and via listing.
        self.store.PutMetadata(5, "blah blah blah")
        self.assertEqual("blah blah blah", self.store.GetMetadata(5))
        listing = self.store.ListMetadata()
        self.assertEqual(1, len(listing))
        self.assertIn(5, listing)

    def test_data(self):
        """Round-trip one data chunk through the store."""
        # A fresh store has no chunks; fetching a bogus one fails loudly.
        self.assertEqual([], self.store.ListChunks())
        sink = StringIO.StringIO()
        self.assertRaisesRegexp((Exception,OSError), "No such",
                                self.store.GetChunk, "b", 52, sink)

        payload = "blah blah blah"
        digest = hashlib.md5(payload).hexdigest()
        # PutChunk takes a factory so the store can re-open the source on retry.
        self.store.PutChunk(lambda: MyStringIO(payload), digest, len(payload))

        self.assertEqual([(digest, len(payload))], self.store.ListChunks())

        # Fetching by (digest, length) must reproduce the payload exactly.
        sink = StringIO.StringIO()
        self.store.GetChunk(digest, len(payload), sink)
        self.assertEqual(payload, sink.getvalue())
        

class TestLocalStore(TestBase, unittest2.TestCase):
    """Runs the shared conformance tests against a filesystem-backed store."""

    def setUp(self):
        # Each test gets its own scratch directory for isolation.
        scratch = tempfile.mkdtemp()
        self.path = scratch
        self.store = localstore.LocalStore(scratch)

    def tearDown(self):
        # Remove the scratch directory and everything the store wrote into it.
        shutil.rmtree(self.path)

class TestGdriveStore(TestBase, unittest2.TestCase):
    """Runs the shared conformance tests against a live Google Drive store."""

    def setUp(self):
        # Name the remote store after host + wall-clock time + test id so
        # runs on different machines (or stale leftovers) cannot collide.
        self.store = gdrivestore.GdriveStore(
            "%s-%s-%s" % (platform.node(), time.asctime(time.localtime()), self.id()),
            "gsync testing")

    def test_data_error_cases(self):
        """Check exact failure behavior for bad checksums and lengths.

        The error handling is a little different in the two implementations,
        so we only check the exact exceptions on the gdrive implementation.
        """
        payload = "blah blah blah"
        digest = hashlib.md5(payload).hexdigest()

        # An outright bogus checksum is rejected.
        self.assertRaisesRegexp(
            Exception, "wrong checksum",
            self.store.PutChunk, lambda: MyStringIO(payload), "XXXXX", len(payload))

        # NOTE: declaring a length one byte too LONG used to raise
        # "400, Invalid Request", but such uploads now get retried
        # indefinitely, so that case is no longer asserted here.

        # Declaring a length one byte too SHORT surfaces as a checksum
        # mismatch — the reported hash is of all but the last byte — rather
        # than the length error you might expect.  Oh well.
        self.assertRaisesRegexp(
            Exception, "wrong checksum 55e562bfee2bde4f9e71b8885eb5e303 vs 39f47e994464080ebf595ed925bc6e83",
            self.store.PutChunk, lambda: MyStringIO(payload), digest, len(payload) - 1)

        # Failed uploads must not leave partial chunks behind.
        self.assertEqual([], self.store.ListChunks())

        self.store.PutChunk(lambda: MyStringIO(payload), digest, len(payload))

        # Fetching an existing digest with the wrong length (in either
        # direction) is a miss, and must write nothing to the output stream.
        for bad_length in (len(payload) + 1, len(payload) - 1):
            sink = StringIO.StringIO()
            self.assertRaisesRegexp((Exception,OSError), "No such",
                                    self.store.GetChunk, digest, bad_length, sink)
            self.assertEqual("", sink.getvalue())
        

if __name__ == '__main__':
    # Stream debug-level logging to stderr while the test suite runs.
    logging.basicConfig(stream=sys.stderr)
    logging.getLogger().setLevel(logging.DEBUG)
    unittest2.main()
