#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# test different PAGE_SIZE
#

# DEFAULT(0)/FILE(1)/MEMORY(2)
# self.db.execute( 'PRAGMA temp_store = MEMORY; ' )
#
# FULL(2)/NORMAL(1)/OFF(0)
# self.db.execute( 'PRAGMA synchronous = OFF; ' )
#


import os,os.path, time, tempfile

import sqlite3 as sqlite

import celtuce.help.logging as logging

from bisect import insort


# md5 lives in hashlib since Python 2.5; fall back to the legacy md5
# module on older interpreters.  Catch ImportError specifically — the
# original bare `except:` would mask unrelated failures (KeyboardInterrupt,
# SystemExit, typos inside hashlib, ...).
try :
    from hashlib import md5 as MD5
except ImportError :
    from md5 import md5 as MD5

# Benchmark working directory, under the system temp dir
# (e.g. /tmp/celtuce/celldb.4); wiped and recreated by each setUp().
PATH = os.sep.join((tempfile.gettempdir(), 'celtuce/celldb.4'))

# SQLite database file created inside PATH.
DB_FILE = 'test.sqlite.db'

# Table name used by every benchmark run.
TABLE_NAME = 'testsqlite'

class   TestSqliteSpeed(object) :
    def setUp(self) :
        self._logger = logging.getLogger()

        if os.path.exists(PATH) :
            self.removedirs(PATH)

        os.makedirs(PATH, 0777)

        self._db_filename = os.sep.join((PATH, DB_FILE))
        ## setUp()

    def tearDown(self) :
        pass
        ## tearDown()

    def removedirs(self, path) :
        items = os.listdir(path)
        for name in items :
            fullpath = os.sep.join((path, name))
            if os.path.isdir(fullpath) :
                self.removedirs(fullpath)
            else :
                os.remove(fullpath)

        os.rmdir(path)
        ## removedirs

    def run(self) :
        COUNT_RECORDS = 1000000

        TITLE = 'UniqueIndex(key)'
        SQL_CREATE_TABLE = 'CREATE TABLE IF NOT EXISTS %s (key TEXT, op INTEGER, ts INTEGER, fileno INTEGER, offset INTEGER, length INTEGER)' % TABLE_NAME
        SQL_CREATE_INDEX = 'CREATE UNIQUE INDEX IF NOT EXISTS %s_idx_key ON %s (key)' % (TABLE_NAME, TABLE_NAME)

        for page_size in (4096, 8192) :
            for cache_size in (5000, 10000, 15000) :
                for synchronous in ('FULL', 'NORMAL', 'OFF') :
                    #self.setUp()
                    #self.test_1( TITLE, SQL_CREATE_TABLE, SQL_CREATE_INDEX, COUNT_RECORDS, page_size, cache_size, synchronous )
                    #self.tearDown()

                    self.setUp()
                    self.test_2( TITLE, SQL_CREATE_TABLE, SQL_CREATE_INDEX, COUNT_RECORDS, page_size, cache_size, synchronous )
                    self.tearDown()


        ## run()

    ###################################################################

    def showSqliteConfig(self, cursor) :
        cursor.execute("PRAGMA page_size")
        page_size=int(cursor.fetchone()[0])

        cursor.execute("PRAGMA cache_size")
        cache_size = int(cursor.fetchone()[0])

        cursor.execute("PRAGMA synchronous")
        synchronous=int(cursor.fetchone()[0])

        self._logger.info('SQLite Configure : page_size=%d Bytes, cache_size=%d pages, synchronous=%d', page_size, cache_size, synchronous)
        ## showSqliteConfig()

    def test_1(self, title, sql_create_table, sql_create_index, count_records, page_size=4096, cache_size=10000, synchronous='FULL', isolation='') :
        self._logger.info('test_1 "%s" with order-key ...', title)
        self._logger.info('    ISOLATION-LEVEL=%r', isolation)
        self._logger.info('    PAGE-SIZE=%d', page_size)
        self._logger.info('   CACHE-SIZE=%d pages', cache_size)
        self._logger.info('  SYNCHRONOUS=%s', synchronous)
        dbh = sqlite.connect( self._db_filename )
        dbh.isolation_level = isolation
        dbh.execute("PRAGMA page_size = %d" % page_size)
        dbh.execute("PRAGMA cache_size = %d" % cache_size)
        dbh.execute("PRAGMA synchronous = %s;" % synchronous)

        dbh.execute( sql_create_table )
        if sql_create_index :
            dbh.execute( sql_create_index )

        dbc = dbh.cursor()
        self.showSqliteConfig( dbc )

        SQL_INSERT = 'INSERT INTO %s(key,op,ts,fileno,offset,length) VALUES(?,?,?,?,?,?)' % TABLE_NAME

        time_start = time.time()

        key_prefix = 'hello,world.hello,world.hello,world.'
        op = 0
        ts = 12345678
        fileno = 0
        offset = 0
        length = len(key_prefix)

        #if isolation :
        #    dbc.execute('BEGIN')

        for i in xrange(count_records) :
            key = '-'.join((key_prefix, str(i)))
            dbc.execute(SQL_INSERT,(key,op,ts,fileno,offset,length))
            offset += length

            if (i > 0) and (0 == (i % 50000)) :
                time_last = time.time() - time_start
                tps = i / time_last
                self._logger.info(' - insert %d records last %.2f seconds, tps:%.2f ...', i, time_last, tps)
    
        #if isolation :
        #    dbc.execute('COMMIT')
        dbh.commit()

        time_last = time.time() - time_start
        tps = count_records / time_last
        self._logger.info(' - insert %d records last %.2f seconds, tps:%.2f ...', count_records, time_last, tps)

        # test finding 
        SQL_FIND_BY_KEY = 'SELECT op,ts,fileno,offset,length FROM %s WHERE key=? ORDER BY ts DESC' % TABLE_NAME
        time_start = time.time()

        for i in xrange(count_records) :
            key = '-'.join((key_prefix, str(i)))
            dbc.execute(SQL_FIND_BY_KEY,(key,))
            row = dbc.fetchone()

            if (i > 0) and (0 == (i % 50000)) :
                time_last = time.time() - time_start
                tps = i / time_last
                self._logger.info(' - find %d records last %.2f seconds, tps:%.2f ...', i, time_last, tps)
    
        time_last = time.time() - time_start
        tps = count_records / time_last
        self._logger.info(' - find %d records last %.2f seconds, tps:%.2f ...', count_records, time_last, tps)

        dbh.commit()
        dbh.close()
        dbh = None

        self._logger.info('test_1 "%s" with order-key done.', title)
        ## test_1()

    def test_2(self, title, sql_create_table, sql_create_index, count_records, page_size=4096, cache_size=10000, synchronous='FULL', isolation='') :
        self._logger.info('test_2 "%s" with md5-key ...', title)
        self._logger.info('    ISOLATION-LEVEL=%r', isolation)
        self._logger.info('    PAGE-SIZE=%d', page_size)
        self._logger.info('   CACHE-SIZE=%d pages', cache_size)
        self._logger.info('  SYNCHRONOUS=%s', synchronous)
        dbh = sqlite.connect( self._db_filename )
        dbh.isolation_level = isolation
        dbh.execute("PRAGMA page_size = %d" % page_size)
        dbh.execute("PRAGMA cache_size = %d" % cache_size)
        dbh.execute("PRAGMA synchronous = %s;" % synchronous)

        dbh.execute( sql_create_table )
        if sql_create_index :
            dbh.execute( sql_create_index )

        dbc = dbh.cursor()
        self.showSqliteConfig( dbc )

        SQL_INSERT = 'INSERT INTO %s(key,op,ts,fileno,offset,length) VALUES(?,?,?,?,?,?)' % TABLE_NAME

        time_start = time.time()

        key_prefix = 'hello,world.hello,world.hello,world.'
        op = 0
        ts = 12345678
        fileno = 0
        offset = 0
        length = len(key_prefix)

        #if isolation :
        #    dbc.execute('BEGIN')

        for i in xrange(count_records) :
            key = MD5('-'.join((key_prefix, str(i)))).hexdigest()
            dbc.execute(SQL_INSERT,(key,op,ts,fileno,offset,length))
            offset += length

            if (i > 0) and (0 == (i % 50000)) :
                time_last = time.time() - time_start
                tps = i / time_last
                self._logger.info(' - insert %d records last %.2f seconds, tps:%.2f ...', i, time_last, tps)
    
        #if isolation :
        #    dbc.execute('COMMIT')
        dbh.commit()

        time_last = time.time() - time_start
        tps = count_records / time_last
        self._logger.info(' - insert %d records last %.2f seconds, tps:%.2f ...', count_records, time_last, tps)

        # test finding 
        SQL_FIND_BY_KEY = 'SELECT op,ts,fileno,offset,length FROM %s WHERE key=? ORDER BY ts DESC' % TABLE_NAME
        time_start = time.time()

        for i in xrange(count_records) :
            key = MD5('-'.join((key_prefix, str(i)))).hexdigest()
            dbc.execute(SQL_FIND_BY_KEY,(key,))
            row = dbc.fetchone()

            if (i > 0) and (0 == (i % 50000)) :
                time_last = time.time() - time_start
                tps = i / time_last
                self._logger.info(' - find %d records last %.2f seconds, tps:%.2f ...', i, time_last, tps)
    
        time_last = time.time() - time_start
        tps = count_records / time_last
        self._logger.info(' - find %d records last %.2f seconds, tps:%.2f ...', count_records, time_last, tps)

        dbh.commit()
        dbh.close()
        dbh = None

        self._logger.info('test_2 "%s" with md5-key done.', title)
        ## test_2()

    def test_3(self, title, sql_create_table, sql_create_index, count_records, page_size=4096, cache_size=10000, synchronous='FULL', isolation='') :
        self._logger.info('test_3 "%s" with cached-md5-key ...', title)
        self._logger.info('    ISOLATION-LEVEL=%r', isolation)
        self._logger.info('    PAGE-SIZE=%d', page_size)
        self._logger.info('   CACHE-SIZE=%d pages', cache_size)
        self._logger.info('  SYNCHRONOUS=%s', synchronous)
        dbh = sqlite.connect( self._db_filename )
        dbh.isolation_level = isolation
        dbh.execute("PRAGMA page_size = %d" % page_size)
        dbh.execute("PRAGMA cache_size = %d" % cache_size)
        dbh.execute("PRAGMA synchronous = %s;" % synchronous)

        dbh.execute( sql_create_table )
        if sql_create_index :
            dbh.execute( sql_create_index )

        dbc = dbh.cursor()
        self.showSqliteConfig( dbc )

        SQL_INSERT = 'INSERT INTO %s(key,op,ts,fileno,offset,length) VALUES(?,?,?,?,?,?)' % TABLE_NAME

        time_start = time.time()

        key_prefix = 'hello,world.hello,world.hello,world.'
        op = 0
        ts = 12345678
        fileno = 0
        offset = 0
        length = len(key_prefix)

        cached = []
        cached_count = 0

        #if isolation :
        #    dbc.execute('BEGIN')

        for i in xrange(count_records) :
            key = MD5('-'.join((key_prefix, str(i)))).hexdigest()
            insort(cached, (key,op,ts,fileno,offset,length))
            cached_count += 1

            offset += length

            if cached_count >= 10000 :
                for key,op,ts,fileno,offset,length in cached :
                    dbc.execute(SQL_INSERT,(key,op,ts,fileno,offset,length))

                cached = []
                cached_count = 0

            if (i > 0) and (0 == (i % 50000)) :
                time_last = time.time() - time_start
                tps = i / time_last
                self._logger.info(' - insert %d records last %.2f seconds, tps:%.2f ...', i, time_last, tps)
    
        if cached_count >= 1000 :
            for key,op,ts,fileno,offset,length in cached :
                dbc.execute(SQL_INSERT,(key,op,ts,fileno,offset,length))

            cached = []
            cached_count = 0

        #if isolation :
        #    dbc.execute('COMMIT')
        dbh.commit()

        time_last = time.time() - time_start
        tps = count_records / time_last
        self._logger.info(' - insert %d records last %.2f seconds, tps:%.2f ...', count_records, time_last, tps)

        # test finding 
        SQL_FIND_BY_KEY = 'SELECT op,ts,fileno,offset,length FROM %s WHERE key=? ORDER BY ts DESC' % TABLE_NAME
        time_start = time.time()

        for i in xrange(count_records) :
            key = MD5('-'.join((key_prefix, str(i)))).hexdigest()
            dbc.execute(SQL_FIND_BY_KEY,(key,))
            row = dbc.fetchone()

            if (i > 0) and (0 == (i % 50000)) :
                time_last = time.time() - time_start
                tps = i / time_last
                self._logger.info(' - find %d records last %.2f seconds, tps:%.2f ...', i, time_last, tps)
    
        time_last = time.time() - time_start
        tps = count_records / time_last
        self._logger.info(' - find %d records last %.2f seconds, tps:%.2f ...', count_records, time_last, tps)

        dbh.commit()
        dbh.close()
        dbh = None

        self._logger.info('test_3 "%s" with cached-md5-key done.', title)
        ## test_3()

    ## class TestSqliteSpeed


# Script entry point: build the benchmark driver and sweep all settings.
if  __name__ == '__main__' :
    TestSqliteSpeed().run()

