# -*- coding: utf-8 -*-
"""
"""

from sqlalchemy import *
from sqlalchemy.orm import mapper, relation
from sqlalchemy import Table, ForeignKey, Column
from sqlalchemy.types import Integer, Unicode, BLOB

from abserver.model import DeclarativeBase, metadata, DBSession



class Blob(DeclarativeBase):
    """This Entity describes a build artifact. This can be the result of the build, or the Log,
    or, soon, any other 'attachment' that the build system chooses to archive in the server.
    """
    # table options
    __tablename__ = 'blobdata'

    # columns
    blob_id = Column(Integer, primary_key=True, autoincrement=True)
    blob_type = Column(Integer, nullable=False)     # one of the TASK_* constants below
    filename = Column(Unicode(255), nullable=True)  # original filename of the artifact, if any
    number_files = Column(Integer, nullable=False)
    size_of_data = Column(Integer, nullable=False)  # size of `data` in bytes
    data = Column(BLOB)
    task_id = Column(Integer, ForeignKey('task.task_id'))

    # relationships
    task = relation('Task', backref='blobs')

    # Legal values for blob_type.
    TASK_NONE = 0
    TASK_LOG = 1
    TASK_BUILD_OUTPUT = 2
    TASK_TEST_OUTPUT = 3

    # Human-readable labels, indexed by the TASK_* constants above.
    _TYPE_NAMES = ('None', 'Log', 'Build Result', 'Build Test Data')

    def __init__(self, task, blob_type=TASK_NONE, number_of_files=0, size_of_data=0, filename="", data=None):
        """Create a blob attached to `task` and add it to the current DBSession.

        :param task: the Task entity this artifact belongs to
        :param blob_type: one of the TASK_* constants
        :param number_of_files: number of files bundled in `data`
        :param size_of_data: size of `data` in bytes
        :param filename: original filename of the artifact, if any
        :param data: the raw binary payload (or None)
        """
        self.task = task
        self.blob_type = blob_type
        self.filename = filename
        self.number_files = number_of_files
        self.size_of_data = size_of_data
        self.data = data
        DBSession.add(self)

    # NOTE(review): these two helpers used to shuttle the data through
    # MySQL's LOAD_FILE / INTO DUMPFILE, which assumed the DB server and
    # the ABServer shared a filesystem. They now read/write the blob
    # column directly through Python file I/O.
    def save_blob_data(self, blob_handle):
        """Load the file named by `blob_handle` into this blob's `data` column."""
        # `with` guarantees the file handle is closed even on error
        # (the original leaked the handle).
        with open(blob_handle.fname(), 'rb') as f:
            self.data = f.read()

    def read_blob_data(self, blob_handle):
        """Write this blob's `data` column out to the file named by `blob_handle`."""
        with open(blob_handle.fname(), 'wb') as f:
            f.write(self.data)

    def blob_type_str(self):
        """Return a human-readable label for this blob's type.

        Returns 'Unknown' for a blob_type outside the known TASK_* range
        (the original raised IndexError / mis-mapped negative values).
        """
        if 0 <= self.blob_type < len(self._TYPE_NAMES):
            return self._TYPE_NAMES[self.blob_type]
        return 'Unknown'

    def size_str(self):
        """Return size_of_data formatted for display (Bytes / KB / MB)."""
        # Bug fix: the original referenced an undefined name `r`
        # (`unicode(r.size_of_data)`), raising NameError for sizes < 1 KiB.
        if self.size_of_data < 1024:
            return u"%d Bytes" % self.size_of_data
        elif self.size_of_data < 1048576:  # the `>= 1024` half of the test was redundant
            return u"%.2f KB" % (self.size_of_data / 1024.0)
        else:
            return u"%.2f MB" % (self.size_of_data / 1048576.0)

    @staticmethod
    def find_task_blobs(build_id, variety_name, blob_type):
        """Return the blobs of `blob_type` from the first task of `build_id`
        whose variety matches `variety_name`; [] if no build/task matches.
        """
        from abserver.model import Build
        build = Build.get(build_id)
        blobs = []
        if build:
            for t in build.tasks:
                if t.variety == variety_name:
                    blobs = [b for b in t.blobs if b.blob_type == blob_type]
                    # only the first matching task is considered
                    break
        return blobs
