# -*- coding: utf-8 -*-

from updateLSSP import *
import math
import os

class QueryLSA: 
    """Class for analysing a query using Latent Semantic Analysis (LSA) from previously updated Latent Semantic Space (LSSP) instance (for more info see updateLSSP.py).  
    LSSP Matrices are stored as extend numpy package ndarrays using the class MetaArray from Luke Campagnola (available at http://www.scipy.org/Cookbook/MetaArray).
    The module has been written for extensibility and portability rather than performance.
    Example and Doctest :
    >>> lssp  = UpdateLSSP(test_mode = True, dimensions = 9 , tf_idf = False)
    >>> QueryLSA( "front report", True, tf_idf =False).results
    [(0.79253464341479629, 'doc5'), (0.54915494993105951, 'doc7'), (0.50226912260445555, 'doc2'), (0.24408832600652672, 'doc4'), (0.24290317960917179, 'doc6'), (0.00091592595791802447, 'doc10'), (-0.0017351876141954343, 'doc3'), (-0.0019420341077521026, 'doc1'), (-0.0030079996328369416, 'doc8'), (-0.004575959674413474, 'doc9')]
    """ 
    
    # LSSP matrices loaded from disk as MetaArray instances (None until __init__ succeeds).
    SVD = None 
    U = None 
    Vt = None 
    S = None 
    tdm_pseudo_doc = None   # the query expressed as one extra TDM column
    svd_pseudo_doc = None   # the query folded into / back out of the reduced SVD space
    words = None            # vocabulary: row labels of the TDM
    docs = None             # document names: column labels of the TDM
    dimensions = None       # number of SVD dimensions recorded in the stored TDM metadata
    binarydb_location = None 
    results = None          # list of (normalised score, doc name), best first
    query = None 
    tf_idf = None           # whether the pseudo document is tf-idf weighted
    
    def cosine(self, A, B):
        """ Returns the distance between 2 vectors A and B.
            The cosine distance is the most used when performing LSA results.
            cosine(A,B)  = ( A * B ) /( |A| x |B| )
            Returns -1. when either vector has a zero norm (degenerate case,
            e.g. a query sharing no word with the vocabulary).
        """
        div = sqrt(dot(transpose(A), A) * dot(transpose(B), B))
        if div == 0.:
            return -1.
        return float(dot(transpose(A), B) / div)
         
    def __get_pseudo_doc(self):
        """Private method used by the class constructor.
        Generates the TDM and SVD pseudo documents, they are the equivalent of the query as a new column in the SVD and TDM matrices.
        First a new column of the TDM is generated, then with the U and S matrices (see updateLSSP.py for more info) the pseudo document is calculated.
        The LSSP matrices must be up to date before generating the pseudo document. 
        """
        query_words = self.query.split()
        n_query_words = len(query_words)
        pseudo_doc_rows = shape(self.TDM)[0]
        self.tdm_pseudo_doc = zeros((pseudo_doc_rows, 1), float)
        # Single pass over the query vocabulary: each known word is independent,
        # so the raw count and its optional tf-idf weighting are computed together
        # (the original two-loop version only re-read the count it had just stored).
        for word in set(query_words):
            if word not in self.words:
                continue  # word absent from the LSSP vocabulary: leave its row at 0
            row = self.words.index(word)  # index() hoisted: one O(n) scan per word
            count = query_words.count(word)
            if self.tf_idf:
                tf = float(count) / n_query_words
                # Document frequency = number of non-zero entries in the word's TDM
                # row (MetaArray axis lookup "Words":word); +1 smoothing avoids /0.
                idf = math.log10(float(len(self.docs)) / (1 + len(argwhere(self.TDM["Words":word, :].view(ndarray)))))
                self.tdm_pseudo_doc[row] = tf * idf
            else:
                self.tdm_pseudo_doc[row] = count
        # Fold the query column into the reduced space (q^T * U * S^-1), then map it
        # back (U * S * q') so it is comparable with the stored SVD document columns.
        self.svd_pseudo_doc = dot(dot(transpose(self.tdm_pseudo_doc), self.U.view(ndarray)), linalg.inv(self.S.view(ndarray)))
        self.svd_pseudo_doc = dot(dot(self.U.view(ndarray), self.S.view(ndarray)), transpose(self.svd_pseudo_doc))
        
    def __init__(self, query, test_mode=None, matrix_file_location=None, tf_idf=True):
        """
        Constructor of a new latent semantic analysis (LSA) query.
        The test_mode parameter will use the test DB (for more info see interfacedb).
        The matrix_file_location defines the location where the matrices will be stored, by default is the current directory matrix_file_location ="" 
        unless if test_mode is True, then matrix_file_location ="testMatrix/".
        By default the tf_idf parameter makes the LSA using a tf_idf generated pseudo document(for more info see UpdateLSSP).
        """
        if matrix_file_location is None:
            if test_mode is not None:
                self.binarydb_location = TEST_FILE_LOCATION
            else: 
                self.binarydb_location = DEFAULT_FILE_LOCATION
        else: 
            self.binarydb_location = matrix_file_location
        self.query = query
        self.tf_idf = tf_idf 
        try: 
            self.TDM = MetaArray(file=self.binarydb_location + 'TDM')
            self.words = [elem['name'] for elem in self.TDM._info[0]['cols']]  # word list is the cols of TDM axis 0
            self.docs = [elem['name'] for elem in self.TDM._info[1]['cols']]
            self.dimensions = self.TDM._info[-1]['dimensions']  # extra information contained at the end of metaArray
            self.U = MetaArray(file=self.binarydb_location + 'U') 
            self.Vt = MetaArray(file=self.binarydb_location + 'Vt') 
            self.S = MetaArray(file=self.binarydb_location + 'S') 
            self.SVD = MetaArray(file=self.binarydb_location + 'SVD')
            self.__get_pseudo_doc()
            self.results = []
            # Cosine is in [-1, 1]; (c + 1) / 2 rescales scores into [0, 1].
            for doc in self.docs:
                score = (self.cosine(self.svd_pseudo_doc, self.SVD[:, "Docs":doc].view(ndarray)) + 1.) / 2.
                self.results.append((score, doc))
            self.results = sorted(self.results, reverse=True)
        # Any missing matrix file means the LSSP has never been built: warn and
        # leave the instance with its default (None) attributes.
        except IOError:
            print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
            print("!!!!!!!!!!!!!!!!!!!!!!Warning : No Latent Semantic Space (LSSP) found !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
            print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")

if __name__ == "__main__":
    # Self-test entry point: run the doctests embedded in this module's
    # docstrings (see the QueryLSA class docstring for the example session).
    import doctest
    doctest.testmod()
