# Bandits with dependent arms help to explore the space when we have
# too many arms to try all of them, and can benefit from taking the
# dependencies between arms into account.

import numpy
import math
import random
import Distance
import os

class GPDependantUCB(object):
    """Gaussian-process UCB bandit over a fixed set of image 'arms'.

    A precomputed pairwise distance matrix between images is turned into
    a Gaussian (RBF) kernel, so feedback observed on one image also
    informs the posterior of similar images.
    """
    # bandwidth sigma of the Gaussian kernel
    SIGMA = 0.3
    # observation-noise standard deviation added to the kernel diagonal
    NOISE = 0.05

    def __init__(self, file, distance_type, images_number, clustering_type):
        # rewards accumulated per shown image (parallel to images_shown)
        self.reward = []
        # indices of images already presented to the user
        self.images_shown = []
        self.images_number = images_number

        self.path1 = os.path.dirname(__file__) + '/ProximityMatrix/'
        # input file holds the precomputed pairwise distance matrix
        self.input = self.path1 + file + str(self.images_number) + '_' + distance_type + '.dist'

        # images presented on the previous iteration
        self.previous_images = []
        self.LoadKernel()
        # number of pulls so far (time step T)
        self.T = 0

        # sufficient statistics for decision making - mean and standard deviation
        self.mean = numpy.array([])
        self.sigma = numpy.array([])
        self.setsize = 10

        self.pp = Distance.SimpleProximity(file, distance_type, images_number)

        # kernel columns between all images and the shown ones
        self.k = numpy.array([])

    def LoadKernel(self):
        """Load the distance matrix and convert it to a Gaussian kernel."""
        # first load distances
        self.kernel = numpy.genfromtxt(self.input, unpack=True)
        # Gaussian kernel: k(d) = exp(-d / (2*sigma^2)).
        # BUG FIX: the exponent used to be "-(d)/2*(SIGMA**2)", which
        # multiplies by sigma^2/2 instead of dividing by 2*sigma^2;
        # `ss` was computed but never used.
        ss = 2 * (GPDependantUCB.SIGMA ** 2)
        self.kernel = numpy.exp(-self.kernel / ss)

    # Update arms' statistics - mean and variance
    def UpdateArms(self, images, relevance_scores):
        """Update the GP posterior after observing relevance feedback.

        images: image indices just presented to the user.
        relevance_scores: indexable by image index, giving its reward.
        """
        # Image indices start from 0.  Each image appears at most once in
        # images_shown: if a row repeated, the C matrix would be singular.
        for image in images:
            if image not in self.images_shown:
                # first time seen: record the image and its reward
                self.images_shown.append(image)
                self.reward.append(relevance_scores[image])
            else:
                # seen before: accumulate the reward
                self.reward[self.images_shown.index(image)] += relevance_scores[image]

        # kernel values between every image and the shown images
        self.k = self.kernel[self.images_shown]

        # C: kernel among shown images plus squared noise on the diagonal
        sn = (GPDependantUCB.NOISE ** 2) * numpy.identity(len(self.images_shown))
        p = self.k.T
        C = p[self.images_shown] + sn

        # invert C once and reuse it for both the mean and the variance
        kC = numpy.dot(self.k.T, numpy.linalg.inv(C))
        # GP posterior mean for every arm
        self.mean = numpy.dot(kC, numpy.array(self.reward))
        # GP posterior standard deviation for every arm
        self.sigma = numpy.sqrt(
            numpy.ones(self.images_number)
            + GPDependantUCB.NOISE ** 2
            - numpy.diagonal(numpy.dot(kC, self.k))
        )

    # sample #setsize images randomly
    def ChooseFirstImageSet(self, setsize):
        """Sample setsize distinct images uniformly at random."""
        # random.sample guarantees distinct picks (and raises ValueError
        # instead of looping forever if setsize > images_number)
        chosen_images = random.sample(range(self.images_number), setsize)
        # save images as having been presented
        self.previous_images = chosen_images
        # update timing
        self.T += setsize
        self.setsize = setsize

        return chosen_images

    # sample #setsize images based on their statistics
    def ChooseImages(self, setsize):
        """Pick the setsize images with the highest UCB score.

        Images shown on the previous iteration are skipped so the user
        does not see the same set twice in a row.
        """
        chosen_images = []
        # UCB criterion: exploit (mean) + explore (sqrt(T) * sigma)
        decision = self.mean.T + math.sqrt(self.T) * self.sigma
        # indices of images sorted by increasing score
        indices = numpy.argsort(decision)
        i = 0
        # walk from the best-scored image downwards; the bounds guard
        # prevents an IndexError when too few candidates remain
        while len(chosen_images) < setsize and i < len(indices):
            image = indices[len(indices) - i - 1]
            if image not in self.previous_images:
                chosen_images.append(image)
            i += 1
        self.previous_images = chosen_images
        self.T += setsize
        self.setsize = setsize

        return chosen_images
