import random
from datetime import *

from core.models import Webcam, Image
import core.utils as utils
import numpy as np
import os.path
import ipca

# Point MPLCONFIGDIR at a writable directory before any matplotlib-dependent
# import runs -- presumably to avoid config-dir permission errors when running
# under a restricted (e.g. web-server) user. TODO confirm 'tmp' is writable.
os.environ['MPLCONFIGDIR'] = 'tmp'

import scipy.stats as ss
from scipy.cluster.vq import kmeans, kmeans2, whiten
from scipy import io

# Hours of day at which to sample images; find_closest_image / find_camera_info
# search the time-of-day window [hour-2, hour] for each entry.
hours = (2, 8, 14, 20) #Which hours to capture. Looks in range [hour-2, hour].


def random_cid():
	'''Return the id of a uniformly random Webcam.'''
	cam = random_camera()
	return cam.id

def random_camera():
	'''Pick one Webcam uniformly at random from all webcams.'''
	return random.choice(Webcam.objects.all())

def random_image(cam):
	'''
		Returns a random image for the camera from the last 100 days.

		Returns None if no image exists in that window.
	'''

	# NOTE: the old docstring claimed "last 336 images" and the old local
	# was named `oneyear` -- the window is actually the last 100 days.
	window = timedelta(days=100)
	end = fix_dt(date.today())
	start = fix_dt(date.today() - window)
	if isinstance(cam, Webcam):
		cam = cam.id  # getImagesInRange expects a camera id, not a model instance
	imgs = Image.images.getImagesInRange(start, end, camera=cam)

	if imgs:
		return random.choice(imgs)
	return None

## this one finds a bunch of random images
def random_images(cam, n = 1):
	'''
		Returns a list of n images drawn at random (with replacement) from
		the camera's images over the last 100 days.

		Returns [] when no images exist in that window.
	'''
	d = timedelta(days=100)
	end = datetime.combine(date.today(), time(0,0))
	start = datetime.combine((date.today() - d), time(0,0))
	if isinstance(cam, Webcam):
		cam = cam.id  # getImagesInRange expects a camera id
	imgs = Image.images.getImagesInRange(start, end, camera=cam)

	if not imgs:
		return []
	# Sample with replacement, as the original append loop did.
	return [random.choice(imgs) for _ in xrange(n)]

def find_closest_image(cam, hour):
	'''
		Find an image for the camera taken within the last 31 days, inside
		the time-of-day window [hour-2, hour].

		Returns the first image of the matching set.  If nothing matches,
		falls back to random_image(cam) -- which may itself return None.

		(The old docstring claimed a one-year search window and a None
		return; neither matched the code.)
	'''

	# Restrict to the last month of images.
	end_date = fix_dt(date.today())
	start_date = fix_dt(date.today() - timedelta(days=31))

	# Accept anything up to two hours before the target hour.
	end_time = time(hour)
	start_time = time(max(hour-2, 0))

	imgs = utils.images_in_time_rect(cam, start_date, end_date, start_time, end_time)

	if not imgs:
		return random_image(cam)

	# First element is returned; presumably most-recent-first -- TODO confirm
	# the ordering guarantee of utils.images_in_time_rect.
	return imgs[0]

def find_camera_info(cam, random_fallback = True):
	'''
		Return one image per entry of the module-level `hours` tuple.

		Each slot holds the image find_closest_image picks for that hour
		(window [hour-2, hour], most recent first).  When nothing is found
		and random_fallback is True, a random image from the camera is
		substituted; otherwise the slot is None.
	'''

	def pick(hour):
		# One image per target hour; optionally fall back to a random one.
		found = find_closest_image(cam, hour)
		if found is None and random_fallback:
			found = random_image(cam)
		return found

	return [pick(hour) for hour in hours]

def fix_dt(date):
	'''Promote a date to a datetime at midnight (00:00).'''
	# time.min is time(0, 0); parameter name `date` kept for callers.
	return datetime.combine(date, time.min)

# get a list of images as a montage in a given time rectangle
def get_montage(cam, start_date, end_date, start_time, end_time, count = 18):
	'''Return at most ~count images, evenly strided across the time rect.'''
	import math

	imgs = utils.images_in_time_rect(cam, fix_dt(start_date), fix_dt(end_date), start_time, end_time)

	# Keep every stride-th image so that roughly `count` survive; a stride
	# of at least 1 also covers the empty-result case.
	stride = max(int(math.ceil(float(len(imgs)) / float(count))), 1)
	return imgs[::stride]

# get a montage from around the datetime, and given time delta
def get_montage_around(cam, datetime, time1, time2, date_range):
	'''
		Montage centred on `datetime`, spanning date_range days, restricted
		to the time-of-day window [time1, time2] given as "HH.MM" strings.

		NOTE: the `datetime` parameter shadows the datetime class; the name
		is kept for backward compatibility with existing callers.
	'''
	delta = timedelta(days=date_range)

	# "HH.MM" -> time-of-day bounds.
	parts1 = time1.split(".")
	parts2 = time2.split(".")
	start_time = time(int(parts1[0]), int(parts1[1]))
	end_time = time(int(parts2[0]), int(parts2[1]))

	# Centre the date window on the given moment.
	start_date = datetime - delta / 2
	end_date = datetime + delta / 2

	return get_montage(cam, start_date, end_date, start_time, end_time)

# get a list of images closest to the given image in time rectangle
#   This uses subset PCA results and projects the image onto the subset bases
def get_similar_images(cam, start_date, end_date, start_time, end_time, count, image_name):
	'''
		Rank the images inside the time rectangle by similarity to image_name
		(Euclidean distance in subset-PCA coefficient space) and return their
		names, closest first.

		NOTE: `count` is currently ignored and every name is returned,
		matching the original behaviour ("add count back at some point").
	'''
	if isinstance(cam, Webcam):
		cam = cam.id
	start_date = datetime.combine(start_date, time(0,0))
	end_date = datetime.combine(end_date, time(0,0))
	imgs = utils.images_in_time_rect(cam, start_date, end_date, start_time, end_time)

	# Map each rectangle image to its index in the whole-year coefficients.
	year = start_date.year
	year_names, coeffs = get_data(cam, year)
	imgs_names = [image.generateImageName() for image in imgs]
	ids = [year_names.index(name) for name in imgs_names]

	# Coefficients for just the images in the rectangle.
	subset = coeffs[ids]

	# Coefficients of the exemplar image.
	exemplar_idx = year_names.index(image_name)
	exemplar_coeffs = coeffs[exemplar_idx]

	# Subset PCA with 5 components; note ksvd returns V and U flipped.
	# (The old loop over c_list = [5] returned inside its first iteration,
	# so it was a single pass in disguise.)
	V, s, U = ipca.ksvd(subset, 5)
	# Project the exemplar onto the subset bases: least-squares solve U*x = b.
	exemplar_proj = np.linalg.lstsq(U, exemplar_coeffs)[0]
	newSV = np.dot(np.diag(s), V.T)
	# Distance from the exemplar's projection to each image's coefficients.
	dists = [np.linalg.norm(exemplar_proj - each) for each in newSV.T]
	# BUGFIX: the old rankdata + ranks.index() lookup mapped tied distances
	# to the same position, duplicating one image and dropping another.
	# argsort gives each image a unique slot.
	order = np.argsort(dists)
	return [imgs_names[i] for i in order]

def get_similar_images3(cam, start_date, end_date, start_time, end_time, image_name, top, n_intervals):
	'''
		Split the time rectangle into n_intervals equal chronological bins
		and, within each bin, pick the `top` images most similar to
		image_name (distance in subset-PCA coefficient space).

		Returns (names, distances) as two aligned lists.
	'''
	if isinstance(cam, Webcam):
		cam = cam.id
	start_date = datetime.combine(start_date, time(0,0))
	end_date = datetime.combine(end_date, time(0,0))
	imgs = utils.images_in_time_rect(cam, start_date, end_date, start_time, end_time)

	# Map each rectangle image to its index in the whole-year coefficients.
	year = start_date.year
	year_names, coeffs = get_data(cam, year)
	imgs_names = [image.generateImageName() for image in imgs]
	ids = [year_names.index(name) for name in imgs_names]

	# Coefficients for just the images in the rectangle.
	subset = coeffs[ids]

	# Coefficients of the exemplar image.
	exemplar_idx = year_names.index(image_name)
	exemplar_coeffs = coeffs[exemplar_idx]

	# Subset PCA with 10 components; note ksvd returns V and U flipped.
	V, s, U = ipca.ksvd(subset, 10)
	exemplar_proj = np.linalg.lstsq(U, exemplar_coeffs)[0]
	newSV = np.dot(np.diag(s), V.T)
	# Distance from the exemplar's projection to each image's coefficients.
	dists = [np.linalg.norm(exemplar_proj - each) for each in newSV.T]

	# Pair each distance with its whole-year index; pairs are already in
	# chronological order because `ids` is.
	dists_ids = [[dists[i], ids[i]] for i in xrange(len(ids))]

	# Bin size via floor division (explicit // keeps Python 2 semantics and
	# works on Python 3); any remainder images at the end are ignored.
	interval_size = len(dists_ids) // n_intervals
	ret = []
	ret2 = []
	for i in xrange(n_intervals):
		# BUGFIX: the old slice ended at (i+1)*interval_size - 1, which
		# silently dropped the last image of every bin.
		this_interval = dists_ids[i*interval_size:(i+1)*interval_size]
		this_interval.sort()  # ascending by distance
		# Take at most `top` per bin (the old code raised IndexError when a
		# bin held fewer than `top` images).
		for dist, idx in this_interval[:top]:
			ret.append(year_names[idx])
			ret2.append(dist)
	return (ret, ret2)
	

def get_similar_images2(cam, start_date, end_date, start_time, end_time, image_name, c, k):
	'''
		Cluster the images in the time rectangle (k-means, k clusters, on
		whitened c-component subset-PCA coefficients) and return the names
		of the images sharing a cluster with image_name.

		Raises ValueError if image_name is not among the year's images.
	'''
	if isinstance(cam, Webcam):
		cam = cam.id
	start_date = datetime.combine(start_date, time(0,0))
	end_date = datetime.combine(end_date, time(0,0))
	imgs = utils.images_in_time_rect(cam, start_date, end_date, start_time, end_time)

	# Map each rectangle image to its index in the whole-year coefficients.
	year = start_date.year
	year_names, coeffs = get_data(cam, year)
	imgs_names = [image.generateImageName() for image in imgs]
	ids = [year_names.index(name) for name in imgs_names]

	# Coefficients for just the images in the rectangle.
	subset = coeffs[ids]

	# Validate the exemplar exists this year (raises ValueError otherwise).
	# The coefficients themselves were fetched but never used by the old
	# code, so only the membership check is kept.
	year_names.index(image_name)

	# Subset PCA (ksvd returns V and U flipped), then k-means on the
	# whitened per-image coefficients (whitening is required for kmeans).
	V, s, U = ipca.ksvd(subset, c)
	newSV = np.dot(np.diag(s), V.T)
	whitened = whiten(newSV.T)
	labels = kmeans2(whitened, k)[1]

	# Group names by cluster label and pick the exemplar's cluster.
	clustered = []
	choice = None
	for i in xrange(k):
		members = [imgs_names[idx] for idx in np.where(labels == i)[0]]
		clustered.append(members)
		if image_name in members:
			choice = i
	# NOTE: if image_name lies outside the time rectangle, choice stays None
	# and this raises TypeError -- same as the original behaviour.
	return clustered[choice]
	
	

# get the k cluster centroids from the results of subset PCA on the given time rect
#   this performs kmeans clustering on the coefficients of the subset PCA, and returns
#   the images that best depict the centroids
def get_centroids(cam, start_date, end_date, start_time, end_time, c=5, k=3):
	'''
		Returns (names, sizes): for each of the k centroids, the name of the
		nearest image in whitened coefficient space, and that cluster's
		population.
	'''
	if isinstance(cam, Webcam):
		cam = cam.id
	start_date = datetime.combine(start_date, time(0,0))
	end_date = datetime.combine(end_date, time(0,0))
	imgs = utils.images_in_time_rect(cam, start_date, end_date, start_time, end_time)

	# Map each rectangle image to its index in the whole-year coefficients.
	year_names, coeffs = get_data(cam, start_date.year)
	imgs_names = [image.generateImageName() for image in imgs]
	ids = [year_names.index(name) for name in imgs_names]

	# Coefficients for just the images in the rectangle.
	subset = coeffs[ids]

	# Subset PCA with c components (ksvd returns V and U flipped), then
	# k-means on the whitened per-image coefficients (whitening required).
	V, s, U = ipca.ksvd(subset, c)
	newSV = np.dot(np.diag(s), V.T)
	whitened = whiten(newSV.T)
	centroids, labels = kmeans2(whitened, k)

	# For each centroid: name of the closest image, and cluster population.
	names = []
	sizes = []
	label_list = labels.tolist()
	for i in xrange(len(centroids)):
		dists = [np.linalg.norm(centroids[i] - each) for each in whitened]
		names.append(imgs_names[dists.index(min(dists))])
		sizes.append(label_list.count(i))
	return names, sizes

def get_montage2(cam, start_date, end_date, start_time, end_time, count=1):
	'''
		For each of 3, 5 and 10 PCA components, return the name of the image
		in the time rectangle whose subset-PCA coefficients lie closest to
		the projected mean of the subset.
	'''
	if isinstance(cam, Webcam):
		cam = cam.id
	start_date = datetime.combine(start_date, time(0,0))
	end_date = datetime.combine(end_date, time(0,0))
	imgs = utils.images_in_time_rect(cam, start_date, end_date, start_time, end_time)

	# Map each rectangle image to its index in the whole-year coefficients.
	year_names, coeffs = get_data(cam, start_date.year)
	imgs_names = [image.generateImageName() for image in imgs]
	ids = [year_names.index(name) for name in imgs_names]

	# Coefficients for just the images in the rectangle; keep the mean for
	# projection below.
	subset = coeffs[ids]
	mean = np.mean(subset, axis=0)

	ret = []
	for c in (3, 5, 10):
		# Subset PCA (ksvd returns V and U flipped).
		V, s, U = ipca.ksvd(subset, c)
		# Project the mean onto the bases: least-squares solve of U*x = mean.
		mean_proj = np.linalg.lstsq(U, mean)[0]
		newSV = np.dot(np.diag(s), V.T)
		dists = [np.linalg.norm(mean_proj - each) for each in newSV.T]
		# Nearest subset image -> whole-year index -> image name.
		closest_idx = ids[dists.index(min(dists))]
		ret.append(year_names[closest_idx])
	return ret

# get the PCA coefficients of (cam, year) from file
# returns (filenames, coeffs)
def get_data(cam, year):
	'''
		Load the precomputed PCA summary for (cam, year) from its .mat file.

		Returns (filenames, coeffs): a list of image names and the matching
		coefficient matrix.

		io.loadmat raises if the file is missing or unreadable.
	'''
	# The old body had an unused `import settings`; removed.
	p = get_datafile_path(cam, year)
	data_p = '%sdata%s.mat' % (p, year)

	data = io.loadmat(data_p)  # possibly throwing exception
	year_names = data['filenames']
	coeffs = data['coeffs']
	return (year_names.tolist(), coeffs)


def get_data2(cam, year):
	'''
		Load the PCA summary for (cam, year) from its .npz file.

		Newer files store 'V' and 'S'; the coefficients are reconstructed
		as (diag(S) . V^T)^T.  Older files store 'coeffs' directly.

		Returns (filenames, coeffs).
	'''
	year_names = get_year_names(cam, year)

	p = get_datafile_path(cam, year)
	data_p = '%sdata%s.npz' % (p, year)
	data = np.load(data_p, mmap_mode='r')

	try:
		someV = data['V']
		someS = data['S']
		coeffs = np.dot(np.diag(someS), someV.T)
		coeffs = coeffs.T
	except KeyError:
		# Old-format file without 'V'/'S': raw coeffs stored directly.
		# (Was a bare `except:`, which also hid unrelated failures.)
		coeffs = data['coeffs']
		# Collapse the middle axis -- presumably always 1 in old files;
		# TODO confirm, reshape(w, d) requires h == 1.
		w, h, d = np.shape(coeffs)
		coeffs = coeffs.reshape(w, d)

	return (year_names, coeffs)

# get the whole year list of image names used for indexing data
# this will check if a data file is present, if not, it creates it
def get_year_names(cam, year):
	'''Return the year's full image-name list, caching it in an .npz file.'''
	cache_path = '%simage_names%s.npz' % (get_datafile_path(cam, year), year)

	if not os.path.exists(cache_path):
		# Cache miss: enumerate the whole year and persist the names.
		first = datetime(year, 1, 1, 0, 0, 0)
		last = datetime(year, 12, 31, 23, 59, 59)
		whole_year = Image.images.getImagesInRange(first, last, camera=cam)
		names = [image.generateImageName() for image in whole_year]
		np.savez(cache_path, names=names)
		return names

	# Cache hit: read the names straight from the file.
	return np.load(cache_path)['names'].tolist()

# get the path for the summary datafile from cam id and year
def get_datafile_path(cam, year):
	'''
		Directory holding the summary data files for the camera, sharded by
		the last 2 / last 4 digits of the zero-padded 8-digit camera id.
		(`year` is accepted for interface compatibility but unused here.)
	'''
	import settings

	cam8 = '%08d' % cam
	root = os.path.dirname(settings.PROJECT_DIR)
	return '%s/archiveofmanyoutdoorscenes/static/results/%s/%s/%s/' % (
		root, cam8[-2:], cam8[-4:], cam8)


# converts images to their indices in the entire year's images
def imgs_2_idx(imgs, cam):
	'''
		Map each image in imgs to its index within the full year's image
		list for the camera (the year is taken from the 4-digit prefix of
		the first image's name).

		Raises ValueError when an image's name is not found in the year.
	'''
	if isinstance(cam, Webcam):
		cam = cam.id
	# Image names start with the 4-digit year.
	im_name = imgs[0].generateImageName()
	year = int(im_name[:4])
	start_time = datetime(year, 1, 1, 0, 0, 0)
	end_time = datetime(year, 12, 31, 23, 59, 59)
	whole_year = Image.images.getImagesInRange(start_time, end_time, camera=cam)

	# Build a name -> first-index map once instead of calling list.index per
	# image: O(n+m) instead of the old O(n*m).  setdefault keeps the FIRST
	# occurrence, matching list.index semantics.
	first_index = {}
	for i, image in enumerate(whole_year):
		first_index.setdefault(image.generateImageName(), i)

	indices = []
	for image in imgs:
		name = image.generateImageName()
		if name not in first_index:
			# Preserve the ValueError the old list.index call raised.
			raise ValueError('%r is not in the year\'s images' % name)
		indices.append(first_index[name])
	return indices

