from numpy import *
from pylab import *
from matplotlib import rc
from math import *

# Returns the list of class labels (1 or 2) in the same order as they are in the data
def loadClasses():
	"""Return the class label (1 or 2) of each line of "data.txt", in file order.

	The label is the first whitespace-separated token of each line, so
	multi-digit labels are handled too (the original read only the first
	character of the line).
	"""
	# `with` guarantees the file is closed even if parsing fails.
	with open("data.txt") as data:
		return [int(line.split()[0]) for line in data]

# Loads the data into a NumPy matrix and returns it
def loadData():
	"""Read "data.txt" and return its feature columns as an (n, d) NumPy matrix.

	The first whitespace-separated token of each line (the class label) is
	skipped; the remaining tokens are parsed as floats.
	"""
	# `with` closes the file deterministically; list(map(...)) keeps the
	# original Python 2 style while also working under Python 3, where a
	# bare map object cannot be fed to matrix().
	with open("data.txt") as data:
		return matrix([list(map(float, line.split()[1:])) for line in data])

# Returns the reduced dimensions data as a NumPy matrix
def reduireDimensions(X, alpha):
	"""PCA: project X onto the fewest principal components whose eigenvalues
	account for at least `alpha` of the total variance.

	X: (n, d) NumPy matrix of examples (one row per example).
	alpha: fraction of variance to retain, in (0, 1].
	Returns the centered, projected data as an (n, k) matrix.
	"""
	means = X.mean(0)
	Xbar = X - means
	Sigma = cov(Xbar, rowvar=0, bias=1)
	# Sigma is symmetric, so eigh is appropriate and returns real
	# eigenvalues.  Note that eig/eigh do NOT return eigenvalues in
	# decreasing order, so they must be sorted before applying the
	# cumulative-variance cutoff below — otherwise arbitrary (not
	# principal) components get selected.
	D, V = linalg.eigh(Sigma)
	order = argsort(D)[::-1]
	D = D[order]
	V = V[:, order]
	total = D.sum()
	k = len(D) - 1  # fallback: keep everything (e.g. alpha == 1 with rounding)
	for i in range(len(D)):
		if D[:i + 1].sum() / total >= alpha:
			k = i
			break
	# Columns of V are the eigenvectors; keep the first k+1 of them.
	return Xbar * V[:, :k + 1]

# Returns the elements in class 1 and 2 as two lists
def classes():
	"""Split the dimension-reduced examples into two lists, one per class.

	Returns (class1, class2): lists of feature rows (plain Python lists)
	for the examples labelled 1 and 2 respectively.
	"""
	labels = transpose(matrix(loadClasses()))
	reduced = reduireDimensions(loadData(), 0.9)
	# Prepend the label column so each row carries its own class.
	merged = append(labels, reduced, axis=1)
	class1, class2 = [], []
	for row in merged.tolist():
		target = class1 if row[0] == 1 else class2
		target.append(row[1:])
	return class1, class2

def tache1():
	"""Task 1: scatter-plot the two classes of the dimension-reduced data."""
	class1, class2 = classes()
	# The two retained attributes become the plot axes; the return values
	# of plot() were previously bound to unused locals (p1, p2).
	plot([row[0] for row in class1], [row[1] for row in class1], "bo", label="Classe 1")
	plot([row[0] for row in class2], [row[1] for row in class2], "ro", label="Classe 2")
	xlabel("Attribut 1")
	ylabel("Attribut 2")
	title("Classes")
	legend()
	show()

def entrainerModele(Z, j):
	"""Fit a Gaussian model to the training examples of one class.

	Z: list (or matrix) of feature rows for the class.
	j: class index — unused, kept for interface compatibility.
	Returns (mu, Sigma): the sample mean (1, d) and the biased
	maximum-likelihood covariance estimate (d, d).
	"""
	Z = matrix(Z)
	mu = Z.mean(0)
	# Center the data before forming the covariance: without subtracting
	# mu, transpose(Z) * Z / n is a second-moment matrix rather than a
	# covariance, which makes the (z - mu) Sigma^-1 (z - mu)^T Gaussian
	# likelihood in classifierExemple inconsistent.
	Zc = Z - mu
	Sigma = (transpose(Zc) * Zc) / float(Z.shape[0])
	return mu, Sigma

def classifierExemple(z, theta1, theta2, priors=(0.6, 0.4)):
	"""Classify one example with a two-class Gaussian Bayes rule.

	z: feature row of the example to classify.
	theta1, theta2: (mu, Sigma) parameters of the class-1 and class-2
	    Gaussians, as returned by entrainerModele.
	priors: prior probabilities of (class 1, class 2); the default keeps
	    the previously hard-coded 0.6 / 0.4 split, so existing callers
	    are unaffected.
	Returns 1 or 2: the class with the highest posterior score.
	"""
	thetas = (theta1, theta2)
	zrow = matrix(z)
	scores = []
	for i in range(2):
		mu, Sigma = thetas[i]
		diff = zrow - mu
		# Gaussian log-likelihood up to an additive constant that is
		# identical for both classes and therefore cancels in the
		# comparison.
		loglik = -0.5 * log(linalg.det(Sigma)) \
			- 0.5 * (diff * linalg.inv(Sigma) * transpose(diff))
		scores.append(loglik + log(priors[i]))
	return 1 if scores[0] > scores[1] else 2

def validationCroisee(Z, K):
	"""K-fold cross-validation of the two-Gaussian classifier.

	Z: unused — kept for interface compatibility (the data is re-derived
	   via classes(), as before).
	K: number of folds.
	Prints the per-fold error counts and their mean, and RETURNS the mean
	(the original returned None, which made tache2's "%f" formatting
	raise a TypeError).
	"""
	class1, class2 = classes()
	# Floor division: under Python 3, "/" would give floats and break the
	# slice indices below.
	step1 = len(class1) // K
	step2 = len(class2) // K
	erreurs = []
	for i in range(K):
		start1, start2 = i * step1, i * step2
		end1, end2 = start1 + step1, start2 + step2
		# Fold i is the test set; everything else is the training set.
		Z1test = class1[start1:end1]
		Z2test = class2[start2:end2]
		Z1train = class1[:start1] + class1[end1:]
		Z2train = class2[:start2] + class2[end2:]
		theta1 = entrainerModele(Z1train, 1)
		theta2 = entrainerModele(Z2train, 2)
		# Count misclassifications over both test halves.
		e = 0
		for z in Z1test:
			if classifierExemple(z, theta1, theta2) != 1:
				e += 1
		for z in Z2test:
			if classifierExemple(z, theta1, theta2) != 2:
				e += 1
		erreurs.append(e)
	# Single-argument call-style print works identically in Python 2 and 3.
	print(erreurs)
	moyenne = float(mean(erreurs))
	print(moyenne)
	return moyenne

	
def tache2():
	# Task 2: print the cross-validation error for several fold counts K.
	# NOTE(review): the "%f" below requires validationCroisee to return the
	# mean error count, but upstream it only prints and returns None, so
	# this line raises a TypeError at runtime — validationCroisee should
	# return the mean it computes; verify.
	print "k, erreur"
	for K in (2, 5, 10, 25, 50):
		print "%d, %f" % (K, validationCroisee(loadData(), K))

# Run both tasks only when executed as a script, not when imported.
if __name__ == "__main__":
	tache1()
	tache2()
