#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Coursework in Python 
from IDAPICourseworkLibrary import *
from numpy import *
#
# Coursework 1 begins here
#
# Function to compute the prior distribution of the variable root from the data set
def Prior(theData, root, noStates):
    """Return the prior distribution of variable `root` estimated from theData.

    theData  : 2D array of integer states, one row per data point
    root     : column index of the variable whose prior is wanted
    noStates : per-variable state counts; states are 0..noStates[v]-1
    """
    prior = zeros((noStates[root]), float)
    # Tally how often each state of the root variable occurs.
    for record in theData:
        prior[record[root]] += 1
    # Normalise the counts into relative frequencies.
    n = len(theData)
    for s in range(len(prior)):
        prior[s] = prior[s] / n
    return prior
# Function to compute a CPT with parent node varP and child node varC from the data array
# it is assumed that the states are designated by consecutive integers starting with 0
def CPT(theData, varC, varP, noStates):
    """Return the conditional probability table P(varC | varP) from theData.

    Result is indexed [childState][parentState]; each column sums to 1.
    A parent state never seen in the data gets a uniform column
    (1/noStates[varC] per child state).
    States are assumed to be consecutive integers starting at 0.
    """
    cPT = zeros((noStates[varC], noStates[varP]), float)
    parentCounts = zeros(noStates[varP], float)
    for row in theData:
        cPT[row[varC]][row[varP]] += 1
        parentCounts[row[varP]] += 1
    for p in range(noStates[varP]):
        if parentCounts[p] == 0:
            # Unseen parent state: fall back to a uniform distribution.
            # 1.0/ (not 1/) so this is float division under Python 2 too;
            # the original `1 / noStates[varC]` truncated to 0.
            for c in range(noStates[varC]):
                cPT[c][p] = 1.0 / noStates[varC]
        else:
            for c in range(noStates[varC]):
                cPT[c][p] = cPT[c][p] / parentCounts[p]
    return cPT
# Function to calculate the joint probability table of two variables in the data set
def JPT(theData, varRow, varCol, noStates):
    """Return the joint probability table of two variables in the data set.

    Result is indexed [rowVarState][colVarState]; all entries sum to 1.
    Each data point contributes 1/N directly, so no separate
    normalisation pass is needed.
    """
    jPT = zeros((noStates[varRow], noStates[varCol]), float)
    total = len(theData)
    for record in theData:
        jPT[record[varRow]][record[varCol]] += 1.0 / total
    return jPT
#
# Function to convert a joint probability table to a conditional probability table
def JPT2CPT(aJPT):
    """Convert a joint probability table to a conditional one, in place.

    Each column is rescaled to sum to 1, giving P(row | col).
    A column that sums to zero (an impossible conditioning state) is set
    to a uniform distribution, matching CPT's handling of unseen parent
    states; the original code raised ZeroDivisionError there.
    Returns the (mutated) aJPT for convenience.
    """
    noChildStates, noParentStates = shape(aJPT)
    for j in range(noParentStates):
        # Column total; named colTotal to avoid shadowing builtin sum().
        colTotal = 0.0
        for i in range(noChildStates):
            colTotal += aJPT[i][j]
        if colTotal == 0.0:
            for i in range(noChildStates):
                aJPT[i][j] = 1.0 / noChildStates
        else:
            for i in range(noChildStates):
                aJPT[i][j] = aJPT[i][j] / colTotal
    return aJPT

#
# Function to query a naive Bayesian network
def Query(theQuery, naiveBayes):
    """Return the posterior over the root variable of a naive Bayes network.

    theQuery   : observed child states, theQuery[i-1] for network node i
    naiveBayes : [prior] + one CPT (indexed [child][root]) per child node
    The result is normalised to sum to 1.
    """
    prior = naiveBayes[0]
    rootPdf = zeros((prior.shape[0]), float)
    total = 0.0
    for rootState in range(len(rootPdf)):
        # Product of the child likelihoods for this root state.
        likelihood = 1
        for node in range(1, len(naiveBayes)):
            childCpt = naiveBayes[node]
            observed = theQuery[node - 1]
            likelihood *= childCpt[observed][rootState]
        rootPdf[rootState] = likelihood * prior[rootState]
        total += rootPdf[rootState]
    # Normalise so the posterior sums to one.
    for rootState in range(len(rootPdf)):
        rootPdf[rootState] /= total
    return rootPdf
#
# End of Coursework 1
#
# Coursework 2 begins here
#
# Calculate the mutual information from the joint probability table of two variables
def MutualInformation(jP):
    """Return the mutual information (in bits) of the two variables in jP.

    jP is their joint probability table; zero-probability cells are
    skipped, using the convention 0 * log(0) = 0.
    """
    noRows, noCols = jP.shape
    marginalA = zeros(noRows, float)
    marginalB = zeros(noCols, float)
    # Both marginals in a single pass over the table.
    for r in range(noRows):
        for c in range(noCols):
            marginalA[r] += jP[r][c]
            marginalB[c] += jP[r][c]
    mi = 0.0
    for r in range(noRows):
        for c in range(noCols):
            # jP > 0 also guarantees marginalA[r] > 0.
            if marginalB[c] > 0 and jP[r][c] > 0:
                mi += log2(jP[r][c] / (marginalA[r] * marginalB[c])) * jP[r][c]
    return mi
#
# construct a dependency matrix for all the variables
def DependencyMatrix(theData, noVariables):
    """Return the noVariables x noVariables matrix of pairwise mutual information."""
    # Infer each variable's state count as (largest observed value + 1).
    noStates = zeros(noVariables, int)
    for record in theData:
        for v in range(noVariables):
            if record[v] + 1 > noStates[v]:
                noStates[v] = record[v] + 1
    MIMatrix = zeros((noVariables, noVariables))
    for a in range(noVariables):
        for b in range(noVariables):
            MIMatrix[a][b] = MutualInformation(JPT(theData, a, b, noStates))
    return MIMatrix
# Function to compute an ordered list of dependencies 
def DependencyList(depMatrix):
    """Return all arcs [weight, i, j] (i < j) sorted by descending weight.

    depMatrix is the symmetric dependency (mutual information) matrix;
    only the upper triangle is read, so each pair appears once.
    """
    arcs = []
    for a in range(depMatrix.shape[0]):
        for b in range(a + 1, depMatrix.shape[1]):
            arcs.append([depMatrix[a][b], a, b])
    # Stable sort, strongest dependency first.
    arcs.sort(key=lambda arc: arc[0], reverse=True)
    return array(arcs)
#
# Functions implementing the spanning tree algorithm
# Coursework 2 task 4

def AddArc(paths, x, y, noVariables):
    """Record the arc x-y in the connectivity matrix `paths`, in place.

    After marking x-y (symmetrically), recursively connect every node
    already reachable from x to y, and vice versa, so `paths` stays a
    transitive connectivity map. Returns the mutated matrix.
    """
    paths[x][y] = 1
    paths[y][x] = 1
    for node in range(noVariables):
        # Anything connected to x must also be marked connected to y ...
        if (paths[node][x] == 1 or paths[x][node] == 1) and paths[node][y] == 0:
            paths = AddArc(paths, node, y, noVariables)
        # ... and anything connected to y must be connected to x.
        if (paths[node][y] == 1 or paths[y][node] == 1) and paths[node][x] == 0:
            paths = AddArc(paths, node, x, noVariables)
    return paths

def SpanningTreeAlgorithm(depList, noVariables):
    """Return the maximally weighted spanning tree as an array of arcs.

    depList must be sorted by descending weight (as DependencyList
    produces). Greedily accept each arc unless its endpoints are already
    connected, i.e. accepting it would create a loop.
    """
    spanningTree = []
    paths = zeros((noVariables, noVariables), int)
    for arc in depList:
        a = int(arc[1])
        b = int(arc[2])
        if paths[a][b] == 0:
            # No existing path between a and b: keep the arc.
            spanningTree.append(arc)
            paths = AddArc(paths, a, b, noVariables)
    return array(spanningTree)
#
# End of coursework 2
#
# Coursework 3 begins here
#
# Function to compute a CPT with multiple parents from the data set
# it is assumed that the states are designated by consecutive integers starting with 0
def CPT_2(theData, child, parent1, parent2, noStates):
    """Return the CPT P(child | parent1, parent2) estimated from theData.

    Result is indexed [childState][parent1State][parent2State]; every
    (parent1, parent2) column sums to 1. A parent-state pair never seen
    in the data gets a uniform column, mirroring CPT's behaviour.
    The original body was an unimplemented stub that returned all zeros,
    which broke ExampleBayesianNetwork.
    """
    cPT = zeros([noStates[child], noStates[parent1], noStates[parent2]], float)
    parentCounts = zeros([noStates[parent1], noStates[parent2]], float)
    for row in theData:
        cPT[row[child]][row[parent1]][row[parent2]] += 1
        parentCounts[row[parent1]][row[parent2]] += 1
    for p1 in range(noStates[parent1]):
        for p2 in range(noStates[parent2]):
            if parentCounts[p1][p2] == 0:
                # Unseen parent combination: uniform fallback (float division).
                for c in range(noStates[child]):
                    cPT[c][p1][p2] = 1.0 / noStates[child]
            else:
                for c in range(noStates[child]):
                    cPT[c][p1][p2] = cPT[c][p1][p2] / parentCounts[p1][p2]
    return cPT
#
# Definition of a Bayesian Network
def ExampleBayesianNetwork(theData, noStates):
    """Build the fixed six-node example Bayesian network from theData.

    Returns (arcList, cptList): arcList[i] lists node i followed by its
    parents; cptList[i] is the matching prior or conditional table.
    """
    arcList = [[0], [1], [2, 0], [3, 2, 1], [4, 3], [5, 3]]
    cptList = [
        Prior(theData, 0, noStates),          # node 0: no parents
        Prior(theData, 1, noStates),          # node 1: no parents
        CPT(theData, 2, 0, noStates),         # node 2 <- 0
        CPT_2(theData, 3, 2, 1, noStates),    # node 3 <- 2, 1
        CPT(theData, 4, 3, noStates),         # node 4 <- 3
        CPT(theData, 5, 3, noStates),         # node 5 <- 3
    ]
    return arcList, cptList
# Coursework 3 task 2 begins here

# end of coursework 3 task 2
#
# Function to calculate the MDL size of a Bayesian Network
def MDLSize(arcList, cptList, noDataPoints, noStates):
    """Return the MDL size (model description length) of a Bayesian network.

    Coursework placeholder: the body is not yet implemented, so this
    currently always returns 0.0.
    """
    mdlSize = 0.0
# Coursework 3 task 3 begins here


# Coursework 3 task 3 ends here 
    return mdlSize 
#
# Function to calculate the joint probability of a single data point in a Network
def JointProbability(dataPoint, arcList, cptList):
    """Return the joint probability of a single data point under the network.

    Coursework placeholder: the body is not yet implemented, so this
    currently always returns 1.0.
    """
    jP = 1.0
# Coursework 3 task 4 begins here


# Coursework 3 task 4 ends here 
    return jP
#
# Function to calculate the MDLAccuracy from a data set
def MDLAccuracy(theData, arcList, cptList):
    """Return the MDL accuracy (data log-likelihood) of the network on theData.

    Coursework placeholder: the body is not yet implemented, so this
    currently always returns 0.
    """
    mdlAccuracy=0
# Coursework 3 task 5 begins here


# Coursework 3 task 5 ends here 
    return mdlAccuracy
#
# End of coursework 3
#
# Coursework 4 begins here
#
def Mean(theData):
    """Return the per-variable (per-column) mean vector of theData.

    Coursework placeholder: the body is not yet implemented, so this
    currently returns an empty array.
    """
    realData = theData.astype(float)
    noVariables=theData.shape[1] 
    mean = []
    # Coursework 4 task 1 begins here



    # Coursework 4 task 1 ends here
    return array(mean)


def Covariance(theData):
    """Return the noVariables x noVariables covariance matrix of theData.

    Coursework placeholder: the body is not yet implemented, so this
    currently returns an all-zero matrix.
    """
    realData = theData.astype(float)
    noVariables=theData.shape[1] 
    covar = zeros((noVariables, noVariables), float)
    # Coursework 4 task 2 begins here


    # Coursework 4 task 2 ends here
    return covar
def CreateEigenfaceFiles(theBasis):
    """Write each basis vector of theBasis out as an eigenface image file.

    Coursework placeholder: not yet implemented; currently does nothing.
    """
    adummystatement = 0 #delete this when you do the coursework
    # Coursework 4 task 3 begins here

    # Coursework 4 task 3 ends here

def ProjectFace(theBasis, theMean, theFaceImage):
    """Return the component magnitudes of theFaceImage projected onto theBasis.

    Coursework placeholder: the body is not yet implemented, so this
    currently returns an empty array.
    """
    magnitudes = []
    # Coursework 4 task 4 begins here

    # Coursework 4 task 4 ends here
    return array(magnitudes)

def CreatePartialReconstructions(aBasis, aMean, componentMags):
    """Write partial face reconstructions built from successively more components.

    Coursework placeholder: not yet implemented; currently does nothing.
    """
    adummystatement = 0  #delete this when you do the coursework
    # Coursework 4 task 5 begins here

    # Coursework 4 task 5 ends here

def PrincipalComponents(theData):
    """Return the principal components of theData as an orthonormal basis.

    Coursework placeholder: the body is not yet implemented, so this
    currently returns an empty array.
    """
    orthoPhi = []
    # Coursework 4 task 3 begins here
    # The first part is almost identical to the above Covariance function, but because the
    # data has so many variables you need to use the Kohonen-Lowe method described in lecture 15.
    # The output should be a list of the principal components, normalised and sorted in
    # descending order of their eigenvalues' magnitudes.

    
    # Coursework 4 task 6 ends here
    return array(orthoPhi)

#
# main program part for Coursework 1
#
# Load the Hepatitis C data set via the coursework library
# (noStates here is the per-variable state-count list used by the CPT/JPT functions).
noVariables, noRoots, noStates, noDataPoints, datain = ReadFile("HepatitisC.txt")
theData = array(datain)
# Coursework 2: rank all variable pairs by mutual information,
# then extract the maximally weighted spanning tree from that ranking.
dl = DependencyList(DependencyMatrix(theData, noVariables))
print len(dl)
st =  SpanningTreeAlgorithm(dl, noVariables)
print len(st)
print st
print dl
#ppendString("results.txt","Coursework Two Results by eg08")
#ppendString("results.txt","") #blank line)

