;This file is part of C45IDL.
;
;C45IDL is free software: you can redistribute it and/or modify
;it under the terms of the GNU Lesser General Public License as published by
;the Free Software Foundation, either version 3 of the License, or
;(at your option) any later version.
;
;C45IDL is distributed in the hope that it will be useful,
;but WITHOUT ANY WARRANTY; without even the implied warranty of
;MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;GNU Lesser General Public License for more details.
;
;You should have received a copy of the GNU Lesser General Public License
;along with C45IDL.  If not, see <http://www.gnu.org/licenses/>. 

;--------------------------------------------------------------------------------------------------
; File: dtc.pro
; Purpose:
;  Build a decision tree from training data using the C4.5 algorithm
; Author: Mauricio C. M. de Paulo
; Date: 15/07/2011
;--------------------------------------------------------------------------------------------------

;Binary logarithm
FUNCTION alog2, x
  ;Base-2 logarithm, computed from the natural logarithm.
  ln2=0.69314718055995D ;natural log of 2
  return, alog(x)/ln2
END

;Finds the Maximum Gain Ratio in the given subset
FUNCTION subsetMaxGain, curSet, classes
  ;Scans every attribute column (1..nAttr-1; column 0 holds the class) and
  ;returns the pair [maxAttr, maxTreshold]: the attribute index and split
  ;threshold with the highest gain ratio found by attrMaxGainRatio.
  nAttr=(n_elements(curSet[0,*]))
  maxTreshold=0
  maxAttr=0
  maxGain=0
  for i=1L, nAttr-1 do begin ;fix one attribute at a time
    pair=attrMaxGainRatio(curSet, classes, i) ;pair is [maxTreshold, maxGain]
    if maxGain lt pair[1] then begin
      maxTreshold=pair[0]
      maxGain=pair[1]
      maxAttr=i
    endif
  endfor
  ;the dead 'pair=FLTARR(2)' of the original was removed: it was overwritten
  ;immediately by the assignment below
  return, [maxAttr, maxTreshold]
END

;searches for maximum gain in a given attribute
;this function can be optimized in many ways. One is to use the fact that adjacent possible values of the same class are never optimal.
FUNCTION attrMaxGainRatio, curSet, classes, attrIndex
  ;Searches the candidate thresholds of one attribute for the maximum gain
  ;ratio. Returns the pair [maxTreshold, maxGain], or [0,0] when the
  ;attribute offers no usable split.
  ;NOTE(review): this can be optimized; e.g. adjacent candidate values of
  ;the same class are never optimal.
  possibleValues=availableAttrValues(curSet, attrIndex)
  maxGain=0
  maxTreshold=0
  ;a single candidate value means there is nothing to split on
  if n_elements(possibleValues) le 1 then return, [0, 0]
  for i=0L, n_elements(possibleValues)-1 do begin
    testTreshold=possibleValues[i]
    testGain=gainRatio(curSet, classes, attrIndex, testTreshold)
    if maxGain lt testGain then begin
      maxGain=testGain
      maxTreshold=testTreshold
    endif
  endfor
  return, [maxTreshold, maxGain]
END

;generates a sample Set for testing purposes
FUNCTION sampleSet, n ;first column is the class
  ;Builds a synthetic (2*n)-row sample matrix for testing: column 0 is the
  ;class label (0 for the first n rows, 1 for the rest, via integer
  ;division), and the remaining three columns are attributes drawn from two
  ;normally distributed clusters (the second cluster shifted by +0.5 before
  ;scaling by 10 and flooring).
  ;NOTE(review): 'seed' is undefined at the first call, so randomn seeds
  ;itself from the system clock; output is not reproducible between runs.
  return, [[indgen(2*n,/L64) / n ],[floor(randomn(seed,n)*10),floor((randomn(seed,n)+0.5)*10)],[floor(randomn(seed,n)*10),floor((randomn(seed,n)+0.5)*10)],[floor(randomn(seed,n)*10),floor((randomn(seed,n)+0.5)*10)]]
END

;receives a list of probabilities and returns the entropy value
FUNCTION entropy, probList
  ;Shannon entropy (base 2) of a list of probabilities.
  ;Negation is hoisted outside the sum; the result is identical.
  return, -total(probList*alog2(probList))
END


;computes split and gain 
FUNCTION gainRatio, curSet, classes, attrIndex, threshold
  ;Computes the C4.5 gain ratio (information gain / split information) of
  ;partitioning curSet on 'attribute gt threshold'.
  ;Returns 0 when either side of the split is degenerate (empty or a single
  ;sample). The 'classes' parameter is unused but kept for the callers.
  s1Ids=where(curSet[*,attrIndex] gt threshold, d1Count)
  s2Ids=where(curSet[*,attrIndex] le threshold, d2Count)
  D1=d1Count*1.
  D2=d2Count*1.
  ;the original tested 'eq 1' on n_elements(where()), which also caught the
  ;empty case because where() returns scalar -1; with COUNT the intent is
  ;stated directly: reject empty or single-sample subsets
  if (D1 le 1) or (D2 le 1) then return, 0

  S1=curSet[s1Ids,*]
  S2=curSet[s2Ids,*]
  Dt=n_elements(curSet[*,0])*1.

  ;split information of the binary partition
  setSplit=-D1/Dt *alog2(D1/Dt) - D2/Dt *alog2(D2/Dt)
  ;information gain: parent entropy minus weighted child entropies
  E0=entropy(proportion(curSet))
  E1=entropy(proportion(S1))
  E2=entropy(proportion(S2))
  setGain=E0-E1*D1/Dt -E2*D2/Dt
  return, (setGain/setSplit)
END

;list available values for an attribute in the set
FUNCTION availableAttrValues, curSet, attrIndex
  ;Lists the candidate split thresholds for one attribute. Only positions
  ;where the class changes between consecutive (attribute-sorted) samples
  ;are considered; each threshold is the midpoint of two boundary values.
  sortedSet=curSet[sort(curSet[*,attrIndex]),*] ;sort rows by the attribute
  ids=where(sortedSet[*,0] ne shift(sortedSet[*,0],1), nChanges)
  ;if every sample has the same class there is no boundary; return a single
  ;value so callers (attrMaxGainRatio) take their 'only one value' branch.
  ;(The original would have indexed with -1 here and faulted.)
  if nChanges eq 0 then return, sortedSet[0,attrIndex]
  ;NOTE(review): shift() wraps around, so the first midpoint pairs the first
  ;boundary value with the LAST one -- confirm this is intended
  valDifClass=(sortedSet[ids,attrIndex]+shift(sortedSet[ids,attrIndex],1))/2
  return, valDifClass
END

;list the classes available in a set
FUNCTION listClasses, sampleSet
  ;Returns the distinct class labels (column 0) in ascending order.
  ;TODO: min/max could be used instead, as classes are usually sequential.
  classCol=sampleSet[*,0]
  order=sort(classCol)
  return, classCol[uniq(classCol, order)]
END

;returns the proportion of each class
FUNCTION proportion, curSet
  ;Returns the relative frequency of each class (ordered as listClasses
  ;returns them) in curSet. Returns scalar 1 when only one class is present.
  classes=listClasses(curSet)
  nclasses=n_elements(classes)
  if nclasses eq 1 then return, 1
  prob=FLTARR(nclasses) ;per-class relative frequency
  nCurSamples=(size(curSet))[1] ;number of rows (samples)
  for k=0, nclasses-1 do begin
    ;count matches via where's COUNT argument; the original took the size of
    ;the index array, which miscounts when where() returns scalar -1
    dummy=where(curSet[*,0] eq classes[k], freq)
    prob[k]=freq*1./nCurSamples ;each class count over the total count
  endfor
  return, prob
END

;first column is the class attribute

;appends a node to the tree
FUNCTION appendNode, tree, attrIndex, threshold, nextLeftNode, nextRightNode, majorClass
  ;Appends one node row to the tree (modified in place) and returns its
  ;row index.
  ;Row layout: attrIndex, threshold, nextLeftNode, nextRightNode, majorClass.
  ;When nextLeftNode equals nextRightNode the node is a leaf of that class.
  ;A tree whose [0,0] element is -1 is treated as empty and replaced.
  newNodeRow=transpose([attrIndex,threshold,nextLeftNode, nextRightNode,majorClass])
  if tree[0,0] eq -1 then tree=newNodeRow else tree=[tree,newNodeRow]
  return, n_elements(tree[*,0])-1
END

FUNCTION isLeaf, nodeId, tree
  ;Returns 1 when the node is a leaf (its left and right child indexes
  ;coincide, see appendNode), 0 otherwise.
  if tree[nodeId,2] ne tree[nodeId,3] then return, 0
  return, 1
END

FUNCTION getMajorClass, curSet
  ;Returns the class with the highest proportion in the set (ties broken by
  ;the first match in listClasses order).
  p=proportion(curSet)
  classes=listClasses(curSet)
  best=(where(p eq max(p)))[0]
  return, classes[best]
END
;Recursive function that produces one node and returns its index in the tree's node list
FUNCTION C45, curSet, tree, minPixels, leafAccuracy
  ;Recursively grows the decision tree from curSet, appending nodes to
  ;'tree' (modified in place by appendNode), and returns the index of the
  ;node created for this subset.
  ;  curSet       [nSamples, nAttrs] matrix; column 0 is the class
  ;  tree         node matrix; pass transpose([-1,0,0,0,0]) to start empty
  ;  minPixels    pre-pruning: minimum subset size to keep splitting
  ;  leafAccuracy pre-pruning: purity tolerance (see prePrunning)
  classes=listClasses(curSet)
  ;if (size(classes))[0] eq 0 then begin ;this is a pure node
  ; Pre prunning
  majorClass=getMajorClass(curSet)
  if prePrunning(curSet, minPixels, leafAccuracy) or (n_elements(classes) le 1) then begin
    ;stop here: make a leaf. All five columns carry the majority class, so
    ;columns 2 and 3 are equal -- the leaf marker used by isLeaf.
    newIndex=appendNode(tree, majorClass,majorClass,majorClass,majorClass, majorClass)
  endif else begin ;still need to split
    pair=subsetMaxGain(curSet,classes) ; maxAttr and maxTreshold
    ;children indexes are unknown until the recursive calls return, so the
    ;node is appended with placeholder children and patched below
    newIndex=appendNode(tree, pair[0], pair[1], 0, 0, majorClass) ;the indexes are still unknown
    writeMsg, strtrim(n_elements(tree[*,0]),1) + " calls processed."
    S1= curSet[where(curSet[*,pair[0]] gt pair[1]),*]
    tree[newIndex,2]=C45(S1,tree,minPixels,leafAccuracy)
    S2= curSet[where(curSet[*,pair[0]] le pair[1]),*]
    tree[newIndex,3]=C45(S2,tree,minPixels,leafAccuracy)
  endelse
  ;if ~(n_elements(tree[*,0]) mod 10) then
  return, newIndex
END

FUNCTION preprunning, curset, minPixels, leafAccuracy
  ;Decides whether tree growth should stop for this subset: true when the
  ;subset has fewer than minPixels samples, or when it is already pure
  ;enough (the dominant class proportion exceeds 1-leafAccuracy).
  tooSmall=(n_elements(curset[*,0]) lt minPixels)
  pureEnough=(max(proportion(curset)) gt (1-leafAccuracy))
  return, (tooSmall or pureEnough)
END

;Approximate a number for printable output
FUNCTION approx, x
  ;Formats a number for printable output. STRTRIM with flag 1 removes the
  ;leading blanks IDL adds when converting numbers to strings.
  s=strtrim(x,1)
  return, s
END

;Recursive function that return a list of lines that ENVI can read. Use: print, s, format='(A)'
FUNCTION nodeToEnvi, unit, tree, ctable, nodeIndex, locX, locY, parent
  ;Writes one tree node -- and, recursively, its whole subtree -- to the
  ;open file 'unit' in ENVI's decision-tree text format. Returns 0.
  ;  unit      open file unit (see exportTreeToEnvi)
  ;  tree      node matrix: attrIndex, threshold, left, right, majorClass
  ;  ctable    3 x nclasses RGB colour table used for Result nodes
  ;  nodeIndex row of the node to write
  ;  locX/locY position in ENVI's layout; locY is the depth, 1 for the root
  ;  parent    name of the parent node, written in the 'parent name' record
  isLeaf=0
  if tree[nodeIndex,2] eq tree[nodeIndex,3] then isLeaf=1 ;equal children mark a leaf
  ;check if its a root node
  if locY eq 1 then begin
    nodeName="Node "+approx(locY)
  endif else begin
    nodeName="Node "+approx(locY)+"-"+approx(locX)
  endelse
  s=["begin node"]
  if isLeaf eq 0 then begin
    s=[s,'  name = "'+nodeName+'"']
    s=[s,'  type = Decision']
  end else begin
    ;for a leaf every column holds the majority class, hence column 0 works
    s=[s,'  name = "Class '+approx(FLOOR(tree[nodeIndex,0])+1)+'"']
    s=[s,'  type = Result'] ;
  end
  s=[s,'  location = '+approx(locY)+','+approx(locX)]
  if locY ne 1 then begin
    s=[s,'  parent name = "'+parent+'"']
    ;NOTE(review): the decision is always written as Yes; the commented-out
    ;branch below suggests a No case was once planned -- confirm ENVI infers
    ;the branch from node ordering
    ;if (locX mod 2) then begin
      s=[s,'  parent decision = Yes']
    ;endif else begin
    ;  s=[s,'  parent decision = Yes']
    ;endelse
  end
  if isLeaf eq 0 then begin
    s=[s,'  expression = "B'+approx(FLOOR(tree[nodeIndex,0]))+' gt '+approx(tree[nodeIndex,1])+'"']
  end else begin
    s=[s,'  class value = '+approx(FLOOR(tree[nodeIndex,1]))]
    colour=long(ctable[*,tree[nodeIndex,0]])
    s=[s,'  class rgb = '+STRTRIM(colour[0],1)+','+STRTRIM(colour[1],1)+','+STRTRIM(colour[2],1)]
  end
  s=[s,'end node','']
  printf, unit, s, format='(A)'
  if isLeaf eq 0 then begin
    ;children go one level deeper; X positions follow a binary layout
    res=nodeToEnvi(unit,tree,ctable, tree[nodeIndex,2], (locX-1)*2L+2L, locY+1L,nodeName)
    res=nodeToEnvi(unit,tree,ctable, tree[nodeIndex,3],  (locX-1)*2L+1L, locY+1L,nodeName)
  end
  return, 0
END

PRO exportTreeToEnvi, tree, filename
  ;Writes the whole tree to 'filename' in ENVI's decision-tree text format.
  OPENW, unit, filename, /GET_LUN
  printf, unit, "ENVI Decision Tree Text File (version=1.0)"
  printf, unit, ""
  ;one colour per distinct class stored in column 4
  nclasses=n_elements(UNIQ(tree[*,4], SORT(tree[*,4])))
  ctable=newColorTable(nclasses)
  res=nodeToEnvi(unit, tree, ctable, 0L, 1L, 1L) ;start from the root
  Free_Lun, unit
END

;Receives a set and a percentage and returns a structure with randomly split validation and training sets.
FUNCTION splitValidationSamples, curSet, percent
  ;Randomly splits curSet into training and validation subsets, stratified
  ;per class so every class contributes to both groups on average.
  ;'percent' is the expected fraction of samples kept for training.
  ;Returns the structure {validation: ..., training: ...}.
  classes=listClasses(curSet)
  ;group index (0=training, 1=validation) of each sample
  splitIds=bytarr(n_elements(curSet[*,0]))
  ;for each class, split its samples independently
  for i=0L, n_elements(classes)-1 do begin
    pixelsId=where(curSet[*,0] eq classes[i])
    npixels=n_elements(pixelsId)
    group=randomu(seed,npixels) gt percent
    splitIds[pixelsId]=group
  endfor
  ;fail loudly if the random draw left a group empty; the original would
  ;have died on an out-of-range subscript (where() returning -1) instead
  valIds=where(splitIds eq 1, nVal)
  trainIds=where(splitIds eq 0, nTrain)
  if (nVal eq 0) or (nTrain eq 0) then message, 'splitValidationSamples: one group is empty; adjust percent or provide more samples.'
  res={validation: curSet[valIds,*], training: curSet[trainIds,*] }
  return, res
END

;finds the parent node Id in the matrix
FUNCTION findParentNode, tree, nodeId
  ;Returns the row index of the node whose left (column 2) or right
  ;(column 3) child is nodeId, or -1 when no parent exists.
  ;BUG FIX: the original tested 'if greater eq -1' on the full index array,
  ;which faults in IDL when where() returns more than one element; the
  ;scalar first element is tested instead.
  greater=where(tree[*,2] eq nodeId)
  if greater[0] ne -1 then return, greater[0]
  lesser=where(tree[*,3] eq nodeId)
  return, lesser[0]
END

;Recursive function that prunes a subtree. Returns the number of misclassified samples
FUNCTION PruneNode, curSet, idCurNode, tree
  ;Recursively post-prunes the subtree rooted at idCurNode using curSet
  ;(the validation samples that reach this node) and returns the number of
  ;misclassified samples of the (possibly pruned) subtree.
  ;'tree' is modified in place: a pruned node has all five columns
  ;overwritten with its majority class, which turns it into a leaf.
  ;If this is a leaf, returns how many classification errors there are.
  if tree[idCurNode,2] eq tree[idCurNode,3] then begin
    ;Checks the error of assigning every sample to the leaf's class
    imgClass=bytarr(n_elements(curSet[*,0]))+tree[idCurNode,2]
    nErrors=classificationErrors(imgClass,curSet[*,0])
    return, nErrors
  endif else begin
  ;this is not a leaf and should be checked for pruning

    ;route the samples down both branches and accumulate their errors;
    ;an empty branch contributes zero errors
    attrIndex=tree[idCurNode,0]
    threshold=tree[idCurNode,1]
    greaterIds=where(curSet[*,attrIndex] gt threshold)
    lesserIds=where(curSet[*,attrIndex] le threshold)
    if greaterIds[0] ne -1 then begin
      ;curClassSet[greaterIds]=greaterNodeId
      nErrorsLeft=PruneNode(curSet[greaterIds,*], tree[idCurNode,2], tree)
    endif else nErrorsLeft=0
    if lesserIds[0] ne -1 then begin
      ;curClassSet[lesserIds]=lesserNodeId
      nErrorsRight=PruneNode(curSet[lesserIds,*], tree[idCurNode,3], tree)
    endif else nErrorsRight=0

    ;error of collapsing this node: assign every sample to the majority
    ;class recorded at build time (column 4)
    maxClass=tree[idCurNode,4];getMajorClass(curSet)
    pruneClass=bytarr(n_elements(curSet[*,0]))+maxClass
    pruneError=classificationErrors(pruneClass,curSet[*,0])

    if (pruneError le (nErrorsLeft+nErrorsRight) ) then begin
      ;The classification improves (or ties) when pruned: collapse to a leaf.
      ;NOTE(review): the pruned node's children remain in the matrix as
      ;unreachable rows and are still counted by countNodes -- confirm.
      tree[idCurNode,*]=maxClass
      finalError=pruneError
    endif else finalError=nErrorsLeft+nErrorsRight
    ;maxProportion= (pmax gt maxProp)
    return, finalError
  endelse
END

FUNCTION countNodes, tree
  ;Counts the decision (non-leaf) nodes: rows whose left and right child
  ;indexes differ. Uses where's COUNT argument so an all-leaf tree yields 0
  ;instead of faulting on an out-of-range subscript (where() returns -1).
  dummy=where(tree[*,2] ne tree[*,3], count)
  return, count
END

FUNCTION classificationErrors, set1, set2
  ;Counts the positions where the two label arrays disagree.
  ;BUG FIX: the original took n_elements of the where() result, which is 1
  ;even when where() finds nothing (it returns scalar -1), so two identical
  ;sets were reported as having one error. COUNT gives the true number.
  dummy=where((set2-set1) ne 0, nErrors)
  return, nErrors
END

FUNCTION newColorTable, nclasses
  ;Builds a 3 x nclasses RGB colour table: hues evenly spread around the
  ;HSV wheel at full saturation and value.
  ;NOTE(review): hue spans 0..360 inclusive, so the first and last classes
  ;both map to red -- consider dividing by nclasses instead. Behavior kept.
  ctable=fltarr(3,nclasses)
  input=fltarr(3,nclasses)
  ;guard nclasses=1, which would divide by zero in the original
  if nclasses gt 1 then input[0,*]=FINDGEN(nclasses)/(nclasses-1)*360
  input[1,*]=1
  input[2,*]=1
  COLOR_CONVERT, input, ctable, /HSV_RGB
  return, ctable
END

PRO dtcClassifier, samplesFile, imageFile, splitPercent, minNumberPixels, minLeafPercentage, classFile, treeFile
  ;Full classification pipeline: load samples, optionally split them into
  ;training/validation, build the C4.5 tree, post-prune it with the
  ;validation samples, export it in ENVI format and classify an image.
  ;  samplesFile       sample file read by carregaramostrastreinamentoF
  ;  imageFile         image to classify; '' skips classification
  ;  splitPercent      training fraction; 1 disables validation and pruning
  ;  minNumberPixels   pre-pruning: minimum samples per node (see preprunning)
  ;  minLeafPercentage pre-pruning: purity tolerance (see preprunning)
  ;  classFile         output classified image path
  ;  treeFile          output ENVI tree file; "" skips export
  ;loading samples
  writeMsg, "Loading samples..."
  samplesSet=carregaramostrastreinamentoF(samplesFile);
  ;splitting in two groups: training and validation
  writeMsg, "Found "+strtrim(n_elements(samplesSet[*,0]),2)+" samples to describe "+strtrim(n_elements(listClasses(samplesSet)),2)+" classes." 
  writeMsg, "Splitting in two groups: training and validation.."
  if splitPercent eq 1 then begin
    ;no validation set: wrap everything as training in the same structure
    samplesSet={training: samplesSet}
  endif else begin
    samplesSet=splitValidationSamples(samplesSet,splitPercent)
  endelse
  tree=transpose([-1,0,0,0,0]) ;root node is replaced later
  writeMsg, "Building the tree..."
  index=C45(samplesSet.training,tree,minNumberPixels,minLeafPercentage)
  nodesBefore=n_elements(tree[*,0])
  nodeCount=countNodes(tree)
  writeMsg, strtrim(nodeCount)+" nodes created."
  writeMsg, "Post prunning..."
  
  if splitPercent ne 1 then begin
    ;prune in place using the held-out validation samples
    error=PruneNode(samplesSet.validation, 0,tree)
    nodeCount=countNodes(tree)
    writeMsg, strtrim(nodeCount)+ " nodes remained after prunning."
    writeMsg, "Validation error: "+strtrim(error*100./n_elements(samplesSet.validation[*,0]),2)+"%"
  endif
  writeMsg, "Saving the tree as an ENVI's tree..."
  if treeFile ne "" then begin
    exportTreeToEnvi, tree, treeFile
  endif
  
  if imageFile ne '' then begin
    ;Classify only if an input image was chosen
    writeMsg, "Loading the image to classify..."
    image=carregarImagem(imageFile)
    npixels=n_elements(image.pixelsImagem[*,0])
    writeMsg, "Classifying..."
    imgClass=classify(image.pixelsImagem, tree)
    writeMsg, "Creating the output image..."
    writeClassImg, imgClass, image.ncolunas, image.geoheader, classFile
  endif
  writeMsg, "Done."
END



PRO tests, size
  ;Development scratch procedure: exercises the pipeline on a sample file.
  ;Most historical checks are kept commented out for reference.
  ;NOTE(review): the folder below is a hard-coded developer path -- adjust
  ;before running on another machine.
;  sampleTest=sampleSet(size)
;  classes=listClasses(sampleTest)
;  print, classes
;  p0=proportion(sampleTest)
;  print, p0 
;  print, "Entropy:", entropy(p0)
;  print, "Available attribute list:", availableAttrValues(sampleTest, 0)
;  print, "Gain:", gain(sampleTest, classes, 0, 0)
;  print, "Gain ratio:", gainRatio(sampleTest,classes, 0,0), split(sampleTest,classes,0,0)
;  print, "Attribute with maximum gain: ", attrMaxGainRatio(sampleTest, classes, 1)
;  firstSplit=subsetMaxGain(sampleTest,classes)
;  print, "First split", firstSplit
  tree=[-1,0,0,0,0]
  ;index=C45(sampleTest, tree, size/50)
  
;  print, findParentNode(tree, 50)
;  nodeIndex=appendNode(tree,firstSplit[0],firstSplit[1],1,2)
;  nodeIndex=appendNode(tree,0,0,0,0)
;  nodeIndex=appendNode(tree,1,1,1,1)
  f='/mond_a3c5';'\sintetico-rgb';
  folder='/home/mauriciodev/Desktop/INPE/reconhecimento_de_padroes/workspace/c45idl/'
  ;folder='C:\Dados\Mauricio\idlWorkspace\dtc\'
  samplesFile=folder+'samples'+f+'.txt'
  imageFile=folder+'samples'+f+'.tif'
  treeFile=folder+'results'+f+'.txt'
  classFile=folder+'results'+f+'_class.tif'
  dtcClassifier, samplesFile, imageFile, 0.5, 25, 0.01,classFile, treeFile
  ;index=C45(SampleTest,tree,50)
  ;print, tree
END

PRO c45idl
  ;Program entry point: launches the user interface (dtcui, defined in
  ;another file of this project).
  dtcui
  ;tests, 100
END