@include "lib.awk"
@include "abcd.awk"
@include "globals.awk"

function _nbc(     _NBC,what,who,opts,o) {
  # Command-line entry point: parse options into o[], then either
  # print help/copyright text or cache settings and run the classifier.
  what = "nbc: a Naive Bayes classifier (discrete values)"
  who  = "(c) 2012 tim@menzies.us"
  opts = "-m,2,-k,1,--loud,0,-sep,comma,"\
         "-absent,?,-all,_,"\
         "--help,0,--copyleft,0,-seed,1,-x,10,"\
         "-train,data/weatherd.csv,"\
         "-test,nil"
  if (!args(opts,o))          # bad command line: do nothing
    return
  if (o["--help"])
    return prints(what,who, " ",
      " -absent C  : character for empty cells; default= ?",
      " -all X     : not a class; default= '_'",
      " --copyleft : prints copyright info",
      " --help     : prints help text",
      " -k N       : handles rare classes; default= 1",
      " --loud     : verbose mode",
      " -m N       : handles rare values; default= 2",
      " -seed N    : seed",
      " -test  FILE: test data. If not set, do cross-val ",
      " -train FILE: traning data ",
      " -x N       : how many ways for the cross-val; default= 10")
  if (o["--copyleft"])
    return copyleft(what,who)
  # cache the parsed options in the globals the classifier reads
  m       = o["-m"]
  k       = o["-k"]
  all     = o["-all"]
  verbose = o["--loud"]
  absent  = o["-absent"]
  sep     = o["-sep"]
  x       = o["-x"]
  if (sep == "comma")
    sep = ","
  nbc(o["-train"],o["-test"],o["-seed"], _NBC)
}
function nbc(trainFile,testFile,s, _NBC,
             _ABCD,trainData,n,repeat) {
  # Top-level run: seed the RNG, load the training data, then either
  # do x-way cross-validation (when testFile is "nil") or one
  # train/test split.  Results accumulate in _ABCD and are reported
  # at the end.  (Removed unused locals `i` and `tests` from the
  # local-parameter list.)
  rx0 = "k,m"        # global: names of the tuned options...
  rx  = k","m        # global: ...and their current values
  db  = trainFile    # global: data-set name for reporting
  seed(s)
  n = file2data(trainFile,trainData)
  if (testFile == "nil")
    for(repeat=1; repeat<=x; repeat++)
      xval(repeat,trainData,n,_NBC,_ABCD)
  else
    trainTest(trainData,testFile,n,_NBC,_ABCD)
  print ""
  reports_ABCD(_ABCD,all)
}
function file2data(file,data,    row,col) {
  # Load `file` into data[row][col], skipping the first (header) line.
  # Data rows are numbered from 2 (the post-increment on `row` skips
  # row 1).  Returns the number of fields on the last line read.
  # Was: piping through `"cat " file`, which spawns a shell and breaks
  # on filenames with spaces/metacharacters; gawk's `getline < file`
  # reads the file directly with identical field splitting.
  delete data
  while((getline < file) > 0)
    if(row++)
      for(col=1;col<=NF;col++)
	data[row][col] = $col
  close(file)
  return NF
}
function xval(repeat,trainData,n,_NBC,_ABCD,
	      row,holdout,h,f) {
  # One cross-validation fold: randomly hold out about 1/x of the
  # rows, train on the remainder, then score the held-out rows.
  # h and f are fresh per fold (declared as locals).
  say(" "repeat)
  for(row in trainData)
    if (rand() > 1/x)
      train(n,trainData[row],h,f,_NBC)
    else
      holdout[row] = row
  for(row in holdout)
    test(n,trainData[row],h,f,_NBC,_ABCD)
}
function trainTest(trainData,testFile,n,_NBC,_ABCD,
		             testData,i,h,f) {
  # Train on every row of trainData (n = its column count), then
  # score every row of testFile.
  # BUG FIX: `n` was missing from the parameter list, so the call
  # trainTest(trainData,testFile,n,_NBC,_ABCD) shifted _NBC/_ABCD
  # one slot to the right and left `n` unset while training.
  for(i in trainData)
    train(n,trainData[i],h,f,_NBC)
  n = file2data(testFile,testData)   # reuse n as the test column count
  for(i in testData)
    test(n,testData[i],h,f,_NBC,_ABCD)
}
function test(n,data,h,f,_NBC,_ABCD,
              want,got,score,likes) {
  # Classify one row and record the (actual, predicted) pair.
  got   = classify(n,data,likes,h,f,_NBC)
  want  = data[n]                # the class label lives in column n
  score = round(likes[all])      # NOTE(review): computed but never used
  abcd(want,got,_ABCD)
}
function train(n,    #!posint : column count; data[n] is the class
	       data, #!list   : one row of cells
	       h,f,_NBC, #!mutl : h = class counts, f = value counts
	       label,col,cell) {
  # Fold one training row into the counters: h[class] and the `all`
  # total, plus f[class][column][value] for every non-missing cell.
  label = data[n]
  h[label]++
  h[all]++                        # `all` tallies every row seen
  for(col=1; col<n; col++) {
    cell = data[col]
    if (cell != absent)           # skip missing cells
      f[label][col][cell]++
  }
}
function classify(n,data,likes,h,f,_NBC,
	      c,i,val,like,classes,prior,tmp,out,
              c1,c2,c3,f1,f2,f3,mode,most) {
  # Return the class with the highest naive Bayes log-score for the
  # row `data`; also fills likes[c] with each class's own score.
  # Falls back to the majority class when no class wins outright.
  like    = -100000000          # lower than any real log score
  classes = length(h) - 1       # h also holds the `all` pseudo-class
  c2      = h[all]              # total rows seen (loop-invariant)
  c3      = k*classes           # smoothing term for the denominator
  for(c in h) {
    c1 = h[c]                   # raw class frequency count
    # BUG FIX: the majority-class fallback must skip `all`, whose
    # count is the total of every class and so always won before.
    if (c != all && c1 > most) {
      mode = c; most = c1 }
    prior = (c1 + k)/(c2 + c3)  # smoothed prior belief in "c"
    tmp   = log(prior)          # log-space to avoid underflow
    for(i=1;i<n;i++)
      if ((val = data[i]) != absent) {
	f1 = f[c][i][val]       # raw attribute/value count within "c"
	f2 = m*prior            # m-estimate low-frequency kludge
        f3 = h[c] + m           # per-class low-frequency kludge
	tmp += log((f1+f2)/f3)
      }
    # BUG FIX: was `likes[c] = like`, which stored the running
    # maximum seen *before* this class, not this class's own score.
    likes[c] = tmp
    if (c != all)
      if (tmp >= like) {
	like = tmp; out = c }}
  return out ? out : mode
}
