open Printf
open Ext

(* Large sentinel constant.
   NOTE(review): big_int appears unused in this chunk of the file —
   confirm against the rest of the file before removing. *)
let big_int = 1000000000 (* = 1 billion *)

(* Globals used for command line parameters *)

(* Training data file path (-i). *)
let datafile = ref ""
(* Optional schema file path (-s); inferred from data if empty. *)
let schemafile = ref ""
(* Output dependency network path (-o), XMOD format. *)
let dnoutfile = ref ""
(* Minimum number of training examples required at each leaf (-mincount). *)
let mincount = ref 10

(* The weight the score function assigns to each additional split created,
   i.e., the penalty for each additional parameter. *)
let split_cost = ref 0.0
(* Alternate specification of the split penalty (-kappa); when positive,
   split_cost is set to -log kappa in main. *)
let kappa = ref 0.0

(* Output models for different per-split thresholds.  This is good for
 * tuning the per-split penalty in a single training pass, rather than
 * n separate training runs.
 *)
(* NOTE(review): thresh is declared but not referenced in this chunk —
   confirm whether it is consumed elsewhere. *)
let thresh = ref false

(* Prior counts *)
let prior = ref 1.0


(* Command-line usage banner printed by Arg.usage. *)
let usage = "Usage: dnlearn -i <input> -o <output> [...]"
(* Argument spec for Arg.parse; common_arguments (from Ext) appends the
   flags shared by all tools in this codebase (e.g. logging options). *)
let args = Arg.align
 ([("-i", Arg.Set_string datafile, " Training data file") ;
   ("-s", Arg.Set_string schemafile, " Data schema (optional)") ;
   ("-o", Arg.Set_string dnoutfile, " Output dependency network (XMOD format)") ;
   ("-ps", Arg.Set_float split_cost, " Per-split penalty") ;
   ("-kappa", Arg.Set_float kappa, " Alternate per-split penalty specification") ;
   ("-prior", Arg.Set_float prior, " Prior counts");
   ("-mincount", Arg.Set_int mincount, 
     " Minimum number of examples at each leaf [10]")]
   @ common_arguments)

(* NOTE(review): stale sketch removed — the signature it described
   ("learn_dt data var z_ij w_ij") does not match the actual
   learn_dt schema data v parentscore defined below. *)

(* Turn an integer count vector into a normalized probability
   distribution, smoothing each entry with [prior] pseudo-counts. *)
let counts_to_probs prior counts =
  let n = Array.length counts in
  let observed = Array.fold_left (fun acc c -> acc +. float_of_int c) 0.0 counts in
  let total = prior *. float_of_int n +. observed in
  Array.map (fun c -> (prior +. float_of_int c) /. total) counts
  
(* Squared loss of var on data, according to distribution dist *)
let dist_sql dist counts = 
  let sql p q = (float_of_int p) *. (-. q *. q) in 
  Array.sumf (Array.map2 sql counts dist) 

(* Log loss of var on data, according to distribution dist *)
let dist_ll dist counts = 
  let ll p q = if p > 0 then (float_of_int p) *. (log q) else 0. in
  Array.sumf (Array.map2 ll counts dist) 


(* Split score is change in log likelihood *)
let split_score pro_c con_c =
  let total_c = Array.map2 ( + ) pro_c con_c in
  (* Must have at least mincount examples at each leaf. *)
  if Array.length pro_c < !mincount || Array.length con_c < !mincount then 0.0
  else
    let total_p = counts_to_probs !prior total_c in
    let pro_p = counts_to_probs !prior pro_c in
    let con_p = counts_to_probs !prior con_c in
    (dist_ll pro_p pro_c +. dist_ll con_p con_c) -. dist_ll total_p total_c
 

(* For target variable [var] (with [numvals] values), tally how the
   examples in [data] fall on each side of every candidate split in
   [va] (an array of (split_var, split_val) pairs).  Returns
   (pro, con), each indexed as result.(split_index).(value_of_var). *)
let split_counts_l var numvals data va =
  let nsplits = Array.length va in
  let split_var = Array.map fst va in
  let split_val = Array.map snd va in
  (* TODO: Reuse matrices instead of re-allocating them each time? *)
  let pro = Array.create_matrix numvals nsplits 0 in
  let con = Array.create_matrix numvals nsplits 0 in
  Array.iter
    (fun x ->
      let xval = x.(var) in
      for j = 0 to nsplits - 1 do
        (* Example agrees with split j iff it matches the tested value. *)
        let tgt = if x.(split_var.(j)) = split_val.(j) then pro else con in
        tgt.(xval).(j) <- tgt.(xval).(j) + 1
      done)
    data;
  (* Transpose so the split index comes first. *)
  (Array.transpose pro, Array.transpose con)


(* Score every candidate split in [va] for predicting [var] and return
   the winner as (split_var, split_val, score, pro_counts, con_counts).
   Falls back to index 0 if pos_maxf reports no maximum. *)
let best_split schema data var va =
  let (pro, con) = split_counts_l var schema.(var) data va in
  let scores = Array.map2 split_score pro con in
  (* Dump every candidate's score when debug logging is on. *)
  if log_exists log_debug then
    Array.iteri
      (fun i score ->
        let (v, value) = va.(i) in
        dlogf "Split %d (v%d=%d): %f\n" i v value score)
      scores;
  let best = max 0 (Array.pos_maxf scores) in
  let (bvar, bval) = va.(best) in
  (bvar, bval, scores.(best), pro.(best), con.(best))


(* Recursively learn a probabilistic decision tree predicting variable
   [v] from the other variables in [schema], using the examples in
   [data].  [parentscore] is the score of the parent node's split and
   is used only for the submodularity diagnostic below.  Returns a
   Bn.Leaf (log-probability distribution) or a Bn.Vertex split node. *)
let rec learn_dt schema data v parentscore =
  (* Get the best split for the current node *)
  (* Candidate splits: every (var, value) pair except the target v.
     For binary variables only value 0 is generated, since splitting
     on value 1 would produce the mirror image of the same partition. *)
  let gen_splits var dim =
    if var = v then []
    else if dim = 2 then [(var, 0)]
    else Array.to_list (Array.init dim (fun i -> (var, i))) in
  let vl = List.flatten (Array.to_list (Array.mapi gen_splits schema)) in
  let va = Array.of_list vl in
  let (var, value, score, pro_c, con_c) = best_split schema data v va in

  (* If the best split isn't good enough, return a leaf.
     Otherwise, recurse on the two subtrees. *)
  if score <= !split_cost || classify_float score = FP_nan then
    (* Leaf: store smoothed log-probabilities for v's values. *)
    let total_c = Array.map2 ( + ) pro_c con_c in
    Bn.Leaf (Array.map log (counts_to_probs !prior total_c))
  else begin
    vlogf "Var %d: Splitting on v%d=%d (score=%f).\n" v var value score;
    (* A child split scoring better than its parent's violates
       submodularity; log it for debugging but continue anyway. *)
    if score > parentscore then
      dlogf
"Score of %f is better than parent score of %f. (Not submodular.)\n" 
        score parentscore;
    (* Partition examples by whether they satisfy the chosen split. *)
    let (pro_data, con_data) = 
      Array.partition (fun x -> x.(var) = value) data in
    dlogf "Pro: %d; Con: %d\n" (Array.length pro_data) (Array.length
      con_data) ;
    let left = learn_dt schema pro_data v score in
    let right = learn_dt schema con_data v score in
    Bn.Vertex(var, value, left, right)
  end


(* Learn a dependency network: load the training data, learn one
   decision-tree CPD per variable, and write the network in XMOD
   format to !dnoutfile. *)
let do_learn () =
  (* Read in data and determine schema (number of values for each var).
     FIX: bind and close the input channel instead of leaking its
     file descriptor. *)
  let datain = open_in !datafile in
  let data = Data.input_example_list datain in
  close_in datain;
  vlogf "Loaded data.\n";
  let schema = 
    if !schemafile <> "" then begin
      let schemain = open_in !schemafile in
      let s = Data.input_example schemain in
      close_in schemain ; s
    end
    else Data.schema data in 
  let data = Array.of_list data in

  (* For each variable, build a decision tree and set it as the CPD *)
  Timer.start "dnlearn";
  let numvars = Array.length schema in
  let bn = Bn.create_empty_network schema in
  for i = 0 to numvars - 1 do
    let root = learn_dt schema data i infinity in
    Bn.set_cptree bn i root
  done;
  vlogf "Learning time: %f seconds\n" (Timer.elapsed "dnlearn");

  (* Save to disk.  FIX: close the output channel explicitly so the
     model is fully flushed rather than relying on exit-time flushing. *)
  let out = open_out !dnoutfile in
  Bn.output_xmod out bn;
  close_out out


(* Parse the command line; print usage unless both the input data file
   and the output model file were given, otherwise run the learner. *)
let main () =
  Arg.parse args ignore usage;
  match !datafile, !dnoutfile with
  | "", _ | _, "" -> Arg.usage args usage
  | _, _ ->
      (* -kappa overrides -ps: the per-split penalty becomes -log kappa. *)
      if !kappa > 0.0 then split_cost := -.(log !kappa);
      common_log_init ();
      do_learn ()

let _ = main ()
