#include "dnbbinary.h"
#include <cmath>
using namespace std;

DNBBinary::~DNBBinary(){
    // BUG FIX: previously the destructor released nothing, so the heap arrays
    // (m_perWordPerClass, m_coefficient, m_wordsPerClass, m_classDistribution)
    // leaked unless the caller remembered to invoke destoryClassifier() by hand.
    // destoryClassifier() null-checks and re-nulls every pointer, so running it
    // here is safe even after a prior manual call.
    destoryClassifier();
}

DNBBinary::DNBBinary(){
    // Null the owning pointers so destoryClassifier() / the destructor can run
    // safely even when initClassifier()/loadClassifier() was never called.
    // NOTE(review): assumes the header does not already initialize these
    // members — redundant re-initialization here is harmless either way.
    m_perWordPerClass = NULL;
    m_coefficient = NULL;
    m_wordsPerClass = NULL;
    m_classDistribution = NULL;
}

void DNBBinary::setTargetClass(int classid){
    m_targetClass = classid;
}

void DNBBinary::saveClassifier(fstream &file){
    int multiNominalWord = m_multiNominalWord?1:0;
    file.write(reinterpret_cast<char *>(&multiNominalWord), sizeof(int));

    file.write(reinterpret_cast<char *>(&m_classIndex), sizeof(int));
    file.write(reinterpret_cast<char *>(&m_numAttributes), sizeof(int));
    file.write(reinterpret_cast<char *>(&m_targetClass), sizeof(int));

    file.write(reinterpret_cast<char *>(&m_WordLaplace), sizeof(double));
    file.write(reinterpret_cast<char *>(&m_classRatio), sizeof(double));
    file.write(reinterpret_cast<char *>(&m_wordRatio), sizeof(double));

    file.write(reinterpret_cast<char *>(m_wordsPerClass), sizeof(double)*2);
    file.write(reinterpret_cast<char *>(m_classDistribution), sizeof(double)*2);
    file.write(reinterpret_cast<char *>(m_coefficient), sizeof(double)*m_numAttributes);

    for (int c = 0; c < 2; c++) {
        file.write(reinterpret_cast<char *>(m_perWordPerClass[c]), sizeof(double)*m_numAttributes);
    }
}

// Deserializes a model previously written by saveClassifier(); the read order
// below must mirror the write order there field-for-field.
// NOTE(review): the arrays are allocated unconditionally, so calling this on an
// already-initialized classifier leaks the previous buffers — callers should
// destoryClassifier() first; verify against call sites.
// NOTE(review): read errors are not checked — a truncated/corrupt file leaves
// the model partially filled.
void DNBBinary::loadClassifier(fstream &file){
    // the on-disk flag is a 0/1 int; convert back to bool
    int multiNominalWord;
    file.read(reinterpret_cast<char *>(&multiNominalWord),sizeof(int));
    m_multiNominalWord = multiNominalWord==1?true:false;

    file.read(reinterpret_cast<char *>(&m_classIndex), sizeof(int));
    file.read(reinterpret_cast<char *>(&m_numAttributes), sizeof(int));
    file.read(reinterpret_cast<char *>(&m_targetClass), sizeof(int));

    file.read(reinterpret_cast<char *>(&m_WordLaplace), sizeof(double));
    file.read(reinterpret_cast<char *>(&m_classRatio), sizeof(double));
    file.read(reinterpret_cast<char *>(&m_wordRatio), sizeof(double));

    m_perWordPerClass = new double *[2];  //dxp: per-word probability mass for / against the target class
    for(int i=0; i<2; i++){
        m_perWordPerClass[i] =  new double[m_numAttributes];
    }
   
    m_wordsPerClass = new double[2];            //dxp: initialized to a*log(a), a = m_numAttributes
    m_classDistribution = new double[2];        //dxp: both initially 1
     m_coefficient = new double[m_numAttributes];

    // array contents come straight from the file, overwriting any priors
    file.read(reinterpret_cast<char *>(m_wordsPerClass), sizeof(double)*2);
    file.read(reinterpret_cast<char *>(m_classDistribution), sizeof(double)*2);
    file.read(reinterpret_cast<char *>(m_coefficient), sizeof(double)*m_numAttributes);
    for (int c = 0; c < 2; c++) {
        file.read(reinterpret_cast<char *>(m_perWordPerClass[c]), sizeof(double)*m_numAttributes);
    }
}


void DNBBinary::initClassifier(int numAttributes, bool multiNominal){
    m_numAttributes = numAttributes;
    m_classIndex = numAttributes;
    m_multiNominalWord = multiNominal;
    m_perWordPerClass = new double *[2];  //dxp: 单个词属于与不属于类别的概率
    for(int i=0; i<2; i++){
        m_perWordPerClass[i] =  new double[m_numAttributes];
    }
    m_coefficient = new double[m_numAttributes];
    m_wordsPerClass = new double[2];            //dxp: 初始化 a*log(a) a=m_numAttributes
    m_classDistribution = new double[2];        //dxp: 初始都为1
    m_WordLaplace = log(m_numAttributes);

    //Laplace
    for (int c = 0; c < 2; c++) {
        m_classDistribution[c] = 1;
        m_wordsPerClass[c] = m_WordLaplace * m_numAttributes;
        for(int i=0; i<m_numAttributes; i++) m_perWordPerClass[c][i] = m_WordLaplace;
    }
}

// Frees every heap array owned by the classifier and re-nulls the pointers,
// so a second call (or a call before initialization) is a harmless no-op.
void DNBBinary::destoryClassifier(){
    // the 2-row matrix needs its rows released before the row-pointer array
    if (m_perWordPerClass != NULL) {
        for (int row = 0; row < 2; row++) {
            delete [] m_perWordPerClass[row];
            m_perWordPerClass[row] = NULL;
        }
        delete [] m_perWordPerClass;
        m_perWordPerClass = NULL;
    }

    // delete[] on a null pointer is a guaranteed no-op, so no guards needed
    delete [] m_coefficient;
    m_coefficient = NULL;

    delete [] m_wordsPerClass;
    m_wordsPerClass = NULL;

    delete [] m_classDistribution;
    m_classDistribution = NULL;
}

// Online discriminative update: score the instance with the current model,
// then reinforce its true class in proportion to how badly the model
// misclassified it (weight = probability the model assigned to the WRONG class).
// instance maps attribute index -> count; the class label is stored at
// key m_numAttributes.
// NOTE(review): instance[m_numAttributes] uses operator[], which inserts a
// zero entry when the class column is absent — the map parameter is mutated.
void DNBBinary::updateClassifier(map<int,int> & instance){
    int classIndex = 0;   // row 0 = target class, row 1 = everything else
    if (instance[m_numAttributes] != m_targetClass) classIndex = 1;
    
    double probOfClassGivenDoc[2];
    double ratio = getLogProbForTargetClass(instance);
    // exp(x) overflows a double for x > ~709, so clamp to probability 1
    if (ratio > 709){
        probOfClassGivenDoc[0]=1;
    }else{
        ratio = exp(ratio);
        probOfClassGivenDoc[0] = ratio / (1 + ratio);   // logistic of the log-odds
    }
    probOfClassGivenDoc[1] = 1 - probOfClassGivenDoc[0];

    //double prob = 1 - probOfClassGivenDoc[classIndex];
    double prob = probOfClassGivenDoc[1-classIndex];    // mass given to the wrong class

    double weight = prob * 1.0;   // the instance's base statistical weight is set to 1

    for (map<int,int>::iterator it=instance.begin(); it!=instance.end(); ++it) {
        if (it->first != m_classIndex ){
            if (!m_multiNominalWord) {
              // Bernoulli-style model: count presence once, regardless of frequency
              if (it->second > 0) {
                m_wordsPerClass[classIndex] += weight;
                m_perWordPerClass[classIndex][it->first] += weight;
              }
            } else {
              // multinomial model: count the weighted word frequency
              double t = it->second * weight;
              m_wordsPerClass[classIndex] += t;
              m_perWordPerClass[classIndex][it->first] += t;
            }
            //update coefficient: cached per-word log-ratio used by getLogProbForTargetClass()
            m_coefficient[it->first] = log(m_perWordPerClass[0][it->first] /
                m_perWordPerClass[1][it->first]);
        }
    }
    // refresh the cached global log-ratios after the counts changed
    m_wordRatio = log(m_wordsPerClass[0] / m_wordsPerClass[1]);
    m_classDistribution[classIndex] += weight;
    m_classRatio = log(m_classDistribution[0] / m_classDistribution[1]);
}

// Returns the log-odds that the instance belongs to the target class,
// accumulated from the class prior ratio plus each word's cached
// log-probability ratio (m_coefficient) corrected by the global word ratio.
double DNBBinary::getLogProbForTargetClass(map<int,int> & instance){
    double logOdds = m_classRatio;
    map<int,int>::iterator word = instance.begin();
    for (; word != instance.end(); ++word) {
        // the column holding the class label is not fixed — skip it by index
        if (word->first == m_classIndex) continue;
        if (m_multiNominalWord) {
            // multinomial model: weight by the word's frequency
            logOdds += (word->second) * (m_coefficient[word->first] - m_wordRatio);
        } else if (word->second > 0) {
            // Bernoulli-style model: presence only
            logOdds += m_coefficient[word->first] - m_wordRatio;
        }
    }
    return logOdds;
}
