#include "LearnedModel.h"

// TODO: override nextStep so that learning also happens while crawling the test graph

void LearnedModel::learnParams() {
    const int nNodes = 100, times= 100, stagnantTolerance = 5; //NOTE: Configurable params
    coeffIn = coeffOut = 1.0;
    //delta = 1.0;
    unknownScore = 1.0;
    sampleCount = 0.0;
    sumNormalizer = 0.0;
    nodeNormalizer = nNodes;


    API *savedTestNetwork = network;
    for(int times_count = 0; times_count < times; times_count++) {
        //TODO: generate graphs and populate params
        PNGraph graph = TSnap::GenForestFire(nNodes, 0.35, 0.32);
        network = new API(graph);
        network->enableCache();

        int nodeFound = 0;
        TIntPr oldRecall = network->evaluate();
        int stagnameSteps = 0, oldCost = network->getTotalCost();
        while(true) {
            int nid = nodeToExplore();
            TIntPr deg = network->getDegree(nid), discovDeg = network->getDiscoveredDegree(nid);

            ///////Code part from nextStep
            TVec<TInt> rec;
	    network->getAllFollowers(nid, rec);
            network->getAllFollowees(nid, rec);
            for(TVec<TInt>::TIter i = rec.BegI(); i != rec.EndI(); i++)
                updateDiscoveredNode(i->Val);
            TIntPr p = network->evaluate();
            ///////Done next step

            int newCost = network->getTotalCost();
            TIntPr newRecall = network->evaluate();
            TIntPr gain = TIntPr(newRecall.Val1 - oldRecall.Val1, newRecall.Val2 - oldRecall.Val2);
            nodeFound += gain.Val1;
            double normScore = getScore(gain) / double(newCost - oldCost);
            if(oldRecall == newRecall) {
                if(++stagnameSteps > stagnantTolerance) break;
            }
            else if(stagnameSteps) stagnameSteps = 0;

            KEY k(deg, discovDeg);
            if(!params.insert(make_pair(k, normScore)).second) //If there exists already
                params[k] += normScore;

            sampleCount += 1.0; //TODO: make sure we never explored the same node twice
            sumNormalizer += normScore;

            oldCost = newCost;
            oldRecall = newRecall;
        }

        printf("times %d : found %d nodes\n",times_count, nodeFound);
        //Clean up
        delete network;
        unexploredCount = 0;
        visitedNode.clear();
        degreeCount.setZero();
    }

    network = savedTestNetwork;
    network->enableCache();
    //TODO: also learn teleport probability given delta_gain?
}

void LearnedModel::saveParams(char *fname) {
    // Serializes the learned scalars followed by every (KEY, score) pair as
    // raw binary records; loadParams() reads back the identical layout.
    // NOTE: the dump is not portable across architectures (raw struct bytes).
    FILE *fp = fopen(fname, "wb");
    if(fp == NULL) {
        // BUGFIX: a failed open used to crash on the first fwrite(NULL,...).
        fprintf(stderr, "saveParams: cannot open %s for writing\n", fname);
        return;
    }
    fwrite(&sumNormalizer, sizeof(double), 1, fp);
    fwrite(&nodeNormalizer, sizeof(double), 1, fp);
    fwrite(&sampleCount, sizeof(double), 1, fp);
    fwrite(&delta, sizeof(double), 1, fp);
    fwrite(&unknownScore, sizeof(double), 1, fp);
    fwrite(&coeffIn, sizeof(double), 1, fp);
    fwrite(&coeffOut, sizeof(double), 1, fp);
    for(map<KEY, double>::iterator i = params.begin(); i != params.end(); i++) {
        fwrite(&i->first, sizeof(KEY), 1, fp);
        fwrite(&i->second, sizeof(double), 1, fp);
    }
    fclose(fp);
}

void LearnedModel::loadParams(char *fname) {
    // Deserializes the scalars and (KEY, score) records written by
    // saveParams(), in the same order and binary layout.
    FILE *fp = fopen(fname, "rb");
    if(fp == NULL) {
        // BUGFIX: a missing file used to crash on the first fread(NULL,...).
        fprintf(stderr, "loadParams: cannot open %s for reading\n", fname);
        return;
    }
    fread(&sumNormalizer, sizeof(double), 1, fp);
    fread(&nodeNormalizer, sizeof(double), 1, fp);
    fread(&sampleCount, sizeof(double), 1, fp);
    fread(&delta, sizeof(double), 1, fp);
    fread(&unknownScore, sizeof(double), 1, fp);
    fread(&coeffIn, sizeof(double), 1, fp);
    fread(&coeffOut, sizeof(double), 1, fp);
    // BUGFIX: feof() only becomes true AFTER a read fails, so the old
    // while(!feof(fp)) loop inserted one garbage entry built from stale
    // buffer contents at end of file. Drive the loop off the fread results.
    KEY k;
    double score;
    while(fread(&k, sizeof(KEY), 1, fp) == 1 &&
          fread(&score, sizeof(double), 1, fp) == 1)
        params[k] = score;
    fclose(fp);
}

// Registers a (possibly new) discovered node and refreshes its selection
// score in degreeCount from the learned params table.
// visitedNode maps node id -> 1-based slot (++unexploredCount at insertion);
// a value of -1 appears to mark an already-explored node, set elsewhere
// (presumably by nodeToExplore) -- TODO confirm against the rest of the class.
void LearnedModel::updateDiscoveredNode(int discoveredNode) {
    if(visitedNode.find(discoveredNode) == visitedNode.end()) {//First seen this node
        // Reuse a vacated slot in nodeIDs when one exists; otherwise grow it.
        if(unexploredCount == nodeIDs.size())
            nodeIDs.push_back(discoveredNode);
        else nodeIDs[unexploredCount] = discoveredNode;
        visitedNode[discoveredNode] = ++unexploredCount;
    }
    //TODO: scale node in learn graph to general.
    //TODO: smoothing, unknowns, delta params
    // Update the score
    int index = visitedNode[discoveredNode];
    if(index == -1)return; //Already explored
    double score = 0.0; //TODO: UNK & smooth delta thingy
    // Look up the learned score for this node's (degree, discovered-degree)
    // key; fall back to unknownScore for keys never seen during training.
    TIntPr deg = network->getDegree(discoveredNode);
    TIntPr discovDeg = network->getDiscoveredDegree(discoveredNode);
    map<KEY, double>::iterator s = params.find(KEY(deg, discovDeg));
    if (s != params.end()) score += s->second;
    else score += unknownScore;
    // Replace the node's previous contribution with the new score:
    // Ask(index, index) reads the current stored value at this slot, and
    // Add applies the delta -- NOTE(review): degreeCount looks like a
    // prefix-sum / Fenwick-style structure, but its semantics are defined
    // outside this file; confirm before relying on this description.
    score -= degreeCount.Ask(index, index);
    degreeCount.Add(index, score);
}

#ifdef _LEARNED_MODEL_
int main(int argc, char* argv[]) {    
    PNGraph Gtmp = TSnap::GenForestFire(1000, 0.35, 0.32);
    API *a = new API(Gtmp);

    LearnedModel x(a);
    //x.learnParams();
    //x.saveParams("param10000.bin");
    x.loadParams("param10000.bin");
    x.crawl(2000);

    TGnuPlot Gp("NodesExploredLearned", "Nodes Explored");
	Gp.AddPlot(x.getNodeStatistics(), gpwLinesPoints, "x");
    Gp.SetXYLabel("Cost", "Nodes found");
    Gp.SavePng();

    Gp = TGnuPlot("EdgesExploredLearned", "Edges Explored");
	Gp.AddPlot(x.getEdgeStatistics(), gpwLinesPoints, "x");
    Gp.SetXYLabel("Cost", "Edges found");
    Gp.SavePng();
    return 0;
}
#endif
