#!/usr/local/bin/python
#-*- encoding:utf-8 -*-

import numpy as np;
import scipy.sparse as sp
from scipy import linalg
import time
import threading
import os
from multiprocessing import Process, Manager
import sys
import utils
import hashlib
import pls3 as pls

# Feature dimensionality of the query (dx) and document (dy) vectors.
# NOTE(review): dx == dy here; batchProcess seeds D with column dx-1 but
# fills D with dy-indexed features (and Q vice versa) -- only safe while
# the two dimensions stay equal. Confirm before changing either value.
dx = 20134
dy = 20134

# Cumulative file-count boundaries for the five incremental training
# batches: logFileList[0:basic], [basic:firstbatch], [firstbatch:secondbatch], ...
basic = 1;
firstbatch = 25;
secondbatch = 49;
thirdbatch = 73;
fourthbatch = 97;
# Precomputed mean vectors loaded from disk; passed to
# utils.computeWandN2 in batchProcess (presumably for mean-centering --
# see the commented-out utils.normlize calls there).
D_mean = np.matrix(np.load('D.mean.npy'));
print type(D_mean), D_mean.shape
Q_mean = np.matrix(np.load('Q.mean.npy'));
print type(Q_mean), Q_mean.shape

class COO_Struct:
    """Growable triplet buffers for assembling a scipy.sparse COO matrix."""

    def __init__(self):
        # Parallel lists: entry k is the triplet (row[k], col[k], data[k]).
        self.data, self.row, self.col = [], [], []

    def append(self, r, c, d):
        """Record one entry: value d at matrix position (r, c)."""
        self.row.append(r)
        self.col.append(c)
        self.data.append(d)

# For a given query-document pair we need to compute the probability of
# this pair.
def updateDig(dig):
    """Placeholder hook invoked with the assembled P matrix.

    Currently a no-op; batchProcess calls it right after building the
    sparse P matrix, presumably so per-pair probabilities can be updated
    here later -- TODO confirm intended semantics.
    """
    pass

def getRow2(s, d):
    """Parse a libsvm-style feature string "idx:val idx:val ..." into a row.

    Indices in the input are 1-based and are shifted to 0-based here.

    s -- whitespace-separated "index:value" tokens.
    d -- nominal feature dimension; currently unused, kept for interface
         compatibility with existing callers.

    Returns a list of (zero_based_index, float_value) tuples; [] for an
    empty or blank string.
    """
    # No-arg split() tolerates leading/trailing blanks and runs of
    # whitespace; the old split(' ') produced empty tokens there and
    # crashed on int('').
    row = []
    for token in s.split():
        pair = token.split(':')
        row.append((int(pair[0]) - 1, float(pair[1])))
    return row

# Recursively-collected list of every log file path under the log
# directory; populated by listAllLogFile() and consumed by batchProcess().
logFileList = []

def listAllLogFile(logDir):
    """Recursively append every regular file under logDir to logFileList.

    Paths that are neither a file nor a directory are silently skipped.
    """
    if os.path.isfile(logDir):
        logFileList.append(logDir)
        return
    if not os.path.isdir(logDir):
        return
    for entry in os.listdir(logDir):
        listAllLogFile(os.path.join(logDir, entry))

def processline(line, D_Struct, Q_Struct, P_Struct, queryMap, count):
    """Parse one "<score>\\t<query features>\\t<doc features>" log line into
    the COO builders.

    line     -- raw log line; first tab-field is a float score, the rest is
                the query/document feature pair.
    D_Struct -- COO builder receiving the document features (one row per pair).
    Q_Struct -- COO builder receiving the query features.
    P_Struct -- diagonal COO builder of scores; duplicate (i, i) entries are
                summed later by scipy's coo_matrix, so repeated pairs
                accumulate their scores.
    queryMap -- sha224(pair digest) -> assigned row index, for deduplication.
    count    -- running line counter (kept for interface compatibility;
                the old progress print on it was already disabled).

    Advances the global nSample row counter for each previously-unseen pair.
    """
    global nSample
    li = line.split('\t', 1)
    if len(li) != 2:
        return
    res = float(li[0])
    pair = li[1]
    # NOTE(review): sha224 of a text string only works on Python 2 byte
    # strings; a Python 3 port must .encode() first.
    digest = hashlib.sha224(pair).hexdigest()
    # 'in' replaces dict.has_key (deprecated, removed in Python 3).
    if digest in queryMap:
        idx = queryMap[digest]
        P_Struct.append(idx, idx, res)
        return

    instance = pair.split('\t')
    xi = getRow2(instance[0], dx)
    yij = getRow2(instance[1], dy)
    if not xi or not yij:
        return

    for j, data in xi:
        Q_Struct.append(nSample, j, data)
    for j, data in yij:
        D_Struct.append(nSample, j, data)

    P_Struct.append(nSample, nSample, res)
    queryMap[digest] = nSample
    nSample += 1

# Global sample counter: next row index to assign in the D/Q/P builders.
# Reset at the start of every batchProcess() call and advanced by
# processline() for each new (query, document) pair.
nSample = 0;
#Tricky part: D, P, Q must be constructed before n (the sample count) is known;
def batchProcess(training, c, batchstop):
    D_Struct = COO_Struct();
    P_Struct = COO_Struct();
    Q_Struct = COO_Struct();

    D_Struct.append(0, dx-1, 0);
    Q_Struct.append(0, dy-1, 0);

    queryMap = {}
    count = 0;
    global nSample;
    nSample = 0;
    while c < batchstop:
        logFile = open(logFileList[c], 'r');
        print 'Processing %s' % logFileList[c];
        for line in logFile:
            if not line.startswith('*'):
                count += 1;
                processline(line, D_Struct, Q_Struct, P_Struct, queryMap, count);
        logFile.close();
        c += 1;

    D =  sp.coo_matrix((D_Struct.data, (D_Struct.row, D_Struct.col)), dtype=np.float64);
    P = sp.coo_matrix((P_Struct.data, (P_Struct.row, P_Struct.col)), dtype=np.float64);
    Q = sp.coo_matrix((Q_Struct.data, (Q_Struct.row, Q_Struct.col)), dtype =np.float64);
    updateDig(P);
    
    print 'normlizing'
    #D = utils.normlize(D.todense(), D_Mean);
    #Q = utils.normlize(Q.todense(), Q_Mean);
    print 'D.shape',D.shape, 'Q.shape', Q.shape, 'nSample', nSample, 'P.shape', P.shape
    #W = D.transpose().dot(P.tocsc()).dot(Q);
    #N = Q.transpose().dot(P.tocsc()).dot(D);
    #N = None;
    W= utils.computeWandN2(D.tocsr(), Q.tocsc(), D_mean, Q_mean);
    W = sp.csc_matrix(W);
    N = W.transpose();
    T = count;
    
    #Very Careful M.transpose N is csr format;
    return (W, N, T);
    
def onlineControl():
    logDir = '/home/wangshuxin/destLog/';
    listAllLogFile(logDir);
    logFileList.sort();
    print logFileList;
    print len(logFileList);
    training = None;
    c = 0;
    batch = 0;
    print 'Processing %d batch' % batch;
    W0,N0, T= batchProcess(training, c, basic); 

    print W0.shape, N0.shape;
    #Train pls.pls basic
    #pls.pls(W0.tocsc(), N0, dx, dy, batch);

   
    batch = 1;
    print 'Processing %d batch' % batch;
    W1, N1, T = batchProcess(training, basic, firstbatch);
    W1 = W1 + W0;
    N1 = N1 + N0;
    #pls.pls(W1.tocsc(), N1, dx, dy, batch);

    batch = 2;
    print 'Processing %d batch' % batch;
    W2, N2,T = batchProcess(training, firstbatch, secondbatch);

    #This could aslo to add up W0, and N0, in a decayed way
    W2 = W2 + W1;
    N2 = N2 + N1;

    #Release W1, N1;
    #del(W1);
    #del(N1);
    #pls.pls(W2.tocsc(), N2, dx, dy, batch);

 
    batch = 3;
    print 'Processing %d batch' % batch;
    W3, N3, T = batchProcess(training, secondbatch, thirdbatch);

    #This could aslo to add up W0, and N0, in a decayed way
    W3 = W3 + W2;
    N3 = N3 + N2;
    #del(W2);
    #del(N2);
    #Train pls.pls basic
    #pls.pls(W3.tocsc(), N3, dx,dy, batch)


    batch = 4;
    print 'Processing %d batch' % batch;
    W4, N4, T = batchProcess(training, thirdbatch, fourthbatch);
    W4 = W4 + W3;
    N4 = N4 + N3;
    utils.save_sparse_csc('W4', W4);
    utils.save_sparse_csc('N4', N4);
    
    #del(W3);
    #del(N3);
    pls.pls(W4, N4, dx, dy, batch)

# Script entry point: run the full batch-training pipeline.
if __name__ == '__main__':
    onlineControl();
