# The following is the normalized posting hit routine
from __future__ import division
from operator import itemgetter, attrgetter
from struct import *
# import gc
import copy
import math
import os
import random
import sys
import time
from sets import Set
from random import randint
import re
# import numpy as np
from os import walk
# import matplotlib.pyplot as plt
from subprocess import call
import os

# compute the overall statistics for the posting hit / pt
def normalizedPostingHitRoutine_step1():
    print "Begins..."
    postingHitDict = {}
    ifn = "/home/vgc/wei/workspace/NYU_IRTK/data/unigramFromWei/documentPostingArrays/gov2/docHitsANDPostingHit/" + sys.argv[1]
    lowerBound = ifn.strip().split("/")[-1].split("_")[-6]
    upperBound = ifn.strip().split("/")[-1].split("_")[-5]
    ofn1 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/postingHitHistogram_normalized_20141204/usingSmoothedPostingCounts/normalizedPostingHit_histogram_" + lowerBound + "_" + upperBound + "_" + "20141210"
    print ifn
    print ofn1
    ofh1 = open(ofn1,"w")
    inputFileHandler0 = open(ifn,"rb")
    statinfo = os.stat(ifn)
    fileSize = statinfo.st_size
    print "currentInputFileName: ",ifn
    print "file size:",fileSize
    numOfBytesRead = 0
    numOfDocumentsProcessed = 0
    totalNumOfPostings = 0
    while numOfBytesRead < fileSize:
        byteString = inputFileHandler0.read(4 + 4 + 4)
        (docIDFromFile,numOfPostings,numDocHit) = unpack( "3I", byteString)
        print docIDFromFile,numOfPostings,numDocHit
        for i in range(0,numOfPostings):
            byteString = inputFileHandler0.read(4 + 4 + 4 + 4)
            (termID,impactScore,postingHit,probability) = unpack( "1I1f1I1f", byteString)
            # print "----->",i,termID,impactScore,postingHit,probability
            normalizedPostingHit = 0.0
            if probability != 0.0:
                normalizedPostingHit = int( (postingHit + 1) / probability )
            else:
                if termID == 100000000:
                    print "encounter 100000000"
                else:
                    print termID,impactScore,postingHit,probability
                    print "exit(1)"
                    exit(1)
            if normalizedPostingHit not in postingHitDict:
                postingHitDict[normalizedPostingHit] = 1
            else:
                postingHitDict[normalizedPostingHit] += 1
        totalNumOfPostings += numOfPostings
        numOfBytesRead += 12 + numOfPostings * 16
        numOfDocumentsProcessed += 1
        
        #if docIDFromFile == 10000:
        #    break
    
    for currPostingHit in postingHitDict:
        ofh1.write(str(currPostingHit) + " " + str(postingHitDict[currPostingHit]) + "\n")
    
    print "Overall:"
    print "ifn:",ifn
    print "ofn1:",ofn1
    print "numOfDocumentsProcessed:",numOfDocumentsProcessed
    print "totalNumOfPostings:",totalNumOfPostings
    inputFileHandler0.close()
    print "Ends."

def normalizedPostingHitRoutine_step2(ifn):
    # Convert the text histogram produced by step1 ("<hit> <count>" per
    # line) into packed binary "2I" records written to <ifn>.binary.
    # make the hit counts in binary format
    ofn = ifn + "." + "binary"
    ofh = open(ofn,"wb")
    ifh = open(ifn,"r")
    
    l = ifh.readline()
    lineCounter = 1
    while l:
        le = l.strip().split(" ")
        if le[0] != "0.0":
            # NOTE(review): [:-1] drops the LAST character of the hit value
            # before int() — presumably step1's keys carry a trailing
            # character (e.g. an "L" from a Python long repr); confirm
            # against the actual histogram file before reusing, since for a
            # plain integer key this would silently truncate a digit.
            normalizedPostingHit = int(le[0][:-1])
        else:
            # the step1 sentinel bucket is written as the float string "0.0"
            normalizedPostingHit = 0
        numOfPostings = int(le[1])
        # print le[0],normalizedPostingHit,numOfPostings
        ofh.write(pack("2I",normalizedPostingHit,numOfPostings))
        # if lineCounter % 10 == 0:
        #    break
        l = ifh.readline()
        lineCounter += 1
        if lineCounter % 1000000 == 0:
            # progress heartbeat for very large histograms
            print lineCounter,"lines processed."
    print "Overall:"
    print "ifn:",ifn
    print "ofn:",ofn
    ifh.close()
    ofh.close()
    # NOTE(review): exit(1) terminates the whole program here (skipping the
    # caller's "Program Ends." and returning a failure status) even on
    # success — looks intentional as a "stop after step2" switch; confirm.
    exit(1)

def normalizedPostingHitRoutine_step3():
    ifn = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/postingHitHistogram_normalized_20141204/usingSmoothedPostingCounts/resultfile0"
    inputFileHandler0 = open(ifn,"rb")
    statinfo = os.stat(ifn)
    fileSize = statinfo.st_size
    print "currentInputFileName: ",ifn
    print "file size:",fileSize
    numOfBytesRead = 0
    numOfDocumentsProcessed = 0
    numOfPostingsInTotal = 0
    previousScore = 2000000000
    lineCounter = 1
    while numOfBytesRead < fileSize:
        byteString = inputFileHandler0.read(4 + 4)
        (score,numOfPostings) = unpack( "2I", byteString)
        lineCounter += 1
        numOfPostingsInTotal += numOfPostings
        numOfBytesRead += 8
    inputFileHandler0.close()
    print "Ends."
    print "Overall:"
    print "ifn:",ifn
    print "numOfPostingsInTotal:",numOfPostingsInTotal
    inputFileHandler0.close()

def normalizedPostingHitRoutine_step4():
    # Placeholder for a future pipeline step; intentionally does nothing.
    pass


def fun2(ifn):
    print "Begins..."
    # ifn = "/home/vgc/wei/workspace/NYU_IRTK/data/unigramFromWei/documentPostingArrays/gov2/docHitsANDPostingHit/GOV2_documentPostingArray_0M_1M_docHit_postingHit_added"
    ifn = "/home/vgc/wei/workspace/NYU_IRTK/data/unigramFromWei/documentPostingArrays/gov2/docHitsANDPostingHit/" + ifn 
    lowerBound = ifn.strip().split("/")[-1].split("_")[-5]
    upperBound = ifn.strip().split("/")[-1].split("_")[-4]
    ofn1 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/postingPerDocumentCoverageAnalysis_20141123/postingPerDocumentCoverage_" + lowerBound + "_" + upperBound
    print "ifn:",ifn
    print "ofn1:",ofn1
    inputFileHandler0 = open(ifn,"rb")
    ofh = open(ofn1,"w")
    statinfo = os.stat(ifn)
    fileSize = statinfo.st_size
    print "currentInputFileName: ",ifn
    print "file size:",fileSize
    numOfBytesRead = 0
    numOfDocumentsProcessed = 0
    docIDCounter = 0
    while numOfBytesRead < fileSize:
        byteString = inputFileHandler0.read(4 + 4 + 4)
        (docIDFromFile,numOfPostings,numDocHit) = unpack( "3I", byteString)
        counter = 0
        for i in range(0,numOfPostings):
            byteString = inputFileHandler0.read(4 + 4 + 4)
            (termID,impactScore,postingHit) = unpack( "1I1f1I", byteString)
            # print "----->",i,termID,impactScore,postingHit
            if postingHit != 0:
                counter += 1
        # print docIDFromFile,counter,numOfPostings,counter/numOfPostings
        ofh.write(str(docIDFromFile) + " " + str(counter) + " " + str(numOfPostings) + " " +str(counter/numOfPostings)+ "\n")
        numOfBytesRead += 12 + numOfPostings * 12
        numOfDocumentsProcessed += 1
        docIDCounter += 1
        #if docIDCounter == 10:
        #    break
        
    print "Overall:"
    print "ifn:",ifn
    print "ofn1:",ofn1
    print "numOfDocumentsProcessed:",numOfDocumentsProcessed
    print "docIDCounter:",docIDCounter
    inputFileHandler0.close()
    ofh.close()
    print "Ends."

def fun1():
    print "Begins..."
    docHitDict = {}
    postingHitDict = {}
    
    # ifn = "/home/vgc/wei/workspace/NYU_IRTK/data/unigramFromWei/documentPostingArrays/gov2/docHitsANDPostingHit/GOV2_documentPostingArray_0M_1M_docHit_postingHit_added"
    ifn = "/home/vgc/wei/workspace/NYU_IRTK/data/unigramFromWei/documentPostingArrays/gov2/docHitsANDPostingHit/" + sys.argv[1]
    lowerBound = ifn.strip().split("/")[-1].split("_")[-5]
    upperBound = ifn.strip().split("/")[-1].split("_")[-4]
    ofn1 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/docHitHistogram_20141120/docHit_histogram_" + lowerBound + "_" + upperBound
    ofn2 = "/home/vgc/wei/workspace/NYU_IRTK/polyIRToolkit_Wei/postingHitHistogram_20141120/postingHit_histogram_" + lowerBound + "_" + upperBound
    print ifn
    print ofn1
    print ofn2
    ofh1 = open(ofn1,"w")
    ofh2 = open(ofn2,"w")
    inputFileHandler0 = open(ifn,"rb")
    statinfo = os.stat(ifn)
    fileSize = statinfo.st_size
    print "currentInputFileName: ",ifn
    print "file size:",fileSize
    numOfBytesRead = 0
    numOfDocumentsProcessed = 0
    while numOfBytesRead < fileSize:
        byteString = inputFileHandler0.read(4 + 4 + 4)
        (docIDFromFile,numOfPostings,numDocHit) = unpack( "3I", byteString)
        if numDocHit not in docHitDict:
            docHitDict[numDocHit] = 1
        else:
            docHitDict[numDocHit] += 1
        print docIDFromFile,numOfPostings,numDocHit
        for i in range(0,numOfPostings):
            byteString = inputFileHandler0.read(4 + 4 + 4)
            (termID,impactScore,postingHit) = unpack( "1I1f1I", byteString)
            # print "----->",i,termID,impactScore,postingHit
            if postingHit not in postingHitDict:
                postingHitDict[postingHit] = 1
            else:
                postingHitDict[postingHit] += 1
    
        numOfBytesRead += 12 + numOfPostings * 12
        numOfDocumentsProcessed += 1
        
        #if docIDFromFile == 1:
        #    break
    
    for currDocHit in docHitDict:
        ofh1.write(str(currDocHit) + " " + str(docHitDict[currDocHit]) + "\n")
    
    for currPostingHit in postingHitDict:
        ofh2.write(str(currPostingHit) + " " + str(postingHitDict[currPostingHit]) + "\n")
    
    print "Overall:"
    print "ifn:",ifn
    print "ofn1:",ofn1
    print "ofn2:",ofn2
    print "numOfDocumentsProcessed:",numOfDocumentsProcessed
    inputFileHandler0.close()
    print "Ends."

# Entry point.  Exactly one "option" call below is left uncommented per
# run; the active routine reads its input file name from sys.argv[1].
print "Program Begins..."
# option1:
# fun1()
# option2:
# ifn = sys.argv[1]
# fun2(ifn)
# option3:
normalizedPostingHitRoutine_step1()
# normalizedPostingHitRoutine_step2(sys.argv[1])
# normalizedPostingHitRoutine_step3()
print "Program Ends."




