import math
import os
import sys
import urlparse
import urllib
import robotparser

'''
round22:(for file urls-04.txt) 1.0M/per machine
part: 16 OutOf 17.0
lowerBound: 141800001
upperBound: 142800001
urls have been written to: urls-part16-OutOf17-hadoop17-round22
'''

'''
round21:(for file urls-04.txt) 1.1M/per machine
part: 16 OutOf 17.0
lowerBound: 124700001
upperBound: 125800001
urls have been written to: urls-part16-OutOf17-hadoop17-round21

'''

'''
round20:(for file urls-04.txt) 1.0M/per machine
part: 16 OutOf 17.0
lowerBound: 106100001
upperBound: 107100001
urls have been written to: urls-part16-OutOf17-hadoop17-round20
'''

'''
round19:(for file urls-04.txt) 1.1M/per machine
part: 16 OutOf 17.0
lowerBound: 89000001
upperBound: 90100001
urls have been written to: urls-part16-OutOf17-hadoop17-round19
'''

'''
round18:(for file urls-04.txt) 1.0M/per machine
part: 16 OutOf 17.0
lowerBound: 70400001
upperBound: 71400001
urls have been written to: urls-part16-OutOf17-hadoop17-round18
'''

'''
round17:(for file urls-04.txt) 1.1M/per machine
part: 16 OutOf 17.0
lowerBound: 53300001
upperBound: 54400001
urls have been written to: urls-part16-OutOf17-hadoop17-round17
'''

'''
round16:(for file urls-04.txt) 1.0M/per machine
part: 16 OutOf 17.0
lowerBound: 34700001
upperBound: 35700001
urls have been written to: urls-part16-OutOf17-hadoop17-round16
'''

'''
round15:(for file urls-03.txt) 1.1M/per machine
part: 16 OutOf 17.0
lowerBound: 17600001
upperBound: 18700001
'''

'''
round14:(for file urls-03.txt) 1.0M/per machine
part: 16 OutOf 17.0
lowerBound: 141800000
upperBound: 142800000
urls have been written to: urls-part16-OutOf17-hadoop17-round14
'''

'''
round13:(for file urls-03.txt) 1.1M/per machine
part: 16 OutOf 17.0
lowerBound: 124700000
upperBound: 125800000
urls have been written to: urls-part16-OutOf17-hadoop17-round13
'''

'''
round12:(for file urls-03.txt) 2.0M/per machine
part: 16 OutOf 17.0
lowerBound: 105100000
upperBound: 107100000
urls have been written to: urls-part16-OutOf17-hadoop17-round12
'''

'''
round11:(for file urls-03.txt) 2.2M/per machine
part: 16 OutOf 17.0
lowerBound: 70900000
upperBound: 73100000
urls have been written to: urls-part16-OutOf17-hadoop17-round11
'''

'''
round10:(for file urls-03.txt) 2.1M/per machine
part: 16 OutOf 17.0
lowerBound: 33600000
upperBound: 35700000
urls have been written to: urls-part16-OutOf17-hadoop17-round10
'''

'''
round9:(for file urls-02.txt) 2.0M/per machine
part: 16 OutOf 17.0
lowerBound: 64300000
upperBound: 66300000
urls have been written to: urls-part16-OutOf17-hadoop17-round09
'''

'''
round8:(for file urls-02.txt) 1.9M/per machine
Start from offset = 0
part: 16 OutOf 17.0
lowerBound: 30400000
upperBound: 32300000
urls have been written to: urls-part16-OutOf17-hadoop17-round08
'''

'''
round7:(for file urls.txt)
part: 16 OutOf 17.0
lowerBound: 107300000
upperBound: 109300000
'''

'''
round6:(for file urls.txt)
part: 16 OutOf 17.0
lowerBound: 73300000
upperBound: 75300000
'''


def loadBlackList(path="/data/weijiang/BingDataSetDownloading/programs/url-static-update-black-list"):
    """Load the URL black list and return {url: occurrence_count}.

    Each line of the file is stripped and counted; empty lines (if any)
    are counted under the key "" exactly as the original code did.

    Fixes vs. original: the file handle is now closed deterministically
    (``with``), the builtin name ``dict`` is no longer shadowed, each
    line is stripped only once, and the file is streamed line-by-line
    instead of materialized with ``readlines()``. The hard-coded path is
    now a backward-compatible default parameter.
    """
    counts = {}
    with open(path, "r") as handle:
        for line in handle:
            url = line.strip()
            # counts.get replaces the original in/else branch pair.
            counts[url] = counts.get(url, 0) + 1
    return counts


# --- Configuration and setup for this splitting round ---------------------
print "program for splitting the urls."
print "Updated by Wei by 2012/09/11"

# Round number; used in output directory and file names (see history above).
ROUND_NUMBER = 22
# Hadoop machine IDs that are currently up; one output part per machine.
clusterMachineOKList = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17]
NUMBER_OF_MACHINES = float( len(clusterMachineOKList) )
NUMBER_OF_URLS_PER_MACHINE = 1000000 #1.0M (original comment said 1.1M, which contradicts the value)

# Input: one URL per line.
inputFileHandler = open("/data/jrodri04/search/urls-04.txt","r")
#inputFileHandler = open("/data/weijiang/BingDataSetDownloading/programs/urls-head-100.txt","r")

print "number Of URLs Trying To Split This Time:",NUMBER_OF_URLS_PER_MACHINE * NUMBER_OF_MACHINES
print "NUMBER_OF_URLS_PER_MACHINE:",NUMBER_OF_URLS_PER_MACHINE

# Counters for the run summary printed at the end of the script.
totalLineCounter = 0
# First input line index to process; everything before it is skipped.
# NOTE(review): the round-22 history block above records lowerBound 141800001,
# but this value (125800001) equals round 21's upperBound — confirm which
# round this configuration is actually for before running.
lowerBound = 125800001
upperBound = lowerBound + NUMBER_OF_URLS_PER_MACHINE
normalURLLine = 0           # lines recognized as http(s) URLs
normalURLLineCanCrawl = 0   # subset of normal URLs written to output
emptyLineCounter = 0        # blank lines skipped
unknownFormatLine = 0       # lines neither empty nor starting with http(s)
blackListURL = 0            # lines filtered out by the black list

blackListDict = loadBlackList()
#blackListDict = {}
print "length of the black list currently:",len(blackListDict)
'''
for url in blackListDict:
    print url,blackListDict[url]
'''

# Skip the first `lowerBound` lines so this run resumes where the previous
# round stopped (the input file is consumed sequentially, never seeked).
while totalLineCounter < lowerBound:
    currentLine = inputFileHandler.readline()
    totalLineCounter += 1

# NOTE(review): if lowerBound were 0 the loop body never runs and
# `currentLine` is undefined here (NameError) — confirm lowerBound > 0.
print "the last line to skip:",currentLine

# Main loop: write one output file per machine, each file receiving the
# next NUMBER_OF_URLS_PER_MACHINE input lines minus any filtered lines.
for partCounter in range( 0,int(NUMBER_OF_MACHINES) ):
    # e.g. "urls-part00-OutOf17-hadoop01-round22"
    outputFileName = "urls-part" + "%02d" % partCounter + "-OutOf" + str(int(NUMBER_OF_MACHINES)) + "-hadoop%02d" % clusterMachineOKList[partCounter] + "-round%02d" % ROUND_NUMBER;
    # NOTE(review): the round%02d directory must already exist, or open() fails.
    outputFileHandler = open("/data/weijiang/BingDataSetDownloading/urls-related/" +  "round%02d" % ROUND_NUMBER + "/"+ outputFileName,"w")
    
    print 
    print "part:",str(partCounter),"OutOf",NUMBER_OF_MACHINES
    print "lowerBound:",str(lowerBound)
    print "upperBound:",str(upperBound)
    
    
    
    # NOTE(review): the first two conditions look redundant — for every
    # iteration, totalLineCounter >= lowerBound already holds on entry and
    # upperBound never exceeds the overall cap, so `< upperBound` alone
    # appears to govern the loop. Confirm before simplifying.
    while totalLineCounter < ( lowerBound + NUMBER_OF_URLS_PER_MACHINE * NUMBER_OF_MACHINES ) and totalLineCounter >= lowerBound and totalLineCounter < upperBound:
        currentLine = inputFileHandler.readline()
        
        # Classify the line; only well-formed http(s) URLs not on the
        # black list are written out. All lines advance totalLineCounter,
        # so filtered lines shrink the output file rather than extend it.
        if currentLine.strip() == "":
            emptyLineCounter += 1
            
        elif currentLine.strip() in blackListDict:
            blackListURL += 1
            
        elif currentLine.startswith("http://") or currentLine.startswith("https://"):
            normalURLLine += 1
            normalURLLineCanCrawl += 1
            outputFileHandler.write(currentLine)
            
        else:
            # Echo unrecognized lines for manual inspection.
            print currentLine
            unknownFormatLine += 1
            
        
        totalLineCounter += 1
        
        
        #for eachLine in URLs[int(lowerBound):int(upperBound)]:
        #    outputFileHandler.write(eachLine)
        
    outputFileHandler.close()
    print "urls have been written to:",outputFileName
    # Slide the window forward for the next machine's part.
    lowerBound += NUMBER_OF_URLS_PER_MACHINE
    upperBound += NUMBER_OF_URLS_PER_MACHINE
        
inputFileHandler.close()
# Final run summary of how every consumed line was classified.
# NOTE(review): normalURLLine and normalURLLineCanCrawl are incremented on
# the same branch and are therefore always equal — one of them is redundant.
print 
print "normalURLLine:",normalURLLine
print "normalURLLineCanCrawl:",normalURLLineCanCrawl

print "emptyLineCounter:",emptyLineCounter
print "unknownFormatLine:",unknownFormatLine
print "blackListURL:",blackListURL
print "totalLineCounter:",totalLineCounter