#title          : getTrainingAndTestFirstMinutes.py
#description    : divides list of posts into a training and test set. Training set consists of the first hour of a cascade.
#author         : Arthi Ramachandran
#date           : 20140203 
#usage          : python ./getTrainingAndTestFirstMinutes.py <posts> <training output file> <test output file>
#python_version : 2.7
#============================================================================ 

import sys
import datetime
import time


fmt = "%m/%d/%Y %H:%M:%S"
posts = dict()
posts_user = dict()
count = 0
with open(sys.argv[1], "rb") as f:
    for line in f:
        fields = line.strip().split(",")
        user = fields[0]
        url = fields[1]
        posting_time = datetime.datetime.strptime(fields[2].strip("\""), "%m/%d/%Y %H:%M:%S")
        #print posting_time
        if url not in posts:
            posts[url] = []
        posts[url].append(posting_time)
        if url not in posts_user:
            posts_user[url] = dict()
        posts_user[url][user] = posting_time
        count += 1
        if count % 100000 == 0: 
            print count

print "done reading"

def printNiceTimeDelta(stime, etime):
    """Return the delta between two datetimes formatted as DD:HH:MM:SS.

    Each field is zero-padded to two digits (days wider than two digits
    print in full). Any sub-second remainder is truncated, matching the
    old string-parsing implementation.

    The previous version round-tripped through str(timedelta) and
    re-parsed the pieces, which crashed on negative deltas; computing
    the fields arithmetically handles that case and is simpler.
    """
    delay = etime - stime
    # timedelta normalizes so .seconds is always 0..86399.
    hours, rem = divmod(delay.seconds, 3600)
    minutes, seconds = divmod(rem, 60)
    return "%02d:%02d:%02d:%02d" % (delay.days, hours, minutes, seconds)

fout_train = open(sys.argv[2], "w+")
fout_test = open(sys.argv[3], "w+")
for url in posts:
    a = sorted(posts[url])
    #if len(a) > 10: # ignore those urls with < 10 posts - can't learn anything from those
    #    print url + "," + str(len(a)) + "," +  a[0].strftime(fmt) + "," +  a[len(a)-1].strftime(fmt) + "," +  printNiceTimeDelta(a[0],a[len(a)-1])
    #cut -f5 -d, postingFilteringLawRawDataNormalizedURLs2.withtiming.delays.txt | cut -f1-2 -d: | sort | uniq -c

    # get the first hour of posts
    beg_time = a[0]
    #print "beg time ", beg_time
    test_count = train_count = 0
    for user in posts_user[url]:
        t = posts_user[url][user]
        if t - beg_time > datetime.timedelta(hours=1):
            fout_test.write(user + "," + url + "," + t.strftime(fmt) + "\n")
            test_count += 1
        else:
            fout_train.write(user + "," + url + "," + t.strftime(fmt) + "\n")
            train_count += 1
    print train_count, test_count
