import sys
from multiprocessing import Process, Value, Lock
import time
import os
import puzautils.twitter_api.twitter_crawler as tc
import math
import logging

def detect_dead(ft, lock):
    """Return 1 if the crawler heartbeat is stale (hung process), else 0.

    Parameters:
        ft: multiprocessing.Value('d') holding the epoch timestamp of the
            crawler's last reported activity (updated by the child process).
        lock: multiprocessing.Lock guarding reads/writes of ft.

    A heartbeat older than 600 seconds (compared via floor, i.e. a full
    601+ second gap) is treated as a hung crawling process.
    """
    # Only the shared-value read needs the lock; log outside it.
    with lock:
        diff = math.fabs(ft.value - time.time())
    if math.floor(diff) > 600:
        # logging.warn is deprecated; warning() is the supported spelling.
        # Lazy %-formatting defers string building until the record is emitted.
        logging.warning('Crawling process hangs. [Timeout]:%s', diff)
        return 1
    return 0

if __name__ == '__main__':
    current_dir = os.path.dirname(os.path.realpath(__file__))
    logging.basicConfig(
        filename='puza_twitter_stream_monitor.log',
        filemode='w',
        level=logging.INFO,
        format='%(asctime)s::%(levelname)s::[%(module)s::%(funcName)s]::%(message)s',
        datefmt='%m/%d/%Y %I:%M:%S %p')
    logging.info('Started Twitter Stream Monitor.')
    fail = 0
    while True:
        # Fresh crawler/reporter per attempt. `ft` is the shared heartbeat
        # timestamp the child updates through the reporter; `lock` guards it.
        c = tc.TwitterCrawler('day')
        c.Auth()
        r = tc.CrawlerReporter()
        ft = Value('d', time.time())
        lock = Lock()
        r.SetReport(ft, lock)
        t = Process(target=c.ListenStreamSample, args=(current_dir, r))
        logging.info('Crawling process is started. [Start]:%s', fail)
        t.start()
        # Poll the heartbeat every 5 s until the child is declared hung.
        while True:
            time.sleep(5)
            if detect_dead(ft, lock) == 1:
                break
        t.terminate()
        t.join()  # reap the terminated child so it doesn't linger as a zombie
        fail = fail + 1
        logging.info('Crawling process is terminated. [Fail]:%s', fail)
        if fail > 5:
            # Give up after 6 failed attempts; no point waiting 30 s first.
            break
        # Log BEFORE sleeping so the message describes what is about to happen.
        logging.info('Wait 30 seconds to another trial.%s', fail)
        time.sleep(30)
    logging.info('Finish Twitter Stream Monitor.')
    