import time;

'''
    BlogUpdateCrawler periodically fetches the updated home URLs and
    appends them to a pipe file for downstream consumers.
'''
class BlogUpdateCrawler:
    """Periodically fetch updated home URLs and append them to a pipe file.

    The pipe file is shared with other writers, so every write is guarded
    by the lock supplied through the config object.
    """

    def __init__(self, config):
        # Seconds to sleep between polling rounds.
        self.sleepTime = float(config.data[config.UPDATE_SLEEP_TIME_VARNAME])
        # Path of the pipe file that downstream consumers read.
        self.pipePath = config.data[config.PIPE_FILE_PATH_VARNAME]
        # Lock serializing appends to the pipe file across writers.
        self.pipeLock = config.data[config.PIPE_LOCK_VARNAME]
        # Object whose update() returns the freshly updated home URLs.
        self.homeUrlUpdator = config.data[config.HOME_URL_UPDATOR_VARNAME]

    def outputUpdateHomeUrls(self, updateHomeUrls):
        """Append each URL, newline-terminated, to the pipe file.

        Returns 0 on completion (kept for backward compatibility with
        existing callers that check the return code).
        """
        # Context managers guarantee the lock is released and the file is
        # closed even if opening or writing raises — the original code
        # leaked the lock (and the file handle) on any exception here.
        with self.pipeLock:
            with open(self.pipePath, 'a') as pipeFile:
                pipeFile.writelines(url + '\n' for url in updateHomeUrls)
        return 0

    def crawl(self):
        """Main loop: poll the updator, push URLs to the pipe, sleep, repeat.

        Never returns.
        """
        while True:
            updateHomeUrls = self.homeUrlUpdator.update()
            self.outputUpdateHomeUrls(updateHomeUrls)
            time.sleep(self.sleepTime)
    
    