import logging
import os
import subprocess
import sys
import threading
import time

import CrawlerConfig
import UrlDb

class StreamCrawler:
    """Downloads a stream's content to a file configured by the crawler config.

    The stream object is expected to expose a ``url`` attribute; the config
    object is expected to expose ``streamFilePath`` (destination file).
    """

    def __init__(self, stream, config):
        """Remember the stream to crawl and the destination file path.

        Args:
            stream: object with a ``url`` attribute pointing at the stream.
            config: crawler configuration providing ``streamFilePath``.
        """
        self.stream = stream
        self.filePath = config.streamFilePath

    def crawl(self):
        """Fetch ``self.stream.url`` into ``self.filePath`` using wget.

        wget's stderr is captured in ``.wget.err.log`` (matching the
        previous shell redirect). The command is passed as an argument
        list (no shell), so URLs or paths containing shell metacharacters
        cannot be interpreted by a shell.
        """
        with open('.wget.err.log', 'w') as err_log:
            # shell=False (the default with a list) avoids the shell-injection
            # risk the old os.system string concatenation had.
            subprocess.run(
                ['wget', self.stream.url, '-O', self.filePath],
                stderr=err_log,
            )

        
