import blogcrawlerUtil;
import CrawlerConfig;

import sys;

def usage():
    # Report the expected invocation syntax and abort via the shared
    # fatal-error helper (which is expected to terminate the process).
    message = sys.argv[0] + ' <config-file-path>'
    blogcrawlerUtil.fatal_error(message)
                    
if __name__ == '__main__':
    argc = len(sys.argv);
    if argc <> 2:
        usage();
    configPath = sys.argv[1];
    print 'main: loading config......';
    config = CrawlerConfig.Config(configPath);
    blogSite = config.data[CrawlerConfig.BLOG_SITE_VARNAME]; 
    if blogSite == CrawlerConfig.LIVESPACE_BLOG or blogSite == CrawlerConfig.BLOGSPOT_BLOG:
        config = CrawlerConfig.UpdatePageCrawlerConfig(configPath);
    elif blogSite == CrawlerConfig.LIVEJOURNAL_BLOG:
        config = CrawlerConfig.AllInStreamBlogCrawlerConfig(configPath);
    elif blogSite == CrawlerConfig.SINA_BLOG:
        config = CrawlerConfig.SeedBasedCrawlerConfig(configPath);
    print 'main: creating crawlers......';
    crawler = config.createCrawler();
    print 'main: crawling......';
    crawler.crawl();
    
        
