#!/usr/bin/env python3
"""Batch-run the ``keywordSpider`` Scrapy crawler for every keyword in a file.

Usage: python <script> <keyword-file>

For each keyword (one per line) the spider is run against the baidunews and
360news search engines, first unrestricted and then restricted to each news
site in ``SITELIST`` via an ``extkey='site:<domain>'`` argument.  If a
scrapy/xinhua crawl already appears in ``ps aux``, the script exits at once
so overlapping runs never start.
"""
import subprocess
import sys

# News sites used for the per-site "site:<domain>" restricted searches.
SITELIST = [
    '163.com',
    'qq.com',
    'sina.com.cn',
    'xinhuanet.com',
    'ifeng.com',
    'hexun.com',
    'jiemian.com',
    'thepaper.com',
    'yicai.com',
]

# MongoDB target shared by every crawl (was repeated inline four times).
MONGODB_SERVER = '101.200.59.101'
MONGODB_PORT = '27017'
MONGODB_DB = 'xinhua'


def build_crawl_command(keyword, se, pages, extkey=None):
    """Return the scrapy argv list for one crawl.

    Building an argv list (run with ``shell=False``) instead of concatenating
    a shell string closes the shell-injection hole the original had when a
    keyword contained quotes or shell metacharacters.

    :param keyword: search keyword passed as ``keywordList``
    :param se: search engine name, e.g. ``'baidunews'`` or ``'360news'``
    :param pages: number of result pages to crawl
    :param extkey: optional extra query term, e.g. ``'site:qq.com'``
    :return: list of command arguments for ``subprocess.run``
    """
    cmd = [
        'scrapy', 'crawl', 'keywordSpider', '-L', 'WARNING',
        '-a', 'keywordList=' + keyword,
    ]
    if extkey is not None:
        cmd += ['-a', 'extkey=' + extkey]
    cmd += [
        '-a', 'se=' + se,
        '-a', 'pages=' + str(pages),
        '-a', 'MONGODB_SERVER=' + MONGODB_SERVER,
        '-a', 'MONGODB_PORT=' + MONGODB_PORT,
        '-a', 'MONGODB_DB=' + MONGODB_DB,
    ]
    return cmd


def scrapy_already_running():
    """Return True if a scrapy xinhua crawl already shows up in ``ps aux``.

    Replaces the Python-2-only ``commands.getstatusoutput`` shell pipeline;
    filtering is done in Python so the pipeline's own grep process cannot
    pollute the result.
    """
    try:
        out = subprocess.run(
            ['ps', 'aux'],
            stdout=subprocess.PIPE,
            universal_newlines=True,
            check=False,
        ).stdout
    except OSError:
        # Best effort, mirroring the original's tolerance: if ps is
        # unavailable we proceed rather than abort.
        return False
    return any('xinhua' in line and 'scrapy' in line
               for line in out.splitlines())


def main():
    """Read keywords from the file named in argv[1] and launch the crawls."""
    fpath = sys.argv[1]
    if scrapy_already_running():
        sys.exit()
    # 'with' guarantees the file is closed (the original leaked the handle).
    with open(fpath, 'r') as fh:
        for raw in fh:
            keyword = raw.strip()
            print(keyword)
            print('=' * 50)
            # Unrestricted search on both engines, 5 pages each.
            for se in ('baidunews', '360news'):
                subprocess.run(build_crawl_command(keyword, se, 5),
                               check=False)
            # Site-restricted search on both engines, 3 pages each.
            for site in SITELIST:
                for se in ('baidunews', '360news'):
                    subprocess.run(
                        build_crawl_command(keyword, se, 3,
                                            extkey='site:' + site),
                        check=False)


if __name__ == '__main__':
    main()