#!/usr/bin/python

import pickle
import subprocess

# Launch one `scrapy crawl ctrip` worker per batch of `step` cities, each
# writing its combined stdout/stderr to its own log file under log/.
step = 10
filepath = './citylist.txt'

# NOTE(review): pickle.load is unsafe if citylist.txt can be written by
# untrusted parties -- it executes arbitrary code on load. Verify provenance.
with open(filepath, 'rb') as city_file:
    cities = pickle.load(city_file)

# HACK: workaround -- skip the first city; remove in the future.
# NOTE(review): the slice only changes how many batches are launched below;
# every spider is still handed the full, unsliced city file via
# `cityfilepath` -- confirm the spiders offset by `spiderid` themselves.
new_cities = cities[1:]

cmdline = 'scrapy crawl ctrip -a spiderid=%d -a step=%d -a cityfilepath=%s'
logfile = 'log/ctrip_spider_%d.log'

for i in range(0, len(new_cities), step):
    #print cmdline % (i, step, filepath)
    # The child process receives its own duplicate of the log fd at spawn,
    # so the parent's handle is safe to close right after Popen returns.
    with open(logfile % (i), 'w') as log:
        subprocess.Popen(
            cmdline % (i, step, filepath),
            shell=True,
            stdout=log,
            stderr=subprocess.STDOUT,
        )
