# -*- coding: utf-8 -*-
import sys

# Raw string: the original "I:\zrb\..." only worked because "\z" happens not
# to be a recognized escape; invalid escapes are a DeprecationWarning from
# Python 3.6 and an error in future versions, so make the backslashes explicit.
sys.path.append(r"I:\zrb\zrbspider\zrbspider\scrapy_with_redis")

import json
import multiprocessing
import os

# Star imports kept as-is: `execute` is expected from scrapy.cmdline, and
# `path` / `getrdc` from mymodule.config -- the exact exported names are not
# visible from this file, so narrowing the imports here would risk breakage.
from scrapy.cmdline import *
from mymodule.config import *

class serverrun():
    """Launch scrapy crawler processes and track them as a 'task' record in redis.

    Parameters:
        start  -- start URL handed to the 'zrbsp' spider via `-a start_url=...`.
        domain -- task name; used to find/update the matching record in redis.
        num    -- number of crawler processes to spawn (default 1).
        isnew  -- True: push a brand-new task record; False: update the
                  existing record whose taskName equals `domain`.
    """

    def __init__(self, start, domain, num=1, isnew=True):
        # `path` and getrdc() come from the mymodule.config star import at the
        # top of the file; getrdc() presumably returns a redis client -- confirm.
        os.chdir(path)  # scrapy must be run from the project directory
        self.num = num
        self.domain = domain
        self.start = start
        self.a = []  # NOTE(review): never used in this class -- candidate for removal
        self.r = getrdc()
        self.isnew = isnew

    def runspider(self):
        """Run the 'zrbsp' spider in the current process (blocks until the crawl ends)."""
        # print(...) with a single argument behaves identically under
        # Python 2 and 3, unlike the original `print self.start` statement.
        print(self.start)
        execute(['scrapy', 'crawl', 'zrbsp', '-a', "start_url=" + self.start])

    def run(self):
        """Spawn `num` crawler processes and record/update the task in redis.

        Each child pid is appended to the redis 'process' list. Returns the
        legacy status codes callers expect: 666 when an existing task record
        was updated, 6666 when a new one was created.
        """
        pid_n = []
        for i in range(int(self.num)):
            p = multiprocessing.Process(target=self.runspider)
            p.start()
            pid_n.append(p.pid)
            self.r.rpush('process', p.pid)
        if not self.isnew:
            # Drain and rewrite the whole 'task' list, patching the entry
            # whose taskName matches this domain.
            taskdata = self.r.lrange('task', start=0, end=-1)
            self.r.delete('task')
            for raw in taskdata:
                # Entries are stored with json.dumps (below), so parse them
                # with json.loads. The original used eval(), which executes
                # arbitrary code if the redis list is ever written by an
                # untrusted producer -- assumes every producer writes JSON,
                # as both writers in this class do; confirm for other writers.
                i = json.loads(raw)
                if i['taskName'] == self.domain:
                    if i['taskNum']:
                        num = i['taskNum']
                        i['taskNum'] = int(self.num) + int(num)
                    else:
                        i['taskNum'] = self.num
                    i['taskStatus'] = 2
                    i['pid'] = i['pid'] + pid_n
                # Non-matching entries are re-pushed unchanged.
                # NOTE(review): lpush after lrange reverses the list order on
                # every update -- confirm no caller relies on task order.
                data = json.dumps(i)
                self.r.lpush('task', data)
            return 666
        else:
            # Fresh task record for a new crawl.
            taskData = {}
            taskData['taskUrl'] = self.start
            taskData['taskName'] = self.domain
            taskData['taskNum'] = self.num
            taskData['taskStatus'] = 2
            taskData['pid'] = pid_n
            data = json.dumps(taskData)
            self.r.lpush('task', data)
            return 6666

if __name__ == "__main__":
    # Manual smoke test: crawl a single start URL in-process. Note this calls
    # runspider() directly, bypassing run() and its redis bookkeeping.
    launcher = serverrun(start="http://www.taobao.com", domain="dangdang")
    launcher.runspider()
