import getopt
import sys
import os
from scrapy import cmdline

# Launch the "default" scrapy spider with parameters taken from the command line.
#
# Usage:
#   python <script> --start_url=<url> --key=<spider key> --node_id=<id> --site_id=<id>

start_url = ''
key = ''
node_id = ''
site_id = ''

try:
    opts, args = getopt.getopt(sys.argv[1:], 'hi:o:d', ['start_url=', 'key=', 'node_id=', 'site_id=', 'help'])
except getopt.GetoptError as err:
    # A bad flag previously crashed with a traceback; report it cleanly instead.
    sys.exit(str(err))

for op, value in opts:
    if op == '--start_url':
        start_url = value
    elif op == '--key':
        key = value
    elif op == '--node_id':
        node_id = value
    elif op == '--site_id':
        site_id = value

# Fail fast on any missing required option. `key` was previously unchecked,
# which let a missing --key fall through to a broken "-a spider=" argument.
if node_id == '':
    sys.exit('miss params node_id')
if site_id == '':
    sys.exit('miss params site_id')
if start_url == '':
    sys.exit('miss params start_url')
if key == '':
    sys.exit('miss params key')

# Build the argv list directly instead of joining a string and re-splitting it:
# str.split() would corrupt any value containing whitespace (e.g. a start_url
# with spaces), silently producing a malformed scrapy invocation.
cmd_spider = [
    'scrapy', 'crawl', 'default',
    '-a', 'spider=' + key,
    '-a', 'node_id=' + node_id,
    '-a', 'site_id=' + site_id,
    '-a', 'start_url=' + start_url,
]
cmdline.execute(cmd_spider)
