from tools.command import ScrapyCommand
from tools.conf import settings


class Command(ScrapyCommand):
    """Command that loads a data file into the database.

    Invoked as: <command> -f flow_name -i input_file -t task_name
    Currently a stub: ``run`` only prints a placeholder marker.
    """

    # Does not need to be run from inside a project directory.
    requires_project = False

    def syntax(self):
        """Return the usage string shown in command-line help."""
        return "-f flow_name -i input_file -t task_name"

    def short_desc(self):
        """Return the one-line description shown in the command list."""
        return "put data_file to database"

    def add_options(self, parser):
        """Register command-line options (currently only the inherited ones).

        Example of adding an option:
            parser.add_option("-n", "--nofollow", dest="nofollow",
                              action="store_true",
                              help="don't follow links (for use with URLs only)")
        """
        ScrapyCommand.add_options(self, parser)

    def process_options(self, args, opts):
        """Apply parsed options (currently only the inherited handling).

        Example of consuming an option:
            if opts.nofollow:
                settings.overrides['CRAWLSPIDER_FOLLOW_LINKS'] = False
        """
        ScrapyCommand.process_options(self, args, opts)

    def run(self, args, opts):
        """Execute the command.

        Stub implementation — prints a placeholder instead of importing data.
        NOTE: the original body used a Python 2 print statement indented with
        a tab (a TabError under Python 3); ``print("123")`` behaves
        identically on both Python 2 and 3.
        """
        print("123")