# -*- coding: utf-8 -*-
from scrapy_jsonrpc.jsonrpc import jsonrpc_client_call, JsonRpcError
from six.moves.urllib.parse import urljoin
import sys, optparse, urllib, json
from flask import current_app
import requests


class SpiderManager:
    """Thin HTTP client for a scrapyd server managing the 'spider' project.

    Host and port are read from the active Flask application's config
    (``SCRAPYD_HOST`` / ``SCRAPYD_PORT``); the class attributes below are
    only fallbacks if an instance is built without running ``__init__``.
    """

    host = 'localhost'
    port = 6024

    def __init__(self):
        # NOTE: requires an active Flask application context.
        app = current_app._get_current_object()
        self.host = app.config['SCRAPYD_HOST']
        self.port = app.config['SCRAPYD_PORT']

    def get_wsurl(self, path):
        """Return the absolute URL for *path* on the scrapyd server."""
        return urljoin("http://%s:%s/" % (self.host, self.port), path)

    def post(self, path, data=None):
        """POST form *data* to *path*; return the ``requests.Response``.

        ``data`` defaults to ``None`` instead of ``{}`` to avoid the
        mutable-default-argument pitfall (a dict shared across calls).
        """
        if data is None:
            data = {}
        url = self.get_wsurl(path)
        return requests.post(url, data)

    def json_get(self, path):
        """GET *path* and return the decoded JSON response body."""
        url = self.get_wsurl(path)
        return requests.get(url).json()

    def stop(self, job):
        """stop <spider> - stop a running spider"""
        return self.post('cancel.json', {"project": "spider", "job": job})

    def open(self, spider):
        """open <spider> - open a spider"""
        return self.post('schedule.json', {'project': 'spider', 'spider': spider})

    def spiders_status(self):
        """Return the scrapyd job listing (pending/running/finished) for 'spider'."""
        return self.json_get('listjobs.json?project=spider')

    def available_spider(self):
        """list-available - list name of available spiders"""
        return self.json_get('listspiders.json?project=spider')
