# -*- coding:utf-8 -*-

__author__ = 'ghostviper'

import ast

import requests
from redis import Redis

from crawler_config import crawler_rule, start_urls
from webcrawler import crawler
from webparser import parser
r = Redis()


def set_crawler_config(crawler_name, config):
    """Persist *config* under the crawler's Redis config key.

    The value is stored as the Python-literal string form of *config*
    (``str(config)``) so that ``get_crawler_config`` can parse it back.
    """
    key = ":".join((crawler_name, "crawler_config"))
    r.set(key, str(config))


def get_crawler_config(crawler_name):
    """Load and parse the crawler config stored by ``set_crawler_config``.

    Returns the parsed Python object (typically a dict), or ``None`` when
    no config is stored for *crawler_name*.
    """
    crawler_config_key = "{crawler_name}:crawler_config".format(crawler_name=crawler_name)
    result = r.get(crawler_config_key)
    if not result:
        return None
    # redis-py returns bytes by default; normalize to str before parsing.
    if isinstance(result, bytes):
        result = result.decode("utf-8")
    # SECURITY: the original used eval(), which executes arbitrary code if
    # the Redis value is ever tampered with. literal_eval parses only
    # Python literals (dict/list/str/num/...), which is exactly what
    # set_crawler_config writes via str(config).
    return ast.literal_eval(result)

# Fetch the stored config for "test_crawler" (None if nothing was stored)
# and enqueue an asynchronous crawl of the start page.
crawler_config = get_crawler_config("test_crawler")
# NOTE(review): apply_async looks like a Celery task dispatch — confirm
# against the webcrawler module.
crawler.apply_async(args=("http://xw.kunming.cn/today.htm", crawler_config))

"""
res = requests.get("http://xw.kunming.cn/a/2017-10/19/content_4807098.htm")
parser.apply_async(args=(res.text, crawler_config))
"""