#!/usr/bin/env python
# encoding=utf-8

import json
import sys
import datetime
from scpy.logger import get_logger

import scpy2.util as util
from scpy2.crawlers.crawler import CrawlProcessor
from parse_util.lagou_crawler import LagouCrawler
from parse_util.job51_crawler import Job51Crawler
from parse_util.lieping_job_crawler import LiePinCrawler
from parse_util.zhi_lian_crawler import ZhilianCrawler
reload(sys)
sys.setdefaultencoding('utf-8')

logger = get_logger(__file__)


class RecruitmentLagou(CrawlProcessor):
    """Crawl processor that collects Lagou recruitment postings for a company."""

    def crawl(self, params):
        """Run a Lagou crawl for ``params['company_name']`` and return its result."""
        company = params.get('company_name')
        crawler = LagouCrawler(company)
        return crawler.run()


class RecruitmentJob51(CrawlProcessor):
    """Crawl processor that collects 51job recruitment postings for a company."""

    def crawl(self, params):
        """Run a 51job crawl for ``params['company_name']`` and return its result."""
        company = params.get('company_name')
        crawler = Job51Crawler(company)
        return crawler.run()


class RecruitmentLiePin(CrawlProcessor):
    """Crawl processor that collects Liepin recruitment postings for a company."""

    def crawl(self, params):
        """Run a Liepin crawl for ``params['company_name']`` and return its result."""
        company = params.get('company_name')
        crawler = LiePinCrawler(company)
        return crawler.run()

class RecruitmentZhiLian(CrawlProcessor):
    """Crawl processor that collects Zhilian recruitment postings for a company."""

    def crawl(self, params):
        """Run a Zhilian crawl for ``params['company_name']`` and return its result."""
        company = params.get('company_name')
        crawler = ZhilianCrawler(company)
        return crawler.run()

