# -*- coding: utf-8 -*-
"""
Created on 2021-09-14 14:43:31
---------
@summary:
---------
@author: xubin
"""

import feapder


class CloundClone(feapder.BatchSpider):
    """Batch spider for cloud-clone.com.cn search pages.

    Tasks are pulled from the MySQL task table configured in ``__main__``;
    each task row supplies a product ``name`` (and a ``url`` column that is
    currently not used — see ``start_requests``).
    """

    # Inline database settings, used when the project has no setting.py.
    # NOTE(review): credentials are hard-coded here — consider moving them
    # into setting.py or environment variables.
    __custom_setting__ = dict(
        REDISDB_IP_PORTS="192.168.1.55:6379",
        REDISDB_USER_PASS="deepbio6379",
        REDISDB_DB=0,
        MYSQL_IP="192.168.1.99",
        MYSQL_PORT=3307,
        MYSQL_DB="feapder",
        MYSQL_USER_NAME="root",
        MYSQL_USER_PASS="root",
    )

    def start_requests(self, task):
        """Turn one task row into a search-page request.

        NOTE(review): the task's own ``url`` column is ignored and a fixed
        search URL is requested instead — confirm this is intentional.
        """
        # url = task['url']
        yield feapder.Request(
            'http://www.cloud-clone.com.cn/Search/search.html?keywords=&type=121&specie=',
            name=task['name'],
        )

    def parse(self, request, response):
        """Placeholder parser: only prints the response object."""
        print(response)


if __name__ == "__main__":
    # Wire the spider to its Redis task queue and MySQL task/batch tables.
    spider = CloundClone(
        redis_key="cloud_clone:task",  # root key for task bookkeeping in Redis
        task_table="clone_task_info",  # MySQL task table
        task_keys=["id", "name", "url", "status"],  # columns fetched per task row
        task_state="status",  # column that tracks task state
        batch_record_table="cloud_clone_batch_record",  # MySQL batch-record table
        batch_name="clone_clone(云客隆)",  # display name of the batch
        batch_interval=1 / 24,  # batch period in days (1/24 == hourly)
    )

    # spider.start_monitor_task()  # dispatch and monitor tasks
    spider.start()  # crawl
