from crawl_base import CrawlBaseHandle
import uuid
from datetime import datetime
# Standard timestamp layout for this module ('YYYY-MM-DD HH:MM:SS').
format_time = '%Y-%m-%d %H:%M:%S'

class BaseCrawl(CrawlBaseHandle):
    """Base crawler configuration.

    Concrete crawl jobs subclass this and override the class-level config
    dicts (gen_conf, url_conf, parse_html_conf, save_conf, ...); the
    crawl/parse/save machinery itself lives in CrawlBaseHandle.
    """

    # Crawler name (empty here; presumably overridden by subclasses -- TODO confirm).
    crawlname = ''
    # General runtime configuration.
    gen_conf = {
        "crawl_process_num": 4,  # number of crawler processes
        "save_process_num": 1,  # number of data-saving processes
        "queue_num": 10000,   # queue size
    }
    # Database connection settings, keyed by logical database name.
    database_conf = {
        # local root database
        'crawl': {
            'engine': 'pymysql',   # db type, eg: mysql, pymysql, sqlite
            'host': '127.0.0.1',    # db host
            'port': 3306,  # db port
            'user': 'root',   # db user
            # NOTE(review): credentials hard-coded in source -- consider
            # loading them from environment variables or a config file.
            'passwd': 'shuiqing',   # db password
            'db': 'crawl',    # db name
            'charset': 'utf8',  # db charset
            'conn': 16,  # db connections in pool
        },
    }

    # URL source configuration.
    url_conf = {
        # "type" values: fixed, database
        "type": "database",
        "db_conf": {
            'database': 'crawl',
            'table': 'zybd_jibing',  # db table
            'fields': 'url',  # db field
            'other': ''
        },
        "value": "url",  # when type is "fixed", this url is the base request url
        # String replacements applied to each url (uses re.sub).
        "replace": {
            r'^': "http://127.0.0.1:8080"
        },
    }
    # Fields to extract from each crawled page.
    parse_html_conf = {
        "department_id": {
            'type': 'html',
            'target': 'li_list',
            'parse_conf': [
                ('class', 'ul-ss-3 jb-xx-ks', '1'),
                ('inner', 'li', '0'),
            ],
            # Map the parsed text to an id via a lookup table.
            'value_map': {
                # "type" values: from_db, from_file, from_value
                "type": "from_db",
                "db_conf": {
                    'database': 'crawl',
                    'table': 'zybd_department',  # db table
                    'fields': ('name', 'id'),
                },
                "file_path": "",
                "key_field": "name",
                "value_field": 'id',
            }
        },
        "jibing_id": {
            # "type" values: database, html, value
            "type": "database",
            "db_conf": {
                'database': 'crawl',
                'table': 'zybd_jibing',  # db table
                'fields': 'id',
                'where': {"url": "&1&"},  # db field
                # '&1&' is substituted with the page url (host prefix stripped).
                'replace': {
                    '&1&': 'url',
                    'http://127.0.0.1:8080': ''
                }
            },
            # String replacements applied to the looked-up value.
            "value": 'id',
            "replace": {}
        }
    }
    # Download configuration.
    html_down_conf = {
        "user_agent": "&random&",  # user_agent header to send; "&random&" picks a random one
        "add_header": {},  # extra header values to add
        "sleep": 0,  # seconds to sleep between two url downloads
    }
    # Config for building follow-up urls from a parsed field.
    parse_url_conf = {
        "crawl_field": "url",  # parsed field to take the url from
        "base_url": "",  # host/domain to prepend
        "url_replace": []  # string replacements to apply to the url
    }
    # How to persist the parsed fields.
    save_conf = {
        "type": "db_insert",  # "type" values: "file", "db_insert" or "db_update"
        "db_conf": {    # used for db types; basic database settings
            'database': 'crawl',
            'table': 'zybd_jibing_department',  # db table
        },
        "file_conf": {  # used when type is "file"; the filename is auto-generated
            "filepath": "./aa.txt"  # path of the file to save
        },
        "field_conf": {
            "jibing_id": {
                'type': 'from_parse_field',
                'field': "jibing_id"
            },
            "department_id": {
                'type': "from_parse_field",
                'field': "department_id"
            }
        },
        # Field whose parsed value is a list; presumably one row is saved
        # per list element -- TODO confirm against CrawlBaseHandle.
        'list_field': 'department_id'

    }

def create_url(last_page=46):
    """Build the list of listing-page paths to crawl.

    The first page lives at '/name/' and subsequent pages at
    '/name/page_<n>.html' for n in 2 .. last_page - 1.

    Args:
        last_page: one past the highest page number to generate.
            Defaults to 46 (i.e. pages 2..45) for backward compatibility
            with the original hard-coded range.

    Returns:
        list[str]: relative url paths, first page first.
    """
    return ['/name/'] + ['/name/page_%s.html' % i for i in range(2, last_page)]

def get_uuid():
    """Return a fresh random UUID4 as a 32-character hex string (no dashes)."""
    # uuid4().hex is exactly str(uuid4()) with the dashes stripped.
    return uuid.uuid4().hex

def get_now_time(fmt='%Y-%m-%d %H:%M:%S'):
    """Return the current local time formatted as a string.

    Args:
        fmt: strftime layout; defaults to 'YYYY-MM-DD HH:MM:SS'
            (the same value as the module-level ``format_time``),
            so existing zero-argument callers are unaffected.

    Returns:
        str: the formatted current time.
    """
    # Idiomatic instance call instead of datetime.strftime(datetime.now(), fmt).
    return datetime.now().strftime(fmt)

class FangJi(BaseCrawl):
    """Crawl stage: collect formula ('fangji') listing-page item urls.

    Fetches fixed listing pages from a local mirror (127.0.0.1:8080),
    pulls every <a> href out of the 'listbox' element, and inserts one
    row per url into crawl.zhongyoo_fangji (name stored as the 'none'
    placeholder, presumably filled in by a later stage -- TODO confirm).
    """

    # General runtime configuration.
    gen_conf = {
        "crawl_process_num": 1,  # number of crawler processes
        "save_process_num": 1,  # number of data-saving processes
        "queue_num": 10000,   # queue size
    }
    # URL source configuration.
    url_conf = {
        # "type" values: fixed, database, def
        "type": "fixed",
        "value": create_url(),
        # Prefix every path with the local mirror's address (uses re.sub).
        "replace": {
            r'^': "http://127.0.0.1:8080"
        },
    }
    # Fields to extract from each crawled page.
    parse_html_conf = {
        "url": {
            'type': 'html',
            'target': 'href_list',
            'parse_conf': [
                ('class', 'listbox', '1'),
                ('inner', 'a', '0'),
            ],
        },
    }
    # How to persist the parsed fields.
    save_conf = {
        "type": "db_insert",  # "type" values: "file", "db_insert" or "db_update"
        "db_conf": {    # used for db types; basic database settings
            'database': 'crawl',
            'table': 'zhongyoo_fangji',  # db table
        },
        "field_conf": {
            "id": {
                'type': "fixed",
                'field': get_uuid
            },
            "name": {
                'type': "fixed",
                'field': "none"
            },
            "url": {
                'type': "from_parse_field",
                'field': "url"
            },
            "create_time": {
                'type': "fixed",
                'field': get_now_time
            },
            "update_time": {
                'type': "fixed",
                'field': get_now_time
            },

        },
        # Field whose parsed value is a list of items to save.
        'list_field': 'url'

    }

class ZhongYao(BaseCrawl):
    """Crawl stage: collect herb ('zhongyao') listing-page item urls.

    Fetches fixed listing pages from www.zhongyoo.com, pulls every <a>
    href out of the 'r2-con' element, and inserts one row per url into
    crawl.zhongyoo_zhongyao (name stored as the 'none' placeholder,
    later filled in by ZhongYao_name).
    """

    # General runtime configuration.
    gen_conf = {
        "crawl_process_num": 1,  # number of crawler processes
        "save_process_num": 1,  # number of data-saving processes
        "queue_num": 10000,   # queue size
    }
    # URL source configuration.
    url_conf = {
        # "type" values: fixed, database, def
        "type": "fixed",
        "value": create_url(),
        # Prefix every path with the site's domain (uses re.sub).
        "replace": {
            r'^': "http://www.zhongyoo.com"
        },
    }
    # Fields to extract from each crawled page.
    parse_html_conf = {
        "url": {
            'type': 'html',
            'target': 'href_list',
            'parse_conf': [
                ('class', 'r2-con', '1'),
                ('inner', 'a', '0'),
            ],
        },
    }
    # How to persist the parsed fields.
    save_conf = {
        "type": "db_insert",  # "type" values: "file", "db_insert" or "db_update"
        "db_conf": {    # used for db types; basic database settings
            'database': 'crawl',
            'table': 'zhongyoo_zhongyao',  # db table
        },
        "field_conf": {
            "id": {
                'type': "fixed",
                'field': get_uuid
            },
            "name": {
                'type': "fixed",
                'field': "none"
            },
            "url": {
                'type': "from_parse_field",
                'field': "url"
            },
            "create_time": {
                'type': "fixed",
                'field': get_now_time
            },
            "update_time": {
                'type': "fixed",
                'field': get_now_time
            },

        },
        # Field whose parsed value is a list of items to save.
        'list_field': 'url'
    }

class ZhongYao_name(BaseCrawl):
    """Crawl stage: back-fill names for zhongyoo_zhongyao rows.

    Reads urls from crawl.zhongyoo_zhongyao where name = 'none', parses
    the page's 'title' element, and updates the matching row (keyed by url).
    """

    # General runtime configuration.
    gen_conf = {
        "crawl_process_num": 1,  # number of crawler processes
        "save_process_num": 1,  # number of data-saving processes
        "queue_num": 10000,   # queue size
    }
    # URL source configuration.
    url_conf = {
        # "type" values: fixed, database, def
        "type": "database",
        "db_conf": {
            'database': 'crawl',
            'table': 'zhongyoo_zhongyao',  # db table
            'fields': 'url',  # db field
            # Only rows whose name is still the 'none' placeholder.
            'where': {'name': 'none'}
        },
        "value": "url",  # when type is "fixed", this url is the base request url
    }
    # Fields to extract from each crawled page.
    parse_html_conf = {
        "name": {
            'type': 'html',
            'target': 'p_str',
            'parse_conf': [
                ('class', 'title', '1'),
            ],
        },
        "url": {
            # "type" values: database, html, value
            "type": "database",
            "db_conf": {
                'database': 'crawl',
                'table': 'zhongyoo_zhongyao',  # db table
                'fields': 'url',
                'where': {"url": "&1&"},  # db field
                # '&1&' is substituted with the page url.
                'replace': {
                    '&1&': 'url',
                }
            },
            "value": 'url',
        },
    }
    # How to persist the parsed fields.
    save_conf = {
        "type": "db_update",  # "type" values: "file", "db_insert" or "db_update"
        "db_conf": {    # used for db types; basic database settings
            'database': 'crawl',
            'table': 'zhongyoo_zhongyao',  # db table
        },
        "field_conf": {
            "name": {
                'type': "from_parse_field",
                'field': "name"
            },
            "url": {
                'type': "from_parse_field",
                'field': "url"
            },
            "update_time": {
                'type': "fixed",
                'field': get_now_time
            },
        },
        # Column(s) used in the UPDATE's WHERE clause.
        'update_key': ['url']
    }

class ZhongYao_detail(BaseCrawl):
    """Crawl stage: back-fill detail text for zhongyoo_zhongchengyao rows.

    Reads urls from crawl.zhongyoo_zhongchengyao where detail IS NULL,
    parses the paragraphs inside the page's 'text' block, and updates
    the matching row (keyed by url).
    """

    # General runtime configuration.
    gen_conf = {
        "crawl_process_num": 3,  # number of crawler processes
        "save_process_num": 1,  # number of data-saving processes
        "queue_num": 10000,   # queue size
    }
    # URL source configuration.
    url_conf = {
        # "type" values: fixed, database, def
        "type": "database",
        "db_conf": {
            'database': 'crawl',
            'table': 'zhongyoo_zhongchengyao',  # db table
            'fields': 'url',  # db field
            # Only rows that have no detail yet (detail IS NULL).
            'where': {'detail': ('is', None)}
        },
        "value": "url",  # when type is "fixed", this url is the base request url
    }
    # Fields to extract from each crawled page.
    parse_html_conf = {
        "detail": {
            'type': 'html',
            'target': 'p_str',
            'parse_conf': [
                ('class', 'text', '2'),
                ('inner', 'p', '0'),
            ],
        },
        "url": {
            # "type" values: database, html, value
            "type": "database",
            "db_conf": {
                'database': 'crawl',
                'table': 'zhongyoo_zhongchengyao',  # db table
                'fields': 'url',
                'where': {"url": "&1&"},  # db field
                # '&1&' is substituted with the page url.
                'replace': {
                    '&1&': 'url',
                }
            },
            "value": 'url',
        },
    }
    # How to persist the parsed fields.
    save_conf = {
        "type": "db_update",  # "type" values: "file", "db_insert" or "db_update"
        "db_conf": { 'database': 'crawl', 'table': 'zhongyoo_zhongchengyao'},
        "field_conf": {
            "detail": { 'type': "from_parse_field", 'field': "detail" },
            "url": { 'type': "from_parse_field", 'field': "url" },
            "update_time": { 'type': "fixed", 'field': get_now_time },
        },
        # Column(s) used in the UPDATE's WHERE clause.
        'update_key': ['url']
    }

if __name__ == '__main__':
    # Script entry point: run the detail-page back-fill stage.
    crawler = ZhongYao_detail()
    crawler.start()
