import Spider
import EDUtil, DBUtil
from concurrent.futures import ThreadPoolExecutor

# Module-level singletons shared by every save_* function below.
spider = Spider.Spider()  # scraper: fetches city / area / job pages
db_save = DBUtil.SaveData()  # INSERT helper (city / area / job tables)
db_select = DBUtil.QueryData()  # SELECT helper
db_update = DBUtil.UpdateData()  # UPDATE helper (status flags, job details)


def save_city():
    """Fetch the city list from the spider and persist it into the ``city`` table.

    For each (name, url) pair returned by ``spider.get_city_data()`` a row is
    inserted whose primary key is the SHA-style digest of ``name + url``
    (via ``EDUtil.encrypt``).  Any storage error is printed, not raised.
    """
    city_data = spider.get_city_data()
    # Guard clause: nothing fetched, nothing to store.
    if not city_data:
        print('未获取到城市信息，无法进行存储！')
        return
    try:
        for name, url in city_data.items():
            # Digest of (name + url) as the row id; `city_id` avoids
            # shadowing the builtin `id`.
            city_id = EDUtil.encrypt(name + url)
            db_save.save_city(id=city_id, name=name, url=url)
    except Exception as e:
        # Best-effort batch insert: report and stop, matching sibling functions.
        print(repr(e))

def save_area():
    """Fetch each city's area list and persist it into the ``area`` table.

    Reads (id, url) rows from the ``city`` table, scrapes the areas for each
    city url, stores them keyed by ``encrypt(area_name + city_id)``, then
    flags the city row: status 1 on success, -1 when no areas were found.
    Errors are printed, not raised.
    """
    # Pull the (id, url) columns of every city row.
    city_data_list = db_select.select_col_tbl('city', 'id', 'url')
    try:
        for city_data in city_data_list:
            city_id = city_data['id']
            city_url = city_data['url']
            # Synchronous fetch.  The previous ThreadPoolExecutor block was
            # dead code: it submitted get_area_data with no url argument and
            # discarded the Future, silently swallowing the TypeError.
            area_data = spider.get_area_data(city_url)
            if area_data:
                for area_name, area_url in area_data.items():
                    # Row id: digest of (area_name + city_id).
                    area_id = EDUtil.encrypt(area_name + city_id)
                    db_save.save_area(area_id, area_name, area_url, city_id)
                # Mark the city done once per city, not once per area row.
                db_update.upd_city_status(1, city_id)
            else:
                # No areas scraped: flag the city as failed (-1).
                db_update.upd_city_status(-1, city_id)
    except Exception as e:
        print(repr(e))


def save_job_data():
    """Fetch each area's part-time job listings and persist them into ``job``.

    Reads (id, url, name) rows from the ``area`` table, scrapes the job list
    for each area url, stores each job keyed by ``encrypt(area_id + title)``,
    then flags the area row: status 1 on success, -1 when no jobs were found.
    Errors are printed, not raised.
    """
    # Pull the (id, url, name) columns of every area row.
    area_data_list = db_select.select_col_tbl('area', 'id', 'url', 'name')
    try:
        # enumerate replaces the hand-rolled `i = 0; i += 1` counter.
        for i, area_data in enumerate(area_data_list, start=1):
            area_id = area_data['id']
            area_url = area_data['url']
            area_name = area_data['name']
            print(i, area_name, area_url)
            # Synchronous fetch.  The previous ThreadPoolExecutor block was
            # dead code: it submitted get_job_data with no url argument and
            # discarded the Future, silently swallowing the TypeError.
            job_data_list = spider.get_job_data(area_url)
            if job_data_list:
                for job_data in job_data_list:
                    job_title = job_data['title']
                    # Row id: digest of (area_id + job_title).
                    job_id = EDUtil.encrypt(area_id + job_title)
                    db_save.save_job(job_id, job_title, job_data['url'],
                                     area_id, job_data['pv'], job_data['date'])
                # Jobs stored: flag the area as done (1), once per area.
                db_update.upd_area_status(1, area_id)
            else:
                # No jobs scraped: flag the area as failed (-1).
                db_update.upd_area_status(-1, area_id)
            print('存储完成')
    except Exception as e:
        print(repr(e))


def save_job_info():
    """Fetch each job's detail page and write the fields back into ``job``.

    Reads (id, url) rows from the ``job`` table, scrapes the detail page for
    each job url, then updates the job row with the scraped fields and
    status 1; when the page yields nothing the status is set to -1.
    Errors are printed, not raised (consistent with the sibling save_*
    functions, which previously wrapped their loops while this one did not).
    """
    # Pull the (id, url) columns of every job row.
    job_data_list = db_select.select_col_tbl('job', 'id', 'url')
    try:
        # enumerate replaces the hand-rolled `i = 0; i += 1` counter.
        for i, job_data in enumerate(job_data_list, start=1):
            job_id = job_data['id']
            job_url = job_data['url']
            print(i, job_url)
            # Synchronous fetch.  The previous ThreadPoolExecutor block was
            # dead code: it submitted get_job_info with no url argument and
            # discarded the Future, silently swallowing the TypeError.
            job_info_list = spider.get_job_info(job_url)
            if job_info_list:
                for job_info in job_info_list:
                    # Write the detail fields and set status to 1 in one update.
                    db_update.upd_job_info(
                        job_info['type'], job_info['headcount'],
                        job_info['address'], job_info['wage'],
                        job_info['detail'], job_info['com_name'],
                        job_info['com_profile'], job_info['com_address'],
                        1, job_id)
            else:
                # Detail page not scraped: flag the job as failed (-1).
                db_update.upd_job_status(-1, job_id)
            print('存储完成')
    except Exception as e:
        print(repr(e))