import json
import re
from pprint import pprint
from lxml import etree

from analysis.search.interface import DAInterface
from crawler_core.constant.constants import Constants
from crawler_core.constant.constants_api import ConstantAPI
from crawler_core.exception.analysis_result_exec import AnalysisResultException
from crawler_core.exception.unknown_exec import UnknownException
from lib.sys.logger_factory import LoggerFactory


class DA51job(DAInterface):
    """Parser for 51job (ehire.51job.com) resume detail pages.

    Converts the raw HTML captured by the crawler into the structured resume
    dict consumed downstream (basicInfo / jobIntention / selfEvaluation /
    works / projects / educations).
    """

    logger = LoggerFactory.getLogger("DA51job", "search")

    def __init__(self):
        pass

    def analysis_cache(self, detail_data_object):
        """Parse every resume detail page collected by one shard task.

        :param detail_data_object: dict holding the raw page list under
            ``Constants.CACHE_SEARCH_DATA``.
        :return: dict carrying ``SEARCH_CODE`` and, on success, the parsed
            resume list under ``CACHE_SEARCH_DATA``.
        """
        detail_data_list = detail_data_object.get(Constants.CACHE_SEARCH_DATA)
        analize_data_list = []
        try:
            for detail_data in detail_data_list:
                analize_data = self.analysis(detail_data)
                # analysis() returns None for unusable pages (no external id,
                # or too little experience data); keep the result list clean
                # instead of appending None entries.
                if analize_data is not None:
                    analize_data_list.append(analize_data)
        except AnalysisResultException:
            DA51job.logger.info("DA51job 开始解析简历详情页 解析失败")
            return {Constants.SEARCH_CODE: ConstantAPI.RETURN_ERROR_ANALYSIS}
        except UnknownException as e:
            DA51job.logger.info("DA51job 开始解析简历详情页 解析失败")
            # Fix: exception objects are not JSON-serializable — the original
            # json.dumps(e) raised TypeError inside the error handler itself.
            return {
                Constants.SEARCH_CODE: ConstantAPI.RETURN_ERROR_UNKNOWN,
                Constants.SEARCH_MSG: json.dumps(str(e)),
            }
        return {
            Constants.CACHE_SEARCH_DATA: analize_data_list,
            Constants.SEARCH_CODE: ConstantAPI.RETURN_SUCCESS,
        }

    def analysis(self, detail_data):
        """Parse a single resume detail page.

        :param detail_data: dict with "detail_html" plus the crawling-account
            fields (corpcode / accountno / username / password / account).
        :return: success dict ``{CACHE_SEARCH_DATA, SEARCH_CODE}``, or None
            when the page has no external id or the resume lacks an education
            section plus at least one work/project section.
        :raises AnalysisResultException: on any unexpected parsing failure.
        """
        raw_html = detail_data.get("detail_html")
        corpcode = detail_data.get("corpcode")
        accountno = detail_data.get("accountno")
        username = detail_data.get("username")
        password = detail_data.get("password")
        account = detail_data.get("account")
        try:
            html = etree.HTML(raw_html)
            # The candidate id is carried in the detail form's action attribute.
            externalId = "".join(html.xpath('//form[@id="form1"]/@action'))
            if not externalId:
                return None
            ownership = corpcode + ',' + accountno
            externalMsg = json.dumps({
                "url": "https://ehire.51job.com/Candidate/{}".format(externalId),
                "username": username,
                "password": password,
                "account": account,
            })
            sourceSub = "51job"

            name = "".join(html.xpath("//td[@id='tdseekname']/text()")).strip()
            sex = "".join(html.xpath('//table[@class="infr"]/tbody/tr[3]/td[1]/text()[2]')).rstrip()
            updateDateStr = "".join(html.xpath('//span[@id="lblResumeUpdateTime"]/b/text()'))

            # Birthday is embedded in free text such as "... 28岁 1990年1月2日 ...".
            # A non-matching page raises AttributeError here, which the outer
            # handler converts into AnalysisResultException (as before).
            birthday_str = "".join(html.xpath("//table[@class='infr']/tbody/tr[3]/td[1]/text()[3]")).rstrip()
            birth = re.match(r"[\s\S]+?岁[\s\S]+?(\d{4})年(\d{1,2})月(\d{1,2})日[\s\S]+", birthday_str)
            birthdayStr = "-".join((birth.group(1), birth.group(2), birth.group(3)))

            current_city_str = "".join(html.xpath('//table[@class="infr"]/tbody/tr[3]/td[1]/text()[4]'))
            currentCity = current_city_str.split(" ")[-1].strip()

            # Current salary is published as "N 万/年": derive the yearly figure
            # and a monthly figure, falling back to "面议" (negotiable).
            try:
                salary_wan = int("".join(html.xpath("//span[@class='f16']/text()")).strip().split(" ")[0])
                currentSalary = str(salary_wan * 10000)
                salaryStart = str(salary_wan * 10000 // 12)
                salaryEnd = str(salary_wan * 10000 // 12)
            except Exception:
                currentSalary = "面议"
                salaryStart = '面议'
                salaryEnd = '面议'

            # Expected pay, e.g. "8000-9000元/月" or "10-15万/年".
            # Fix: the original fell through to a stale regex match (the
            # birthday match) when the unit was absent, and reused the 元/月
            # pattern for the 万/年 branch so that branch could never match.
            expectPay = "".join(html.xpath('//td[text()="期望薪资："]/../td[2]/text()'))
            expectPayStart = '面议'
            expectPayEnd = '面议'
            try:
                if '元/月' in expectPay:
                    pay = re.match(r'([\s\S]+?)-([\s\S]+?)元/月', expectPay)
                    expectPayStart = pay.group(1)
                    expectPayEnd = pay.group(2)
                elif '万/年' in expectPay:
                    # 1 万 = 10000; convert the yearly range to monthly,
                    # mirroring the currentSalary conversion above.
                    pay = re.match(r'([\s\S]+?)-([\s\S]+?)万/年', expectPay)
                    expectPayStart = str(int(float(pay.group(1)) * 10000) // 12)
                    expectPayEnd = str(int(float(pay.group(2)) * 10000) // 12)
            except Exception:
                expectPayStart = '面议'
                expectPayEnd = '面议'

            applyPosition = "".join(html.xpath('//td[text()="职能/职位："]/../td[2]//text()')).strip()
            expectJob = applyPosition
            workPlace = "".join(html.xpath('//td[text()="地点："]/../td[2]//text()')).strip()
            workIndustry = "".join(html.xpath('//td[text()="行业："]/../td[2]//text()')).strip()
            workNature = "".join(html.xpath('//td[text()="工作类型："]/../td[2]//text()')).strip()
            keyword = "".join(html.xpath('//td[text()="关键字："]/../td[2]//text()')).strip()
            entryDate = "".join(html.xpath('//td[text()="到岗时间："]/../td[2]/text()')).strip()
            evaluation = "".join(html.xpath('//td[text()="自我评价："]/../td[2]/text()')).strip()

            # Years of experience, e.g. "5年经验"; default "0" when absent.
            try:
                workyear_str = "".join(html.xpath('//table[@class="infr"]/tbody/tr[3]/td[1]/text()[5]'))
                workYear = str(int(re.match(r'(\d+?)年[\s\S]+', workyear_str).group(1)))
            except Exception:
                workYear = "0"

            workLastCompany = "".join(
                html.xpath("//td[@class='tba con']/table/tr/td[1]/table/tbody/tr[3]/td[2]/text()")).strip()
            workLastPosition = "".join(
                html.xpath("//td[@class='tba con']/table/tr/td[1]/table/tbody/tr[2]/td[2]/text()")).strip()
            eduLastEducation = "".join(
                html.xpath("//td[@class='tba con']/table/tr/td[2]/table/tr[4]/td[2]/text()")).strip()
            eduLastSchool = "".join(
                html.xpath("//td[@class='tba con']/table/tr/td[2]/table/tr[3]/td[2]/text()")).strip()
            eduLastMajor = "".join(
                html.xpath("//td[@class='tba con']/table/tr/td[2]/table/tr[2]/td[2]/text()")).strip()

            # The section tables shift position depending on which optional
            # sections a resume contains, so probe several table indexes
            # (same indexes the original code probed one by one).
            edu_list = []
            for table_idx in (4, 5, 6):
                rows = html.xpath(
                    '//tr[@id="divInfo"]/td/table[{}]/tbody/tr[2]/td/table/tbody/tr'.format(table_idx))
                edu_list.extend(self._parse_edu_rows(rows))

            # The two work-experience layouts differ in their position/duty
            # cell paths, so those xpaths are passed per section.
            work_list = self._parse_work_rows(
                html.xpath('//tr[@id="divInfo"]/td/table[3]/tr[2]/td/table/tr'),
                './td/table/tbody/tr[3]/td[2]/strong/text()',
                './td/table/tbody/tr[4]/td/table/tbody/tr/td[2]/text()')
            work_list.extend(self._parse_work_rows(
                html.xpath('//tr[@id="divInfo"]/td/table[4]/tr[2]/td/table/tr'),
                './td/table/tr[3]/td[2]/strong/text()',
                './td/table//tbody/tr[4]/td/table/tbody/tr/td[2]/text()'))

            project_list = []
            for table_idx in (3, 4, 5):
                rows = html.xpath(
                    '//tr[@id="divInfo"]/td/table[{}]/tr[2]/td/table/tr'.format(table_idx))
                project_list.extend(self._parse_project_rows(rows))

            basicInfo = {
                "name": name,
                "sex": sex,
                "mobile": "",
                "email": "",
                "birthdayStr": birthdayStr,
                "currentAddress": "",
                "currentCity": currentCity,
                'currentSalary': currentSalary,
                "salaryStart": salaryStart,
                'salaryEnd': salaryEnd,
                "workYear": workYear,
                "workLastPosition": workLastPosition,
                "workLastCompany": workLastCompany,
                "eduLastEducation": eduLastEducation,
                "eduLastSchool": eduLastSchool,
                "eduLastSchoolTag": "",
                "eduLastMajor": eduLastMajor,
            }
            jobIntention = {
                "expectJob": expectJob,
                "expectPayStart": expectPayStart,
                "expectPayEnd": expectPayEnd,
                "workPlace": workPlace,
                "workIndustry": workIndustry,
                "workNature": workNature,
                "entryDate": entryDate,
                "keyword": keyword,
            }
            selfEvaluation = {"evaluation": evaluation}
            resume = {
                "ownership": ownership,
                "externalId": externalId,
                "externalMsg": externalMsg,
                "sourceSub": sourceSub,
                "updateDateStr": updateDateStr,
                "basicInfo": basicInfo,
                "jobIntention": jobIntention,
                'selfEvaluation': selfEvaluation,
                "works": work_list,
                "projects": project_list,
                'educations': edu_list,
            }

            # Only resumes with an education section and at least one
            # work/project section are complete enough to keep.
            if edu_list and (work_list or project_list):
                return {
                    Constants.CACHE_SEARCH_DATA: resume,
                    Constants.SEARCH_CODE: ConstantAPI.RETURN_SUCCESS,
                }
            return None
        except Exception:
            DA51job.logger.info("DA51job 开始解析简历详情页 解析失败")
            raise AnalysisResultException

    def _parse_edu_rows(self, rows):
        """Parse education-experience rows into "educations" entries.

        Malformed rows are skipped individually instead of aborting the
        whole section (the original wrapped the entire loop in one try).
        """
        entries = []
        for row in rows:
            try:
                period = ''.join(row.xpath('./td/table/tr[1]/td[1]/text()'))
                start_date, end_date = period.split("-", 1)
                entries.append({
                    "school": "".join(row.xpath('./td/table/tr[1]/td[2]/strong/text()')),
                    "degree": "".join(row.xpath('./td/table/tr[2]/td/text()[1]')),
                    "major": "".join(row.xpath('./td/table/tr[2]/td/text()[2]')),
                    "startDate": start_date,
                    "endDate": end_date,
                })
            except Exception:
                continue
        return entries

    def _parse_work_rows(self, rows, position_xpath, duty_xpath):
        """Parse work-experience rows into "works" entries.

        :param position_xpath: row-relative xpath of the position cell.
        :param duty_xpath: row-relative xpath of the duty cell.
        """
        entries = []
        for row in rows:
            try:
                period = "".join(row.xpath('./td/table/tbody/tr[1]/td[1]/text()'))
                start_date, end_date = period.split("-", 1)
                entries.append({
                    "company": "".join(row.xpath('./td/table/tbody/tr[1]/td[2]/span[1]/text()')),
                    "position": "".join(row.xpath(position_xpath)),
                    "startDate": start_date,
                    "endDate": end_date,
                    "duty": "".join(row.xpath(duty_xpath)),
                })
            except Exception:
                continue
        return entries

    def _parse_project_rows(self, rows):
        """Parse project-experience rows into "projects" entries."""
        entries = []
        for row in rows:
            try:
                period = "".join(row.xpath('./td/table/tr[1]/td[1]/text()'))
                start_date, end_date = period.split("-", 1)
                entries.append({
                    "projectName": "".join(row.xpath('./td/table/tr[1]/td[2]/strong/text()')),
                    "description": "".join(row.xpath('./td/table/tr[3]/td/table/tbody/tr/td[2]/text()')),
                    "startDate": start_date,
                    "endDate": end_date,
                    "duty": "".join(row.xpath('./td/table/tr[4]/td/table/tbody/tr/td[2]/text()')),
                })
            except Exception:
                continue
        return entries


# if __name__ == '__main__':
#     with open('data.json', 'r') as f:
#         data = json.load(f)
#
#     detail_data = data.get("data")[0]
#     pprint(detail_data)
#     spider = DA51job()
#     spider.analysis_cache(detail_data)
