# coding=utf-8
# Lianjia (real-estate) entity extraction
import sys

sys.path.append("..")
sys.path.append("../../")
import json

from i_entity_extractor.extractors.default.default_extractor import DefaultExtractor


class LianjiaExtractor(DefaultExtractor):
    """Entity extractor for Lianjia (real-estate) pages.

    Flattens the raw ``extract_data`` dict of a residential-compound page
    into an entity dict containing a fixed set of fields; any field absent
    from the input comes back as ``None`` (``dict.get`` semantics).
    """

    # Fields copied verbatim from the raw extract_data into the entity dict.
    ENTITY_FIELDS = (
        "name", "id", "url", "build_time", "address", "city", "district",
        "property_fee", "property", "developer", "basic_profile", "house",
        "households", "school", "avg_price", "price_MoM", "price_YoY",
        "deal_lstmonth", "cnt_lease", "subway", "cnt_sale", "deal_90d",
    )

    def __init__(self, topic_info, log):
        DefaultExtractor.__init__(self, topic_info, log)

    def entity_extract(self, parse_info, extract_data):
        '''Entry point for Lianjia entity parsing.

        parse_info:   PageParseInfo thrift object; only base_info.url and
                      extract_info.topic_id are read here (for logging).
        extract_data: dict of raw extracted fields.
        Returns the flattened entity dict (see format_extract_data).
        '''
        extract_info = parse_info.extract_info
        url = parse_info.base_info.url
        self.log.info("lianjia_start_parse\turl:%s\ttopic_id:%s" % (url, extract_info.topic_id))
        return self.format_extract_data(extract_data)

    def format_extract_data(self, extract_data):
        '''Build the entity dict from raw extract_data.

        Each field in ENTITY_FIELDS is looked up with .get(), so missing
        keys map to None — identical to the previous per-field version.
        '''
        return {field: extract_data.get(field) for field in self.ENTITY_FIELDS}


if __name__ == "__main__":

    import sys

    sys.path.append('../../')
    topic_id = 99

    import pytoml
    from conf import get_config
    from bdp.i_crawler.i_extractor.ttypes import BaseInfo, CrawlInfo, ExtractInfo, PageParseInfo

    with open('../../entity.toml', 'rb') as config:
        config = pytoml.load(config)
    conf = get_config(config)
    import common
    from entity_extractor_route import entity_route_obj

    topic_info = entity_route_obj.read_topics()[topic_id]
    obj = LianjiaExtractor(topic_info, common.log)
    extract_data = {
        "cnt_lease": "16",
        "cnt_sale": "48",
        "deal_90d": "20",
        "id": "2411050506921",
        "subway": ""
    }
    src_url = "www.baidu.com"
    data = json.dumps(extract_data)
    extract_info = ExtractInfo(ex_status=2, extract_data=data)
    base_info = BaseInfo(url=src_url)
    parser_info = PageParseInfo(base_info=base_info, extract_info=extract_info)
    data = obj.entity_extract(parser_info, extract_data)
    print src_url

    for key, value in data.items():
        if isinstance(value, list):
            for i in value:
                print key, ":", i
        elif isinstance(value, dict):
            for key2, value2 in value.items():
                print key2, ":", value2
        else:
            print key, ":", value
