# coding=utf-8
__author__ = 'Administrator'

from net.yfsun.crawler3.crawler import Crawler
from abc import abstractmethod
import sys
import io
import os


class Fang(Crawler):
    """Base crawler for real-estate ("fang") listings.

    Holds the scraped fields for one property listing and the DB
    plumbing inherited from ``Crawler``. Site-specific subclasses must
    implement :meth:`deal_list` and :meth:`deal_detail`.
    """

    ori_id = ""      # id on the source website
    name = ""        # property / complex name
    url = ""         # listing URL
    price = ""       # total price
    avg_price = ""   # average price (per unit area)
    remark = ""      # tags / labels
    location = ""    # location
    phone = ""       # contact phone number
    jzcate = ""      # building type
    chanquan = ""    # property-rights term (years)

    def __init__(self, _db_config_file=None, _cursor_type="list"):
        """Initialize the underlying Crawler.

        :param _db_config_file: path to the DB config INI file; defaults
            to ``config.ini`` in the current working directory.
        :param _cursor_type: cursor type, passed through to ``Crawler``.
        """
        if _db_config_file is None:
            # NOTE(review): resolved against os.getcwd(), so the result
            # depends on where the process is launched — confirm intended.
            _db_config_file = os.path.join(os.getcwd(), "config.ini")

        Crawler.__init__(self, _db_config_file, _cursor_type)

    def to_insert_sql(self):
        """Return the INSERT statement for this record.

        TODO: placeholder — column list and VALUES tuple are empty;
        subclasses/callers presumably fill this in. Verify before use.
        """
        return "insert into fang () VALUE ()"

    def to_select_sql(self):
        """Return the SELECT statement for this record.

        TODO: placeholder — the WHERE clause is unfinished.
        """
        return "select * from fang where "

    @abstractmethod
    def deal_list(self):
        """Parse a listing (index) page. Must be overridden by subclasses.

        NOTE(review): @abstractmethod is only enforced if Crawler's
        metaclass is ABCMeta — confirm; otherwise this is advisory only.
        """
        pass

    @abstractmethod
    def deal_detail(self):
        """Parse a single detail page. Must be overridden by subclasses."""
        pass

    def value(self, soup, attrName, attrValue):
        """Return the text of the first tag with ``attrName == attrValue``.

        :param soup: a BeautifulSoup / Tag object to search.
        :param attrName: attribute name to match.
        :param attrValue: attribute value to match.
        :return: the matched tag's text, or ``''`` when nothing matches.
        """
        try:
            return soup.find(attrs={attrName: attrValue}).text
        except AttributeError:
            # find() returned None (no matching tag) — treat as a
            # missing value rather than letting the crawl abort.
            # (Was a blanket `except Exception`, which also hid real bugs.)
            return ''
