# coding=utf-8
__author__ = 'syf121@163.com'

import urllib.request as request
from bs4 import BeautifulSoup
from net.yfsun.crawler3.fang.fang import Fang
import os

class AnJuKe(Fang):
    """Crawler for AnJuKe (anjuke.com) new-home listing pages.

    Reads the list-page URL and row limit from the inherited config
    (``self.cf``), scrapes each detail page, prints the extracted fields,
    and inserts (title, url) rows via the inherited ``self.insert``.
    """

    def __init__(self, _db_config_file=None, _cursor_type="list"):
        """Initialize the crawler.

        :param _db_config_file: path to the DB/config ini file; defaults to
            ``config.ini`` in the current working directory.
        :param _cursor_type: cursor type passed through to ``Fang``.
        """
        if _db_config_file is None:
            _db_config_file = os.path.join(os.getcwd(), "config.ini")

        Fang.__init__(self, _db_config_file, _cursor_type)

    def deal_list(self):
        """Fetch the listing page and process up to ``max.rows`` detail links.

        The list URL comes from config section ``ajk.dl``; each anchor with
        class ``pic`` is assumed to carry an ``href`` to a detail page.
        """
        url = self.cf.get("ajk.dl", "list.url")
        origin_bytes = request.urlopen(url).read()
        soup = BeautifulSoup(origin_bytes.decode('utf-8'), 'html.parser')
        items = soup.find_all(attrs={"class": "pic"})
        # Renamed from `max` to avoid shadowing the builtin; slicing replaces
        # the manual counter-and-break loop.
        max_rows = self.cf.getint("fang", "max.rows")
        for item in items[:max_rows]:
            print(item['href'])
            self.deal_detail(item['href'])

    def deal_detail(self, url):
        """Scrape one detail page, print its fields, and persist (title, url).

        :param url: absolute URL of the listing detail page.
        """
        origin_bytes = request.urlopen(url).read()
        soup = BeautifulSoup(origin_bytes.decode('utf-8'), 'html.parser')
        fang = Fang()
        fang.name = self.value(soup, 'id', 'j-triggerlayer')
        fang.url = url
        print("楼盘名称:", fang.name)
        print("在售信息:", self.value(soup, 'class', 'lp-tag-status lp-tag-status-xian'))
        print("标签：", self.value(soup, 'class', 'lp-tags').replace("\n", ","))
        print("价格：", self.value(soup, 'class', 'sp-price'))

        # HACK: `fang.name` is untrusted page content; string-built SQL is
        # injectable and previously broke on any title containing a single
        # quote. Doubling quotes is a stopgap — switch to a parameterized
        # query if `self.insert` (in Fang, not visible here) supports one.
        safe_title = fang.name.replace("'", "''")
        safe_url = url.replace("'", "''")
        sql = "insert into fang (title,url) VALUE ('" + safe_title + "','" + safe_url + "')"
        self.insert(sql)


def _main():
    """Run one crawl: connect, scrape the listing, commit, and close."""
    crawler = AnJuKe()
    crawler.connect_from_config()
    crawler.deal_list()
    crawler.commit()
    crawler.close()


if __name__ == "__main__":
    _main()
