# -*- coding: UTF-8 -*-
# Guazi.com used-car listing crawler (Python 2 — see reload(sys) hack below).
import httpclient  # project helper: JSON-over-HTTP client (not shown here)
import config      # project helper: reads URL endpoints from configuration
import download    # project helper: fetches a page's HTML (retry count arg?)
from bs4 import BeautifulSoup
import mysqlpool   # project helper: pooled MySQL access
import log         # project helper: logger
import sys

# Python 2 hack: re-expose sys.setdefaultencoding (removed by site.py) and
# force the process-wide default encoding to UTF-8 so implicit str<->unicode
# conversions of the Chinese page content don't raise UnicodeDecodeError.
reload(sys)
sys.setdefaultencoding('utf-8')


def downloadByUrl(url, cityId, brandId, modelsList):
    """Scrape one Guazi listing page and insert every car found into MySQL.

    Parses the page's ``carlist clearfix js-top`` <ul> for car entries,
    matches each car's name against *modelsList* to resolve a modelsId,
    inserts one row per car into ``car_info``, then follows the "next"
    pagination link recursively until no further page exists.

    :param url: absolute listing-page URL to fetch
    :param cityId: numeric city id stored with each row
    :param brandId: numeric brand id stored with each row
    :param modelsList: list of dicts with keys "modelsName" and "modelsId"
    :return: None; failures are logged, not raised
    """
    try:
        html = download.download(url, 2)
        soup = BeautifulSoup(html)
        nexturl = ""
        modelsId = 0
        for ul in soup.find_all("ul"):
            className = ul.get("class", default=None)
            if className is None:
                continue
            # The car listing container carries exactly these three classes.
            if (len(className) == 3 and className[0] == "carlist"
                    and className[1] == "clearfix" and className[2] == "js-top"):
                for li in ul.find_all("li"):
                    dataId = li.get("data-scroll-track", default=None)
                    if dataId is None:
                        # Malformed entry: skip this <li> instead of letting a
                        # crash abort the whole page (old behavior).
                        continue
                    carNo = dataId.split('@')[0]
                    for lia in li.find_all("a"):
                        liaclassName = lia.get("class", default=None)
                        if liaclassName and liaclassName[0] == "car-a":
                            carUrl = lia['href'].split('#')[0]  # drop fragment
                    for lidiv in li.find_all("div"):
                        lidivClassName = lidiv.get("class", default=None)
                        if not lidivClassName:
                            continue
                        if lidivClassName[0] == "t":
                            carName = lidiv.string
                            modelsInfo = carName.split()
                            # First whitespace-separated token of the title is
                            # the model name; map it to its numeric id.
                            for models in modelsList:
                                if models["modelsName"] in modelsInfo[0]:
                                    modelsId = models["modelsId"]
                                    break
                        if lidivClassName[0] == "t-price":
                            for lidivp in lidiv.find_all("p"):
                                carPrice = lidivp.get_text()[:-1]  # strip unit char
                            for lidivem in lidiv.find_all("em"):
                                originalPrice = lidivem.string
                                # BUG FIX: original used "or", which is always
                                # true and crashed on a None/empty price.
                                if originalPrice is not None and originalPrice != '':
                                    originalPrice = originalPrice[:-1]
                        if lidivClassName[0] == "t-i":
                            carYearAll = lidiv.get_text()
                            carYear = carYearAll[0:5]       # leading year field
                            carKilometre = carYearAll[6:]   # remainder = mileage

                    # SECURITY NOTE(review): scraped values are spliced straight
                    # into SQL; mysqlpool.mysqlinsert only accepts a raw
                    # statement, so this stays string-built. Switch to a
                    # parameterized API if mysqlpool grows one.
                    sql = ("insert into car_info "
                           "(carNo,carUrl,brandId,modelsId,cityId,"
                           "carStatus,carPrice,originalPrice,carYear,carKilometre,carName) "
                           "VALUES ('%s','%s',%s,%s,%s,%s,%s,%s,'%s','%s','%s') "
                           % (carNo, carUrl, brandId, modelsId, cityId, 1,
                              carPrice, originalPrice, carYear, carKilometre, carName))
                    mysqlpool.mysqlinsert(sql)
            # Pagination bar: remember the "next" link, if any.
            if (len(className) >= 2 and className[0] == "pageLink"
                    and className[1] == "clearfix"):
                for a in ul.find_all("a"):
                    aClassName = a.get("class", default=None)
                    if aClassName is not None and "next" == aClassName[0]:
                        nexturl = a['href']
        if nexturl != "":
            downloadByUrl(config.getConfig("url", "guaziurl") + nexturl,
                          cityId, brandId, modelsList)
        else:
            return None
    except Exception:
        # Narrowed from a bare except so Ctrl-C / SystemExit still propagate.
        log.error("失败链接" + url)


# Drive the crawl: for every (city, brand) pair, resolve the brand's model
# list, build the listing URL, and scrape it page by page via downloadByUrl.
cityList = httpclient.sendJson("", config.getConfig("url", "findCity"))
brandList = httpclient.sendJson("", config.getConfig("url", "findBrand"))
for city in cityList:
    for brand in brandList:
        payload = {"brandId": brand['brandId']}
        modelsList = httpclient.sendJson(payload, config.getConfig("url", "findModels"))
        # brandUrl's first four characters are dropped before appending.
        url = (config.getConfig("url", "guaziurl")
               + city['cityUrl'] + brand['brandUrl'][4:])
        try:
            downloadByUrl(url, city['cityId'], brand['brandId'], modelsList)
        except:
            log.error("失败链接" + url)

