'''
Created on 2017-6-20

@author: 24253
'''
from zs.content.baseurlcontent import BaseUrlContent
from zs.zsqueue import zscontentqueue

from zs.util.download import request
from zs.write.write import write_content
import logging
from bs4 import BeautifulSoup as BS
class xiaomi(BaseUrlContent):
    """Content worker for Xiaomi product pages.

    Pulls URLs from the shared content queue, downloads each product
    detail page, extracts title / price / sales-count fields and hands
    them to ``write_content``.
    """

    # Worker identity: used as the queue channel name and log prefix.
    name = "xiaomi"

    def __init__(self):
        logging.debug("[content][%s]:start!", self.name)

    def run(self):
        """Consume URLs from the queue forever.

        Any exception from a single page is logged and swallowed so one
        bad page does not kill the worker loop.
        """
        while True:
            # Bind url up front: if zscontentqueue.get() itself raises,
            # the except handler below can still log safely (the original
            # raised NameError on an unbound `url` in that case).
            url = None
            try:
                url = zscontentqueue.get(self.name)
                logging.debug("[content][%s]:%s", self.name, url)
                self.get_content_(url)
            except Exception as ex:
                logging.error("[content][" + self.name + "]:" + str(url) + "解析失败！")
                logging.error(ex)

    def get_content_(self, url):
        """Download *url* and extract title, price and sales count.

        Raises (and lets ``run()`` catch) AttributeError when the
        expected markup is missing from the page.
        """
        html = request.get(url, 3)
        soup = BS(html, "lxml")

        result_list = {}

        # BUG FIX: the original passed a *set* {"class", "..."} as the
        # attrs filter, which bs4 does not treat as a class match; a dict
        # is required (consistent with the two find() calls below).
        topdiv = soup.find("div", attrs={"class": "tb-property tb-property-x"})

        # Title: strip leftover span markup, newlines and spaces.
        # (The original repeated .replace("\n","") twice; once suffices.)
        title = topdiv.find("h3").text
        result_list["标题"] = (
            title.replace('<span class=H>', '')
                 .replace('</span>', '')
                 .replace("\n", "")
                 .replace(" ", "")
        )

        # Price: first <strong> inside the tb-meta list.
        jgdiv = topdiv.find("ul", attrs={"class": "tb-meta"})
        result_list["价格"] = jgdiv.find("strong").text

        # Sales count: first <strong> inside the sell counter div.
        xldiv = topdiv.find("div", attrs={"class": "tb-sell-counter"})
        result_list["销量"] = xldiv.find("strong").text

        write_content(self.name, result_list)