"""Scrape article listings and details for one Zaker news channel and
dump the collected data as JSON under ./zaker/."""
import requests, os, bs4, json,time

# Numeric id of the Zaker channel to scrape.
channel = '1014'
# Build the channel listing URL.
url = 'http://www.myzaker.com/channel/'+channel

# Spoof a desktop Firefox User-Agent so the site serves the regular page.
header = {'Host':'www.myzaker.com','User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:67.0) Gecko/20100101 Firefox/67.0'}

# Create the output directory (no error if it already exists).
os.makedirs('zaker',exist_ok=True)
# Fetch and parse the channel listing page once at import time; the parsed
# tree is read by getActicleList() below.
res = requests.get(url,headers=header)
res.raise_for_status()
soup = bs4.BeautifulSoup(res.text,'html.parser')

# Fetch the news listing data and the per-article detail data.
def getActicleList():
    """Scrape every article linked from the channel listing page and write
    the results to zaker/zaker_item<channel>.json.

    Reads the module-level ``soup`` (parsed channel page), ``header`` and
    ``channel``.  Each collected record contains the article title, its URL,
    the cover-image URL and ``article_detail`` — a list of paragraph HTML
    strings (with rebuilt ``<img>`` tags appended after paragraphs that
    contain lazily-loaded images).
    """
    arr = []
    # Each listing entry is an element with class "img" that carries the
    # title, the article link and a background-image style with the cover URL.
    comicElem = soup.select('.img')
    for comic in comicElem:
        title = comic.get('title')
        article_url = 'http:' + comic.get('href')
        # style looks like background-image:url(//...);  — slice out the URL.
        img_url = comic.get('style')
        img_url = 'http:' + img_url[21:-2]

        # Fetch the article detail page.  Any failure (network error, bad
        # status, unexpected markup) skips this one article and keeps going.
        try:
            res = requests.get(article_url, headers=header)
            res.raise_for_status()
            soup2 = bs4.BeautifulSoup(res.text, 'html.parser')
            articleElem = soup2.select('.article_content  #content')
            all_p = articleElem[0].find_all('p')

            arrOfP = []
            for p in all_p:
                arrOfP.append(str(p))
                for href in p.find_all('img'):
                    # Images are lazily loaded: the real URL is stashed in
                    # data-original.  Rebuild a plain <img> tag around it.
                    # (The old unused data-width/data-height scale math is
                    # gone — it could raise on missing attributes and abort
                    # the whole article.)
                    hh = href.get('data-original')
                    img_emle = '<img border-radius=\"5px\" width=\"100%\" height=\"auto\" src=\"' + str(hh) + '\"/>'
                    arrOfP.append(str(p) + img_emle)

            arr.append({'title': title, 'article_url': article_url,
                        'img_url': img_url, 'article_detail': arrOfP})
            print('json catch ' + str(time.time()))

        except Exception as exc:
            print('There was a problem: %s' % (exc))
            continue

    # Serialize once, after the loop.  Previously json.dumps ran on every
    # iteration and jsonData was undefined (NameError) when no article
    # succeeded; the file handle was also never closed (baconFile.close
    # lacked parentheses).
    data_list = {'status_code': '0', 'data': arr}
    jsonData = json.dumps(data_list)
    print('arr===', jsonData)
    with open('zaker/zaker_item' + channel + '.json', 'w') as baconFile:
        baconFile.write(jsonData)

    print('json catch success')


# Entry point: run the scrape for the configured channel immediately.
# NOTE(review): runs on import as well — consider an
# `if __name__ == "__main__":` guard if this module is ever imported.
getActicleList()
   
#getItem()




