import time
"""
mysql> create table books(id int auto_increment primary key,title varchar(200),a
uthor varchar(200),price float,coverpic varchar(200),descs text);
"""
import requests
from dbutils import DbHelper

def crawl(p):
    """Fetch one 100-item page of the Dangdang e-book sales ranking and store it.

    Args:
        p: Zero-based page index; requests items [p*100, p*100 + 100) of the
           "XS2" (novels) category ordered by ``dd_sale``.

    Raises:
        requests.HTTPError: if the API answers with a non-2xx status.
        requests.Timeout: if the server does not respond within 10 seconds.
        KeyError: if the JSON payload lacks the expected structure.
    """
    start = p * 100
    # Query string split for readability; the concatenated value is identical
    # to the original single-line URL.
    url = (
        "http://e.dangdang.com/media/api.go?action=mediaCategoryLeaf"
        "&promotionType=1&deviceSerialNo=html5&macAddr=html5&channelType=html5"
        "&permanentId=20231204140544425125841869509437859&returnType=json"
        "&channelId=70000&clientVersionNo=6.8.0&platformSource=DDDS-P"
        "&fromPlatform=106&deviceType=pconline&token=&"
        f"start={start}&end={start + 100}&category=XS2&dimension=dd_sale"
    )
    # timeout prevents the crawler from hanging forever on a stalled server;
    # raise_for_status turns HTTP errors into a clear exception instead of a
    # confusing KeyError on the JSON below.
    res = requests.get(url, timeout=10)
    res.raise_for_status()
    books = res.json()["data"]["saleList"]
    datas = []
    for book in books:
        # The first entry of mediaList carries the display metadata.
        media = book["mediaList"][0]
        datas.append([
            media["title"],
            media["authorPenname"],
            media["lowestPrice"],
            media["coverPic"],
            media["descs"],
        ])
    helper = DbHelper()
    # Parameterized insert — values are escaped by the DB driver.
    sql = "insert into books(title,author,price,coverpic,descs) values(%s,%s,%s,%s,%s)"
    helper.save(sql, datas)
if __name__ == '__main__':
    # Crawl ranking pages 90..100 inclusive, printing progress as we go.
    # Insert a time.sleep() here if the server starts rate-limiting requests.
    for page in range(90, 101):
        print(page)
        crawl(page)