
from urllib.request import  urlopen
import requests
from bs4 import BeautifulSoup
import pymongo
import time
# --- MongoDB setup (module-level side effect: creates a client at import time) ---
# NOTE(review): username/password are hard-coded in the connection URI — move
# them to an environment variable or config file before sharing this script.
client = pymongo.MongoClient("mongodb+srv://admin:9936@cluster0-nwbxb.mongodb.net/test?retryWrites=true&w=majority")
# 'test' database on the cluster.
db = client.test

# Collection that scraped comment documents are inserted into (see details()).
p=db.testset

def search_keyword(keyword):
    """Search you.163.com for *keyword* and return the matching product ids.

    Args:
        keyword: search term, e.g. a product name.

    Returns:
        list of product ids extracted from the first page of search results.

    Raises:
        requests.RequestException: if the HTTP request fails or times out.
        KeyError: if the response JSON lacks the expected structure.
    """
    uri = 'https://you.163.com/xhr/search/search.json'
    query = {
        "keyword": keyword,
        "page": 1,
    }
    # timeout so a hung connection cannot block the scraper forever
    res = requests.get(uri, params=query, timeout=10).json()
    result = res['data']['directly']['searcherResult']['result']
    # the original try/except that only re-raised was a no-op and is removed
    return [r['id'] for r in result]
def details(product_id, max_pages=100):
    """Fetch comment pages for *product_id* and persist each page to MongoDB.

    Pages are requested sequentially until an empty page is returned or
    *max_pages* is reached; each fetched page is also inserted into the
    module-level collection ``p`` on a best-effort basis.

    Args:
        product_id: item id, as returned by search_keyword().
        max_pages: exclusive upper bound on page numbers (default 100,
            matching the original hard-coded cap).

    Returns:
        list of comment-page lists, one entry per non-empty page fetched.

    Raises:
        requests.RequestException: if an HTTP request fails or times out.
        KeyError: if a response JSON lacks the expected structure.
    """
    url = 'https://you.163.com/xhr/comment/listByItemByTag.json'
    c_list = []
    for page in range(1, max_pages):
        query = {
            "itemId": product_id,
            "page": page,
        }
        # timeout so a hung connection cannot stall the crawl
        res = requests.get(url, params=query, timeout=10).json()
        comment_list = res['data']['commentList']
        if not comment_list:
            break  # empty page: no more comments
        print("爬取第 %s 页评论" % page)
        c_list.append(comment_list)
        time.sleep(1)  # throttle: be polite to the server
        # best-effort persistence: a failed insert (e.g. duplicate keys)
        # should not abort the crawl, but only DB errors are swallowed —
        # the original bare `except:` hid every error including Ctrl-C
        try:
            p.insert_many(comment_list)
        except pymongo.errors.PyMongoError:
            continue
    return c_list
if __name__ == "__main__":
    # Bug fix: the original discarded search_keyword()'s return value and
    # called details(p) with the Mongo *collection* object instead of a
    # product id, so no real product was ever crawled. Crawl each found id.
    for product_id in search_keyword("帆布鞋"):
        details(product_id)