"""__author__=Hanlin"""
import json
from sqlachemy_helper import *
import requests
# Fetch a page


def get_page(page, action):
    """Fetch one page of search results from mogu.com.

    Args:
        page: 1-based page number to request.
        action: category keyword (e.g. 'clothing') sent as a query parameter.

    Returns:
        The response body decoded as UTF-8 on HTTP 200, otherwise None.
    """
    url = f'https://list.mogu.com/search?&cKey=15&page={page}&action={action}'
    headers = {
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36"
    }

    # requests has no default timeout; without one a stalled connection
    # would hang the crawler forever.
    response = requests.get(url, headers=headers, timeout=10)
    if response.status_code == 200:
        # response.content is a byte stream; decode explicitly to get a str.
        return response.content.decode('utf-8')
    return None


# Parse a page
def parse_page(html, action):
    """Parse one JSON search-result page and persist every item.

    Args:
        html: JSON response body as returned by get_page().
        action: category keyword; stored on each item under 'category'.

    Returns:
        The feed's 'isEnd' flag — truthy when this was the last page.
    """
    json_data = json.loads(html)
    # Hoist the shared nested lookup instead of repeating the chain.
    wall = json_data['result']['wall']
    is_end = wall['isEnd']
    for item in wall['docs']:
        # Tag the item with its category so it is saved alongside the data.
        item['category'] = action
        print(item['title'])
        # Persist to the database via the sqlalchemy helper.
        save_goods(item)

    return is_end


# Fetch all pages of data
def get_all_page():
    """Crawl every result page for each category.

    Pages are fetched sequentially per category; pagination stops when the
    feed reports the end, or when a request fails.
    """
    actions = ['clothing', 'bags']
    for action in actions:
        page = 1
        print(action)
        print('*' * 20)
        while True:
            print(page)
            html = get_page(page, action)
            # get_page() returns None on any non-200 response; without this
            # guard, json.loads(None) inside parse_page() raises TypeError.
            if html is None:
                break
            is_end = parse_page(html, action)
            if is_end:
                break
            page += 1

def main():
    """Entry point: run the full crawl over every category."""
    get_all_page()


# Run the crawler only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
