# encoding: utf-8
"""
@author: 夏洛
@QQ: 1972386194
@file: 02-mysql入库实战.py
"""

'''
虎牙
    地址：https://www.huya.com/cache.php?m=LiveList&do=getLiveListByPage&gameId=1663&tagAll=0&callback=getLiveListJsonpCallback&page=1
    字段： 标题  UP主  
    
    
    API数据反爬虫观察   3部分就可以辨别 反爬
    请求头  
    请求体-》 载荷
    cookie  
        
'''
import requests
import pymysql

def conn_mysql():
    """Open a MySQL connection and return a (connection, cursor) pair.

    NOTE(review): credentials and the database name ('xxxxx') are
    placeholders — fill in real values before running.
    """
    connection = pymysql.connect(
        host='localhost',
        user='root',
        password='',
        port=3306,
        db='xxxxx',
    )
    return connection, connection.cursor()

def _extract_rows(payload):
    """Pull [title, nick, totalCount] rows out of the Huya live-list JSON.

    payload: decoded JSON dict from the live-list endpoint. The rows live
    under payload['data']['datas']; either level may be missing or None on
    an error response, in which case an empty list is returned instead of
    raising AttributeError (the original chained .get() calls would crash).
    Missing per-room keys yield None in that slot.
    """
    rooms = (payload.get('data') or {}).get('datas') or []
    # More fields can be added here by extracting additional keys per room.
    return [
        [room.get('introduction'), room.get('nick'), room.get('totalCount')]
        for room in rooms
    ]

def get_huya_data():
    """Fetch page 3 of Huya's live list and persist the rows via save_mysql().

    Best-effort: any network/parse error is printed and swallowed, matching
    the original behavior.
    """
    url = ('https://www.huya.com/cache.php?m=LiveList&do=getLiveListByPage'
           '&gameId=1663&tagAll=0&page=3')
    try:
        # timeout prevents hanging forever on a stalled connection;
        # raise_for_status surfaces HTTP errors before JSON decoding
        res = requests.get(url, timeout=10)
        res.raise_for_status()
        datas = _extract_rows(res.json())  # list of [title, nick, totalCount]
        save_mysql(datas)
    except Exception as e:
        print(e)

def save_mysql(data):
    """Insert scraped rows into the `xx` table.

    data: iterable of [title, nick, totalCount] rows. Each row is committed
    individually; a failing row is printed, rolled back, and skipped so the
    remaining rows are still stored (same best-effort semantics as before).
    """
    # SQL is loop-invariant: build it once instead of per row.
    sql = 'insert into xx (title,nick,star) values (%s, %s, %s)'
    db, cursor = conn_mysql()
    try:
        for row in data:
            try:
                cursor.execute(sql, (row[0], row[1], row[2]))
                db.commit()
            except Exception as e:
                print(e)
                db.rollback()
    finally:
        # The original leaked the connection/cursor; always release them.
        cursor.close()
        db.close()

# Script entry point: run the scrape only when executed directly.
if __name__ == '__main__':
    get_huya_data()
