from bs4 import BeautifulSoup
import requests
import pymysql
import random
from threading import Thread, current_thread
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor

def get_url(url, timeout=10):
    """Fetch *url* over HTTP and return the raw Response.

    Args:
        url: Target URL string.
        timeout: Seconds to wait for the server (default 10). Without a
            timeout, ``requests.get`` can block forever on a stalled
            connection, hanging the whole scrape.

    Returns:
        The ``requests.Response`` object; the status code is not checked
        here — callers parse the body themselves.
    """
    response = requests.get(url, timeout=timeout)
    return response
# 返回数据表中
def par_html(response):
    """Extract table data from an HTTP response.

    Skips the first ``<tr>`` (assumed to be the header row) and, for each
    remaining row, collects the stripped text of its first five ``<td>``
    cells.

    Args:
        response: A ``requests.Response`` whose body contains an HTML table.

    Returns:
        list[list[str]]: One inner list per data row. A row with fewer
        than five ``<td>`` cells yields a shorter inner list.
    """
    soup = BeautifulSoup(response.text, 'lxml')
    # strip=True trims surrounding whitespace inside each cell.
    # (The original also pre-assigned res = [], which was dead code —
    # the comprehension rebinds it immediately.)
    return [[td.get_text(strip=True) for td in tr.find_all('td')[:5]]
            for tr in soup.find_all("tr")[1:]]


def random_code():
    """Return a 76-character string of random digits, each in '0'..'4'.

    Used to fabricate the ``PageNo`` query parameter for the scrape URL.
    """
    # One randint call per character, joined in a single pass.
    return ''.join(str(random.randint(0, 4)) for _ in range(76))

def store_data(NO, SI, ID, MARK, OTHER):
    """Insert one scraped table row into ``pachong_data``.

    Args:
        NO, SI, ID, MARK, OTHER: The five cell values of one table row.

    Raises:
        pymysql.MySQLError: On connection or insert failure.

    Note:
        A fresh connection is opened per call, matching the original
        design; callers inserting many rows may want to pool it.
    """
    conn = pymysql.connect(
        host='127.0.0.1',
        port=3306,
        user='root',
        password='admin',
        charset='utf8',
        database='pachong'  # name of the scraping database
    )
    try:
        with conn.cursor(pymysql.cursors.DictCursor) as cursor:
            # Parameterized query: the driver escapes the values, fixing
            # the SQL-injection / broken-quote bug of the old %-formatted
            # statement.
            sql = ('insert into pachong_data(NO, SI, ID, MARK, OTHER) '
                   'values (%s, %s, %s, %s, %s)')
            cursor.execute(sql, (NO, SI, ID, MARK, OTHER))
        conn.commit()  # the insert only becomes visible after commit
    finally:
        # Always release the connection — the original leaked it
        # (both close() calls were commented out).
        conn.close()


if __name__ == '__main__':
    # Build a random page code, fetch the matching page, and persist
    # every scraped table row. (Dead commented-out while/try scaffolding
    # removed; it had left the body stranded at a 12-space indent.)
    number = random_code()
    url = 'http://127.0.0.1:8000/new.html?PageNo=%s' % number
    response = get_url(url)
    # res holds the table body rows; the header row is already skipped
    # inside par_html.
    res = par_html(response)
    for row in res:
        print(row)
        # Each row is [NO, SI, ID, MARK, OTHER]. Indexing (rather than
        # unpacking) preserves the original IndexError behaviour for
        # rows shorter than five cells.
        store_data(row[0], row[1], row[2], row[3], row[4])
