import pandas
from sqlalchemy import create_engine


def insert_csv_to_mysql(csv_file, host_url, username, password, database, table_name, chunk_rows):
    """Stream a CSV file into a MySQL table in fixed-size chunks.

    Parameters:
        csv_file:   path to the source CSV (UTF-8, first line is the header)
        host_url:   MySQL host name/IP (port is hard-wired to 3306)
        username:   MySQL user
        password:   MySQL password
        database:   target schema name
        table_name: target table; rows are appended (table is created if absent)
        chunk_rows: number of CSV rows inserted per batch
    """
    # The mysql+pymysql driver string requires the pymysql package.
    engine = create_engine(f'mysql+pymysql://{username}:{password}@{host_url}:3306/{database}')

    # Count data rows (file lines minus the header). Use a context manager so
    # the file handle is closed — the original left it open. NOTE(review): a
    # quoted field containing a newline would make this raw line count differ
    # from pandas' parsed row count.
    with open(csv_file, "r", encoding="utf-8") as f:
        total_rows = sum(1 for _ in f) - 1

    insert_row_count = 0
    # chunksize makes read_csv return an iterator of DataFrames, so arbitrarily
    # large files never have to fit in memory at once.
    reader = pandas.read_csv(csv_file, chunksize=chunk_rows)
    for data_frame in reader:
        data_frame.to_sql(name=table_name, con=engine, if_exists="append", index=False)
        insert_row_count += len(data_frame)
        print(f"本次成功导入了{insert_row_count}条数据到mysql中了(共{total_rows}条数据)")
    print(f"成功导入了{insert_row_count}条数据到mysql中了")

    # Release the engine's pooled connections now that the import is done.
    engine.dispose()


# Guard the entry point so importing this module does not immediately fire a
# database import job; behavior when run as a script is unchanged.
if __name__ == "__main__":
    insert_csv_to_mysql(csv_file="F:\\python_workspace\\pythonProject2\\books.csv",
                        host_url="localhost", username="root", password="root", database="bookstore", table_name="books",
                        chunk_rows=3)

# pip install sqlalchemy
# pip install pymysql  # 必需：连接串 mysql+pymysql 依赖 pymysql 驱动
# 升级 pip版本如下：
#     curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py
#     ----------------------------------------------------------------------------------
#     % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
#                                  Dload  Upload   Total   Spent    Left  Speed
#     100 2098k  100 2098k    0     0   411k      0  0:00:05  0:00:05 --:--:--  516k
#
#
#
#
#     C:\Users\admin>     python get-pip.py

# 安装requests：  pip install requests(本案例没有用requests，从某网站爬取url的案例用到了)

# 安装BeautifulSoup：  pip install beautifulsoup4 lxml  (用于解析html文件)(本案例没有用requests，从某网站爬取url的案例用到了)
