# -*- coding: utf-8 -*-
# @Time: 2023/1/23 0:39
# @Author: foxhuty
# @File: to_mysql_with_large_data.py
# @Software: PyCharm
# @Based on python 3.10

from sqlalchemy import create_engine
import pandas as pd
import mysql.connector


def create_conn_cursor():
    """Open a connection to the db_ec MySQL database.

    :return: a ``(connection, cursor)`` pair; the caller is responsible
        for closing both.
    """
    connection = mysql.connector.connect(
        host='162.14.114.15',
        user='huty_ec',
        passwd='foxmmer123',
        database='db_ec',
        auth_plugin='mysql_native_password',
    )
    return connection, connection.cursor()


def create_table():
    """Create the ``tb_word_checked`` table in db_ec.

    Raises the driver's error if the table already exists or the
    statement fails; the cursor and connection are closed either way.
    """
    conn, cursor = create_conn_cursor()
    sql = """
    create table tb_word_checked (
    id int auto_increment primary key,
    word varchar(20) not null,
    english_definition varchar(1000),
    chinese_translation varchar(1000)
    )
    """
    # try/finally so a failed execute/commit does not leak the
    # cursor or the connection.
    try:
        cursor.execute(sql)
        conn.commit()
    finally:
        cursor.close()
        conn.close()


def read_into_mysql(file_path, tb_name, db_name='db_ec'):
    """Load a CSV dictionary file into a MySQL table.

    Suitable for writing large volumes of data to MySQL: pandas inserts
    in chunks of 100,000 rows via ``to_sql``.

    :param file_path: path of the CSV file to load
    :param tb_name: destination table name (dropped and recreated if it
        already exists, per ``if_exists='replace'``)
    :param db_name: target database name
    """
    engine = create_engine(f"mysql+pymysql://huty_ec:foxmmer123@162.14.114.15:3306/{db_name}")
    try:
        df = pd.read_csv(file_path, encoding='utf8', low_memory=False)
        # Keep only the columns the dictionary table needs.
        df = df[['word', 'phonetic', 'definition', 'translation']]
        print(df.shape)
        # The RangeIndex becomes the table's 'id' column.
        df.index.name = 'id'
        df.to_sql(name=tb_name, con=engine, if_exists='replace', chunksize=100000)
        print('successfully saved to mysql')
    finally:
        # Release pooled connections even if the read or load fails.
        engine.dispose()


if __name__ == '__main__':
    # Import the ECDICT star-dictionary CSV into the tb_new_dict table.
    csv_path = r'D:\PycharmProjects\EC_Dict\ECDICT-master\stardict.csv'
    read_into_mysql(csv_path, 'tb_new_dict')


