import pymysql
from pyspark.context import SparkContext
import time

# 1. Create the Spark execution environment (local mode).
sc = SparkContext(master='local', appName='word_count')

# Goal: count the number of students per class and save the result to MySQL.

student_rdd = sc.textFile("../../data/students.txt")


def _class_one_pair(line):
    """Map one student record to a (class, 1) pair; class is the last CSV field."""
    return line.split(",")[-1], 1


# Count students per class: map to (class, 1), then sum the 1s per key.
clazz_num_rdd = student_rdd.map(_class_one_pair).reduceByKey(lambda x, y: x + y)

def to_mysql_fun(kv):
    """Write the (clazz, num) pairs of one RDD partition to MySQL.

    Intended for ``foreachPartition``: *kv* is an iterator over the
    partition's ``(clazz, num)`` tuples, so one DB connection serves the
    whole partition.

    Bug fixed: the original called ``con.commit()`` and ``con.close()``
    inside the for loop, closing the connection after the first row — every
    later ``execute`` then failed on a closed connection. Now all rows are
    inserted, the transaction is committed once, and the connection is
    always closed, even if an insert raises.
    """
    con = pymysql.connect(host='master', user='root', password='123456', db='shujia')
    try:
        with con.cursor() as cursor:
            for clazz, num in kv:
                cursor.execute("insert into clazz_num(clazz,num) values(%s,%s)", (clazz, num))
        # Commit once per partition instead of once per row.
        con.commit()
    finally:
        con.close()

# clazz_num_rdd.foreach(to_mysql_fun)
# Optimization over foreach:
# foreachPartition processes one whole partition per call, so only one
# MySQL connection is opened per partition instead of one per record.
# Commonly used to save RDD data to an external system.
clazz_num_rdd.foreachPartition(to_mysql_fun)