// This job extracts the cleaned data, filters it, and stores the result in a MySQL table.  20240722
package com

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{Row, SQLContext}
import org.apache.spark.sql.types.{FloatType, StringType, StructField, StructType}
import org.apache.spark.{SparkConf, SparkContext}

import java.util.Properties

object FilterToMySQL {
  /**
   * Reads cleaned stock rows from HDFS, keeps only the stocks whose number of
   * up-days (zhangfu > 0) exceeds their number of down-days, and writes the
   * surviving rows into a new MySQL table named "stock".
   *
   * Input line layout (tab-separated, per the sample in the original comment):
   *   date  open  close  low  high  zhangfu  gpName  [volume]
   * NOTE(review): the sample shows 7 fields but the Row below reads arr(7)
   * (volume) and the schema has 8 columns — confirm the real data carries a
   * volume field, otherwise this throws ArrayIndexOutOfBoundsException.
   */
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setMaster("local").setAppName("阳线大于阴线数据并保存到数据库"))
    try {
      val resultRDD = sc.textFile("/clearresultdata") // one row of cleaned data per line
        .map(line => (line.split("\t")(6), line))     // key each line by stock name (field 6)
        .groupByKey()                                 // group all lines of the same stock
        .filter { case (_, lines) =>
          // Keep the stock only when up-days strictly outnumber down-days
          // (zhangfu <= 0 counts as a down-day, matching the original else-branch).
          val upDays = lines.count(_.split("\t")(5).toFloat > 0)
          val downDays = lines.size - upDays
          upDays > downDays
        }
        .flatMap(_._2) // ungroup back to individual lines

      val sqlContext = new SQLContext(sc)

      // NOTE(review): this split uses "\\s+" while the filter above splits on
      // "\t". Kept as-is to preserve behavior, but confirm the delimiter is
      // consistent in the source data (a gpName containing spaces would break).
      val rowRDD: RDD[Row] = resultRDD.map { line =>
        val arr = line.split("\\s+")
        Row(arr(0), arr(1).toFloat, arr(2).toFloat, arr(3).toFloat, arr(4).toFloat,
          arr(5), arr(6), arr(7))
      }

      // Schema describing each field so the RDD can be mapped to DataFrame columns.
      val schema = StructType(
        List(
          StructField("date", StringType),
          StructField("table_stock_openPrice", FloatType),
          StructField("table_stock_closePrice", FloatType),
          StructField("table_stock_lowPrice", FloatType),
          StructField("table_stock_hiPrice", FloatType),
          StructField("table_stock_zhangfu", StringType),
          StructField("table_stock_gpName", StringType),
          StructField("table_stock_volume", StringType)
        )
      )

      // Convert the RDD to a DataFrame using the schema above.
      val df = sqlContext.createDataFrame(rowRDD, schema)

      // JDBC connection properties for the target MySQL instance.
      val prop = new Properties()
      prop.put("user", "root")
      prop.put("password", "123456")
      prop.put("driver", "com.mysql.cj.jdbc.Driver")

      // Default SaveMode.ErrorIfExists: creates the "stock" table and fails if
      // it already exists — matching the original "create a new table" intent.
      df.write.jdbc("jdbc:mysql://localhost:3306/mydatabase", "stock", prop)
    } finally {
      sc.stop() // release the SparkContext even when the job fails mid-way
    }
  }
}

