/**
 * Streams comma-separated records ("id,name,age,city") from a TCP socket,
 * parses them against a fixed 4-column schema, and for each 5-second batch
 * prints a per-city count and summary statistics for the age column.
 *
 * Usage: WindowsSocketCSVProcessor [host] [port]
 *   host defaults to "localhost", port defaults to 9999.
 */
object WindowsSocketCSVProcessor {
  def main(args: Array[String]): Unit = {
    // Required on Windows so Hadoop's shell utilities (winutils) can be located.
    System.setProperty("hadoop.home.dir", "C:\\path\\to\\hadoop")

    // local[2]: at least two threads — one for the socket receiver, one for processing.
    val conf = new SparkConf()
      .setAppName("WindowsSocketCSVProcessor")
      .setMaster("local[2]")
    val ssc = new StreamingContext(conf, Seconds(5))

    // Source host/port are overridable from the command line; defaults preserve
    // the original hard-coded behavior.
    val host = args.lift(0).getOrElse("localhost")
    val port = args.lift(1).map(_.toInt).getOrElse(9999)
    val lines = ssc.socketTextStream(host, port)

    // Expected CSV layout: id,name,age,city
    val schema = StructType(Array(
      StructField("id", IntegerType, nullable = true),
      StructField("name", StringType, nullable = true),
      StructField("age", IntegerType, nullable = true),
      StructField("city", StringType, nullable = true)
    ))

    // Parse each line into a Row; malformed lines (wrong field count or
    // non-numeric id/age) are logged and dropped rather than failing the batch.
    val processedData = lines.flatMap { line =>
      try {
        val fields = line.split(",").map(_.trim)
        if (fields.length == schema.length) {
          Some(Row(
            fields(0).toInt,
            fields(1),
            fields(2).toInt,
            fields(3)
          ))
        } else {
          None
        }
      } catch {
        // NonFatal: let fatal errors (OOM, interrupts) propagate instead of
        // being swallowed with the parse failures.
        case NonFatal(e) =>
          // stderr so diagnostics don't interleave with the report output below.
          System.err.println(s"Error parsing line: $line, ${e.getMessage}")
          None
      }
    }

    // Per batch: build a DataFrame from the parsed rows and print aggregations.
    processedData.foreachRDD { rdd =>
      if (!rdd.isEmpty()) {
        // getOrCreate reuses the singleton SparkSession across batches.
        val spark =
          SparkSession.builder.config(rdd.sparkContext.getConf).getOrCreate()
        val df = spark.createDataFrame(rdd, schema)

        val cityCounts = df.groupBy("city").count()
        println("=== Record count by city ===")
        cityCounts.show()

        val ageStats = df.describe("age")
        println("=== Age statistics ===")
        ageStats.show()
      }
    }

    // Report how many raw lines arrived in each batch (pre-parsing).
    lines.count().map(cnt => s"Received $cnt records").print()

    ssc.start()
    ssc.awaitTermination()
  }
}
