package com.swhy
import co.elastic.clients.elasticsearch.transform.Source
import org.apache.spark.sql.SparkSession

import java.io.File
import scala.io.BufferedSource
import scala.reflect.internal.ClassfileConstants.instanceof
/**
 * Scratch/demo object: builds a small column-oriented sample dataset and,
 * for each column, reports whether its first value is a [[BigDecimal]].
 *
 * NOTE(review): the Spark/Elasticsearch imports at the top of the file are
 * unused by this object — presumably left over from an earlier experiment.
 */
object spark_es2 {

  /**
   * Entry point. Prints one boolean per column of the sample data:
   * `true` when the column's first value is a `BigDecimal`, else `false`.
   *
   * For the 3-entry immutable Map below, entries iterate in insertion
   * order, so the output is: false (asset_acct), false (age), true (price).
   *
   * @param args unused
   */
  def main(args: Array[String]): Unit = {
    // Column name -> column values. Deliberately heterogeneous, hence
    // the explicit List[Any] element type.
    val data110: Map[String, List[Any]] = Map(
      "asset_acct" -> List("123456", "654321"),
      "age"        -> List(123456, 654321),
      "price"      -> List(BigDecimal(4.14), BigDecimal(3.14))
    )

    // Iterate values directly instead of `keys` + `get(v).toList` + `(0)(0)`:
    // avoids a redundant map lookup, the Option-to-List-of-Lists wrapping,
    // and an IndexOutOfBoundsException on an empty column.
    data110.values.foreach(column => println(isFirstBigDecimal(column)))

    //print(data110)
  }

  /**
   * Checks whether the first element of `values` is a `BigDecimal`.
   *
   * @param values column values (possibly empty)
   * @return true iff `values` is non-empty and its head is a `BigDecimal`
   */
  def isFirstBigDecimal(values: List[Any]): Boolean =
    values.headOption.exists(_.isInstanceOf[BigDecimal])

//  def doFile(filePath: String): Any = ???
//
//  def main(args: Array[String]): Unit = {
//    val filePath: String = args(0)
//
//  }
//  def doFile(fileName: String): String = {
//    val file: File = new File(fileName)
//    import java.io.FileInputStream
//    val stream: FileInputStream = new FileInputStream(file)
//    val buff: BufferedSource = Source.fromInputStream(stream)
//    // read and assemble the SQL
//    val sql = buff
//      .getLines()
//      .mkString("\n")
//    sql
//  }

}