package cn.itcast.tags.meta
import org.apache.spark.sql.Column


/**
 * Metadata for loading tag source data from Parquet files, parsed from a
 * rule map such as:
 *
 *   inType=parquet
 *   inPath=datas/parquet
 *   sperator=,
 *   selectFieldNames=name,age
 *
 * @param inPath           path of the Parquet data to read
 * @param sperator         field-name separator from the rule map
 *                         (NOTE: "sperator" is a typo for "separator", but the
 *                         name is part of the public interface and the rule-map
 *                         key, so it is kept as-is)
 * @param selectFieldNames columns to select, one [[Column]] per field name
 */
case class ParquetMeta(
										 inPath: String,
										 sperator: String,
										 selectFieldNames: Array[Column]
									 )

object ParquetMeta{

	/**
	 * Builds a [[ParquetMeta]] from a rule map.
	 *
	 * Required keys: "inPath", "sperator", "selectFieldNames".
	 * A missing key throws `NoSuchElementException` (behavior of `Map.apply`).
	 *
	 * @param ruleMap rule key/value map parsed from the tag rule string
	 * @return metadata describing the Parquet source and selected columns
	 */
	def getParquetMeta(ruleMap: Map[String, String]): ParquetMeta = {

		// Build a Column object for each selected field name.
		import org.apache.spark.sql.functions.col
		// String.split interprets its argument as a regex; quote the separator
		// so regex metacharacters ("." "|" "$" ...) are treated literally.
		val fieldColumns: Array[Column] = ruleMap("selectFieldNames")
			.split(java.util.regex.Pattern.quote(ruleMap("sperator")))
			.map{field => col(field)}

		// Create the ParquetMeta object and return it.
		ParquetMeta(
			ruleMap("inPath"), //
			ruleMap("sperator"), //
			fieldColumns
		)
	}
}