/*
 * Copyright (c) 2022. China Mobile (SuZhou) Software Technology Co.,Ltd. All rights reserved.
 * Lakehouse is licensed under Mulan PSL v2.
 * You can use this software according to the terms and conditions of the Mulan PSL v2.
 * You may obtain a copy of Mulan PSL v2 at:
 *          http://license.coscl.org.cn/MulanPSL2
 * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
 * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
 * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
 * See the Mulan PSL v2 for more details.
 */

package com.chinamobile.cmss.spark

import org.apache.spark.sql.SparkSession

object SparkSqlEngine {

  /**
   * Runs a single SQL statement on a Hive-enabled Spark session and, when the
   * query produces rows, persists the (row-limited) result to Redis under a
   * key derived from the driver pod name.
   *
   * @param args args(0) = SQL statement to execute (required);
   *             args(1) = optional row limit as an integer (defaults to 400).
   * @throws Exception if no SQL statement is supplied.
   */
  def main(args: Array[String]): Unit = {
    // Validate arguments BEFORE building a session: a missing SQL statement
    // should fail fast instead of spinning up a Hive-enabled session first.
    if (args.isEmpty) throw new Exception("Can not get sql context!")
    val sqlText = args(0)
    // Second argument caps the number of result rows written out; 400 rows
    // is the default ceiling. NumberFormatException propagates on bad input.
    val rowLimit = if (args.length > 1) args(1).toInt else 400

    val spark = SparkSession.builder().enableHiveSupport().getOrCreate()
    try {
      val df = spark.sql(sqlText).limit(rowLimit)
      // Cache so the emptiness probe below and the subsequent write do not
      // recompute the query twice.
      df.cache()

      if (df.take(1).nonEmpty) {
        // The query returned at least one row: write the result set to Redis.
        // The driver pod name doubles as the Redis table key so callers can
        // locate this job's output.
        val resultKey = spark.sparkContext.getConf.get("spark.kubernetes.driver.pod.name")
        df.write
          .format("org.apache.spark.sql.redis")
          .option("table", resultKey)
          .save()
      }
    } finally {
      // Always release cluster resources, even when the query or write fails.
      spark.stop()
    }
  }
}
