package com.dtsw.jiangxi;

/*
Submit command:
* spark-submit --master yarn --deploy-mode client \
--name Sparktest --executor-memory 3g \
--num-executors 5  --executor-cores 1 \
--driver-memory 4g  --class com.dtsw.jiangxi.test /data4/gis_data/tianchuan/roadTestSiteAlgorithm-1.0-SNAPSHOT.jar
* */
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;


import java.util.HashMap;
import java.util.Map;

public class test {

    /**
     * Entry point: connects to Hive through Spark, switches to the target
     * database, and prints the first 10 rows of the {@code cfg_town} table.
     *
     * <p>Supported arguments ({@code --key=value} form):
     * <ul>
     *   <li>{@code --database} — Hive database to use (default: {@code default})</li>
     *   <li>{@code --p_provincecode} — province code (default: {@code 360000})</li>
     * </ul>
     *
     * @param args program arguments in {@code --key=value} form
     */
    public static void main(String[] args) {
        Map<String, String> params = parseParameters(args);
        String database = params.getOrDefault("database", "default");
        // NOTE(review): parsed but never used in the visible code — kept so
        // existing submit scripts that pass it keep working; confirm intent.
        String provincecode = params.getOrDefault("p_provincecode", "360000");

        SparkSession spark = SparkSession.builder()
                .enableHiveSupport()
                .appName("SparkSQLExample")
                .getOrCreate();

        // Bug fix: the original executed "user <db>", which is not a valid SQL
        // statement and would fail to parse; "USE <db>" is the statement that
        // switches the current database before the query below runs.
        spark.sql("USE " + database);

        Dataset<Row> ds = spark.sql("select * from cfg_town limit 10");
        System.out.println("查询结果：");
        ds.show();
        spark.stop();
    }

    /**
     * Parses command-line arguments of the form {@code --key=value} into a map.
     * Arguments that do not start with {@code --} or lack an {@code =} are
     * silently ignored; values may themselves contain {@code =} (split limit 2).
     *
     * @param args raw program arguments (may be empty, must not be null)
     * @return map of parsed key/value pairs; never null, possibly empty
     */
    private static Map<String, String> parseParameters(String[] args) {
        Map<String, String> params = new HashMap<>();
        for (String arg : args) {
            if (arg.startsWith("--")) {
                String[] parts = arg.substring(2).split("=", 2);
                if (parts.length == 2) {
                    params.put(parts[0], parts[1]);
                }
            }
        }
        return params;
    }
}

