package hivetohbase_java;



import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;
import org.junit.Test;
import scala.collection.Iterator;
import util.HiveUtil;

public class HiveSpark {

    /**
     * Reads a 20-row sample from {@code test_hive.bol_dw_main_material_place_order},
     * casting all non-string columns to string and deriving an MD5 {@code rowkey}
     * from {@code etl_proc_wid} and {@code order_code} (intended as an HBase row key).
     *
     * <p>NOTE(review): the {@code @Test} annotation was removed — JUnit 4 requires
     * test methods to be {@code public void} with no parameters, so annotating a
     * method that returns {@code Dataset<Row>} makes the runner fail at validation
     * time with "Method getColumnsInfo() should be void".
     *
     * <p>Side effects: prints the sampled rows via {@link Dataset#show()}. The
     * {@link SparkSession} is deliberately not stopped here, because the returned
     * dataset is lazy and callers still need the session to consume it.
     *
     * @return the sampled dataset with all columns as strings plus a {@code rowkey} column
     */
    public Dataset<Row> getColumnsInfo() {
        SparkSession sparkSession = HiveUtil.hiveConnect();
        sparkSession.sql("use test_hive");
        // All non-string columns are cast to string so the rows can be written
        // to HBase cells uniformly; rowkey = MD5(etl_proc_wid + "/" + order_code).
        Dataset<Row> rowDataset = sparkSession.sql("select ETL_PROC_WID," +
                "cast(w_insert_dt as string)," +
                "cast(w_update_dt as string)," +
                "order_code," +
                "cast(deco_project_id as string)," +
                "sales_store_code," +
                "sales_store_name," +
                "cast(CUSTOMER_ID as string)," +
                "CUSTOMER_CODE," +
                "CUSTOMER_NAME," +
                "cast(supplier_id as string)," +
                "supplier_name," +
                "supplier_code," +
                "cast(MATERIAL_DESIGNER_ID as string)," +
                "MATERIAL_DESIGNER_NAME," +
                "BRAND_NAME," +
                "cast(SETTLEMENT_STATUS as string)," +
                "cast(CORPORATION_ID as string)," +
                "cast(Main_Material_Order_Node as string)," +
                "cast(pid as string)," +
                "cast(order_date as string)," +
                "cast(MAIN_MATERIAL_CATEGORY_ID as string)," +
                "cast(SETTLEMENT_AMOUNT as string)," +
                "cast(bargain_amount as string)," +
                "report_store_name," +
                "sale_store_name," +
                "store_type_name," +
                "store_syb," +
                "MATERIAL_SALES_CATEGORY_NAME," +
                "GROUP_NAME," +
                "NAME," +
                "MD5(concat_ws('/',etl_proc_wid,order_code)) as rowkey " +
                "from bol_dw_main_material_place_order limit 20");
        // Debug aid: show the sample so the cast/rowkey output can be eyeballed.
        rowDataset.show();
        return rowDataset;
    }
}
