import org.apache.spark.sql.SparkSession

object zk3 {
  /**
   * Entry point: connects to the Hive metastore at cdh02:9083 and runs two
   * interview-style SQL exercises against the `zg6_stock` database:
   *   1. the lowest-paid employee in EACH department;
   *   2. the number of employees falling into each salary grade.
   *
   * NOTE: `emp.depton` is a typo for "deptno", but it matches the table DDL
   * (see the commented-out CREATE TABLE below), so queries must keep using it.
   */
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("app")
      .config("hive.metastore.uris", "thrift://cdh02:9083")
      .enableHiveSupport().getOrCreate()

    spark.sql("use zg6_stock")

    // One-time setup, kept for reference:
    //spark.sql("create table emp(empno int,ename String,job String,mgr int,hiredate String,sal decimal(10,2),comm decimal(10,2),depton int);")
    //spark.sql("create table dept(deptno int,dname String,loc String);")
    //spark.sql("create table sal(grade_number int,losal_number int,hisal_number int);")

    spark.sql("show tables").show()

    //spark.sql("insert into emp values(7369,'SMITH','CLERK',7902,'1960/12/17',800,0,20);")
    //spark.sql("insert into emp values(7499,'ALLEN','SALESMAN',7698,'1961/2/10',1600,300,30);")
    //spark.sql("insert into emp values(7521,'WARD','SALESMAN',7698,'1961/2/22',1250,500,30);")
    //spark.sql("insert into emp values(7566,'TDMNS','MANAGER',7893,'1961/4/2',2975,0,20);")
    //spark.sql("insert into emp values(7654,'MARTIN','SALESMAN',7698,'1961/9/28',1250,1400,30);")

    //spark.sql("insert into dept values(10,'ACCOUNTING','NEW_YORK');")
    //spark.sql("insert into dept values(20,'RESEAECH','DALLAS');")
    //spark.sql("insert into dept values(30,'SALES','CMICAGO');")
    //spark.sql("insert into dept values(40,'OPERATIONS','BOSTON');")

    //spark.sql("insert into sal values(1,700,1200);")
    //spark.sql("insert into sal values(2,1201,1400);")
    //spark.sql("insert into sal values(3,1401,2000);")
    //spark.sql("insert into sal values(4,2001,3000);")

    spark.sql("select * from emp").show()
    spark.sql("select * from dept").show()
    spark.sql("select * from sal").show()

    // Interview SQL #1: lowest-paid employee of EACH department.
    // BUG FIX: the previous version used a global `order by sal limit 1`,
    // which returned a single row for the whole company instead of one per
    // department. Join each employee against the per-department minimum
    // salary instead (ties within a department are all returned).
    spark.sql(
      """select d.deptno, e.ename, e.sal
        |from emp e
        |join (select depton, min(sal) as min_sal
        |      from emp
        |      group by depton) m
        |  on e.depton = m.depton and e.sal = m.min_sal
        |join dept d
        |  on e.depton = d.deptno
        |order by d.deptno
        |""".stripMargin).show()

    // Interview SQL #2: number of employees in each salary grade.
    // Range-join each employee's salary into its [losal, hisal] grade band;
    // LEFT JOIN from `sal` keeps grades with zero employees (count = 0).
    spark.sql(
      """select s.grade_number, count(e.empno) as emp_count
        |from sal s
        |left join emp e
        |  on e.sal between s.losal_number and s.hisal_number
        |group by s.grade_number
        |order by s.grade_number
        |""".stripMargin).show()

    spark.stop()
  }
}
