package org.shj.spark.dataframe;

import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import static org.apache.spark.sql.functions.*;

/**
 * Input record layout (Hive table: forum):
 * date:      yyyy-MM-dd
 * timestamp: epoch timestamp of the visit
 * userId:    visitor id
 * pageId:    page id
 * chanel:    forum section the page belongs to (column name as stored)
 * ip:        visitor's IP address
 */
/**
 * Computes page-view (PV) and unique-visitor (UV) statistics over the Hive
 * table {@code spark_hive.forum}, first with Spark SQL queries and then with
 * the equivalent DataFrame API pipeline. Results are printed via {@code show()}.
 */
public class SparkSQLHive {

	public static void main(String[] args) {
		SparkSession spark = SparkSession.builder()
				.appName("SparkSQLHiveSample")
				.enableHiveSupport()
				.getOrCreate();
		spark.sparkContext().setLogLevel("WARN");

		try {
			spark.sql("use spark_hive");

			// PV: hit count per (dateStr, pageId), top 10 by count.
			String pvSql = "select dateStr, pageId, count(pageId) as pv "
					+ "from forum group by dateStr, pageId order by pv desc limit 10";
			Dataset<Row> pv = spark.sql(pvSql);
			pv.show();

			// UV: distinct visitors per (dateStr, pageId). Counting pageId over the
			// de-duplicated (dateStr, pageId, userId) rows equals the distinct-user count.
			// FIX: the derived table must carry an alias ("t") — HiveQL and older
			// Spark SQL parsers reject an un-aliased subquery in FROM.
			Dataset<Row> uv = spark.sql("select dateStr, pageId, count(pageId) uv from "
					+ "(select distinct dateStr, pageId, userId from forum) t "
					+ "group by dateStr, pageId order by uv desc");
			uv.show(10);

			Dataset<Row> df = spark.sql("select dateStr, pageId, userId from forum");

			// Same UV computation expressed with the DataFrame API; result matches
			// the SQL query above.
			df.distinct()
					.groupBy("dateStr", "pageId")
					.agg(count("userId").alias("cnt"))
					.sort(desc("cnt"))
					.show(10);
		} finally {
			// FIX: release the SparkSession even when a query throws.
			spark.stop();
		}
	}

}
