package com.spark.zhou.demo.sparksql.sqlcontext;

import com.spark.zhou.demo.bean.City;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.*;

/**
 * @Description: Tests SQL operations over gzip-compressed input data
 * @Author: ZhOu
 * @Date: 2018/5/31
 */
public class GzData {

    // Classpath root + file name. NOTE: the original used
    // GzData.class.getClass().getResource(...), which resolves against
    // Class.class (loaded by the bootstrap loader) and can return null,
    // NPE-ing during class initialization. Resolve against GzData itself.
    private static final String GZ_PATH =
            GzData.class.getResource("/").getPath() + "city.gz";

    /**
     * Reads the tab-separated, gzip-compressed city file, maps each line to a
     * {@code City} bean, registers the result as the temp view {@code city},
     * and runs two sample queries.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        SparkSession sparkSession = SparkSession.builder()
                .appName("GzData")
                .getOrCreate();
        try {
            JavaSparkContext jsc = new JavaSparkContext(sparkSession.sparkContext());

            // textFile transparently decompresses .gz input.
            JavaRDD<String> javaRDD = jsc.textFile(GZ_PATH);
            JavaRDD<City> cityJavaRDD = javaRDD.map(GzData::parseCity);

            SQLContext sqlContext = sparkSession.sqlContext();
            Dataset<Row> cityDataset = sqlContext.createDataFrame(cityJavaRDD, City.class);

            // createOrReplaceTempView throws no checked AnalysisException, so the
            // original catch-print-and-continue (which let the queries below fail
            // on a missing view) is no longer needed.
            cityDataset.createOrReplaceTempView("city");

            sqlContext.sql("select * from city where regionName='上海'").show();

            // Total number of cities under Shanxi (father region code 1156140000).
            sqlContext.sql("select count(*) from city where fatherRegionCode=1156140000").show();
        } finally {
            // Release driver/executor resources even if a query fails.
            sparkSession.stop();
        }
    }

    /**
     * Parses one tab-separated line into a {@code City} bean.
     * Expected columns, in order: id, regionCode, regionName,
     * fatherRegionName, fatherRegionCode, status, isChina.
     *
     * @param line one raw line from the input file
     * @return the populated bean
     * @throws NumberFormatException if a numeric column is malformed
     */
    private static City parseCity(String line) {
        String[] v = line.split("\t");
        City city = new City();
        city.setId(Integer.parseInt(v[0].trim()));
        city.setRegionCode(Integer.parseInt(v[1].trim()));
        city.setRegionName(v[2]);
        city.setFatherRegionName(v[3]);
        city.setFatherRegionCode(Integer.parseInt(v[4].trim()));
        city.setStatus(Integer.parseInt(v[5].trim()));
        city.setIsChina(Integer.parseInt(v[6].trim()));
        return city;
    }
}
