package rdd;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

/**
 * Reads JSON-like user records (one per line, e.g. {"name":"alice","age":30})
 * from data/user.txt and prints the integer average of the "age" attribute.
 */
public class Spark_100 {
    public static void main(String[] args) {
        final SparkConf conf = new SparkConf();
        conf.setMaster("local[*]");
        conf.setAppName("spark");
        final JavaSparkContext jsc = new JavaSparkContext(conf);
        final JavaRDD<String> rdd = jsc.textFile("data/user.txt");

        // cache(): both reduce() and count() below are actions; without caching
        // the file would be read and parsed twice.
        final JavaRDD<Integer> ageRdd = rdd.map(Spark_100::parseAge).cache();

        final long count = ageRdd.count();
        if (count == 0) {
            // reduce() throws on an empty RDD; treat "no records" as average 0.
            System.out.println(0);
        } else {
            // Integer (long) division, matching the original truncating average.
            final long avgAge = ageRdd.reduce(Integer::sum) / count;
            System.out.println(avgAge);
        }
        jsc.close();
    }

    /**
     * Extracts the "age" value from one JSON-like line such as
     * {"name":"alice","age":30}.
     *
     * @param line a single record; may carry leading/trailing whitespace
     * @return the parsed age, or 0 when the line has no "age" attribute
     */
    private static int parseAge(String line) {
        // Lines may be padded with whitespace, so trim before slicing.
        final String trimmed = line.trim();
        // Drop the surrounding braces to get the raw attribute list.
        final String body = trimmed.substring(1, trimmed.length() - 1);
        // Attributes are comma-separated "key":value pairs.
        for (String attr : body.split(",")) {
            final String[] kv = attr.split(":");
            // kv[0] is the key, kv[1] the value; guard against malformed pairs
            // with no value part (the original i+=2 loop could read past the end).
            if (kv.length >= 2 && "\"age\"".equals(kv[0].trim())) {
                // Trim the value too — the original parse fails on "age": 30.
                return Integer.parseInt(kv[1].trim());
            }
        }
        return 0;
    }
}
