package com.huahua.bigdata.spark.req;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

/**
 * Spark driver that reads JSON-like user records (one {@code {"k":v,...}} object
 * per line) from {@code data/user.txt} and prints the average age (integer
 * division, truncated toward zero).
 */
public class UserAvgAge {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf();
        conf.setMaster("local");
        conf.setAppName("UserAvgAge");
        JavaSparkContext jsc = new JavaSparkContext(conf);
        try {
            // Use the text file as the data source, one user record per line.
            JavaRDD<String> dataRDD = jsc.textFile("data/user.txt");

            // Extract each record's age (0 if the record has no "age" attribute).
            JavaRDD<Integer> ageRDD = dataRDD.map(
                    line -> {
                        int age = 0;
                        String lineData = line.trim();
                        // Strip the surrounding '{' and '}'.
                        String attrsData = lineData.substring(1, lineData.length() - 1);

                        for (String attr : attrsData.split(",")) {
                            // Each attribute is a "key":value pair, so splitting on ':'
                            // yields exactly two parts.
                            // BUGFIX: the original inner loop used `i <= kv.length`,
                            // which read kv[kv.length] and threw
                            // ArrayIndexOutOfBoundsException for every non-age attribute.
                            String[] kv = attr.trim().split(":");
                            if (kv.length == 2 && "\"age\"".equals(kv[0].trim())) {
                                // trim() the value so `"age": 30` parses too.
                                age = Integer.parseInt(kv[1].trim());
                                break;
                            }
                        }
                        return age;
                    }
            );

            // Cache: both reduce() and count() are actions; without caching the
            // file would be read and parsed twice.
            ageRDD.cache();
            long avgAge = ageRDD.reduce(Integer::sum) / ageRDD.count();

            System.out.println(avgAge);
        } finally {
            // Always release the SparkContext, even if a job fails.
            jsc.close();
        }
    }
}
