package com.example.toy.engine.dataset;

import com.example.toy.core.dataset.BigCollection;
import com.example.toy.core.dataset.BigPairCollection;
import com.example.toy.core.dataset.Pair;
import com.example.toy.core.engine.EngineContext;
import com.example.toy.core.function.SerializableFunction;
import com.example.toy.core.function.SerializablePairFunction;
import com.example.toy.engine.spark.SparkEngineContext;
import lombok.Getter;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.PairFunction;
import scala.Tuple2;

import java.util.Iterator;
import java.util.List;

/**
 * Spark-backed implementation of {@link BigCollection} that delegates every
 * operation to an underlying {@link JavaRDD}. Each derived collection shares
 * the same {@link SparkEngineContext}.
 *
 * @param <T> element type held by the collection
 * @author shenb
 * @date 2021-12-20 21:21
 */
@Getter
public class SparkBigCollection<T> implements BigCollection<T> {

  /** Engine context propagated to every derived collection. */
  private final SparkEngineContext jsc;

  /** Backing RDD holding the elements. */
  private final JavaRDD<T> rdd;

  public SparkBigCollection(SparkEngineContext jsc, JavaRDD<T> rdd) {
    this.jsc = jsc;
    this.rdd = rdd;
  }

  @Override
  public EngineContext getContext() {
    return this.jsc;
  }

  /** Transforms each element with {@code map}; lazy, like the underlying RDD. */
  @Override
  public <R> BigCollection<R> map(SerializableFunction<T, R> map) {
    // Method reference instead of an anonymous inner class: anonymous classes in
    // an instance method implicitly capture the enclosing SparkBigCollection,
    // which risks NotSerializableException when Spark ships the closure to
    // executors. The lambda captures only the serializable `map`.
    JavaRDD<R> result = rdd.map(map::apply);
    return new SparkBigCollection<>(jsc, result);
  }

  /** Materializes every element on the driver. */
  @Override
  public List<T> collect() {
    return rdd.collect();
  }

  /**
   * Concatenates this collection with {@code r}.
   *
   * @throws ClassCastException if {@code r} was produced by a different engine
   */
  @Override
  public BigCollection<T> union(BigCollection<T> r) {
    SparkBigCollection<T> other = (SparkBigCollection<T>) r;
    return new SparkBigCollection<>(jsc, rdd.union(other.getRdd()));
  }

  /** Maps each element to a key/value {@link Pair}, yielding a pair collection. */
  @Override
  public <K, V> BigPairCollection<K, V> mapToPair(SerializablePairFunction<T, K, V> map) {
    JavaPairRDD<K, V> result =
        rdd.mapToPair(
            t -> {
              Pair<K, V> pair = map.call(t);
              return new Tuple2<>(pair.getLeft(), pair.getRight());
            });
    // Diamond added: the original used the raw SparkBigPairCollection type,
    // producing an unchecked-conversion warning.
    return new SparkBigPairCollection<>(jsc, result);
  }

  /** Groups elements by the key extracted via {@code map}. */
  @Override
  public <K> BigPairCollection<K, Iterable<T>> groupBy(SerializableFunction<T, K> map) {
    JavaPairRDD<K, Iterable<T>> result = this.rdd.groupBy(map::apply);
    return new SparkBigPairCollection<>(jsc, result);
  }

  /** Maps each element to zero or more elements and flattens the results. */
  @Override
  public <R> BigCollection<R> flatMap(SerializableFunction<T, Iterator<R>> map) {
    JavaRDD<R> result = rdd.flatMap(map::apply);
    return new SparkBigCollection<>(jsc, result);
  }
}
