package com.navinfo.tripanalysis.service.impl;

import com.navinfo.tripanalysis.service.LoadTileService;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

import java.util.Iterator;

/**
 * Loads tile-to-region mapping data from a parquet dataset on HDFS.
 * (Original Javadoc: 瓦片与区域数据加载 — "tile and region data loading".)
 *
 * @author 沈东生 (Shen Dongsheng)
 */
public class LoadTileServiceImpl implements LoadTileService {

    // HDFS location of the parquet dataset containing the
    // tile_id / lc01_id columns. Injected via setHDFSPath().
    private String HDFSPath;

    /**
     * Reads the parquet data at {@link #getHDFSPath()} and returns a pair RDD
     * keyed by {@code tile_id} with {@code lc01_id} as the value.
     *
     * @param spark active Spark session used to read the parquet files
     * @param jsc   Java Spark context — NOTE(review): unused in this method;
     *              presumably required by the {@code LoadTileService}
     *              interface signature, confirm against the interface
     * @return pair RDD of (tile_id, lc01_id)
     */
    public JavaPairRDD<Long, Integer> load(SparkSession spark, JavaSparkContext jsc) {
        // The original implementation wrapped a hand-written anonymous
        // Iterator inside mapPartitionsToPair. That iterator violated the
        // Iterator contract (next() never threw NoSuchElementException when
        // exhausted) and provided no partition-level batching benefit, since
        // the transform is strictly one row -> one pair. A plain mapToPair
        // expresses the same per-row mapping correctly and more simply.
        return spark.read().parquet(getHDFSPath())
                .select("tile_id", "lc01_id")
                .toJavaRDD()
                .mapToPair(row -> new Tuple2<>(
                        row.getLong(row.fieldIndex("tile_id")),
                        row.getInt(row.fieldIndex("lc01_id"))));
    }

    /** @return the HDFS path of the parquet dataset to load */
    public String getHDFSPath() {
        return HDFSPath;
    }

    /** @param HDFSPath HDFS path of the parquet dataset to load */
    public void setHDFSPath(String HDFSPath) {
        this.HDFSPath = HDFSPath;
    }
}
