package com.navinfo.opentsp.platform.computing.analysis.util;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.PairFlatMapFunction;
import org.apache.spark.broadcast.Broadcast;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import scala.Tuple2;

import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Optional;

/**
 * Loads and broadcasts a lookup table from tile id to district codes.
 * <p>
 * Each entry maps a 12-digit tile id to a comma-separated pair of 6-digit
 * district codes ({@code "lc01,lc02"}; {@code "0"} stands in for a missing
 * second code). The map is built once per JVM and published to executors as
 * a Spark {@link Broadcast} via double-checked locking.
 * <p>
 * Created by neo on 2017/6/15.
 */
public class TileMap implements Serializable {

    // Shared singleton broadcast; volatile is required for the
    // double-checked locking in the getInstance(...) overloads.
    private static volatile Broadcast<HashMap<Long, String>> instance = null;

    /**
     * Returns the broadcast tile map, loading it on first use from a
     * fixed-width text file resolved through Hadoop's FileSystem.
     * <p>
     * NOTE(review): both {@code getInstance} overloads share the same
     * {@code instance} field, so whichever overload runs first determines
     * the broadcast contents for the whole JVM.
     *
     * @param jsc  Spark context used to create the broadcast
     * @param path file path understood by Hadoop (e.g. hdfs:// or file://)
     * @return the shared broadcast of the tile map
     */
    public static Broadcast<HashMap<Long, String>> getInstance(JavaSparkContext jsc, String path) {
        if (instance == null) {
            synchronized (TileMap.class) {
                if (instance == null) {
                    instance = jsc.broadcast(loadTileMap(path));
                }
            }
        }
        return instance;
    }

    /**
     * Returns the broadcast tile map, loading it on first use via a
     * Spark SQL query.
     *
     * @param spark session used to execute {@code sql}
     * @param jsc   Spark context used to create the broadcast
     * @param sql   query producing columns {@code tile_id}, {@code lc01_id}, {@code lc02_id}
     * @return the shared broadcast of the tile map
     */
    public static Broadcast<HashMap<Long, String>> getInstance(SparkSession spark, JavaSparkContext jsc, String sql) {
        if (instance == null) {
            synchronized (TileMap.class) {
                if (instance == null) {
                    instance = jsc.broadcast(loadTileMap(spark, sql));
                }
            }
        }
        return instance;
    }

    /** Quick manual check of the fixed-width line parsing. */
    public static void main(String[] args) {
        HashMap<Long, String> demo = new HashMap<>();
        parseLine("152308112493 653000 650000", demo);
        demo.forEach((k, v) -> {
            System.out.println(k);
            System.out.println(v);
        });
    }

    /**
     * Parses one fixed-width mapping line and stores the result in {@code map}.
     * Layout: cols [0,12) tile id, [13,19) first district code, optional
     * [20,26) second district code ({@code "0"} when absent).
     * <p>
     * Lines too short to contain the mandatory fields are skipped, so one
     * malformed/blank line no longer aborts the whole load (the old inline
     * parse threw StringIndexOutOfBoundsException into the broad catch).
     *
     * @param v   raw line (may be null)
     * @param map destination map: tile id -> "lc01,lc02"
     */
    private static void parseLine(String v, Map<Long, String> map) {
        if (v == null || v.length() < 19) {
            return; // skip malformed line instead of aborting the load
        }
        Long key = Long.valueOf(v.substring(0, 12));
        String lc01 = v.substring(13, 19);
        String lc02 = v.length() > 25 ? v.substring(20, 26) : "0";
        map.put(key, lc01 + "," + lc02);
    }

    /**
     * Reads the tile mapping from a file resolved through Hadoop's FileSystem.
     * <p>
     * Fixes the original resource handling: try-with-resources now closes
     * reader, stream, and FileSystem in reverse-open order (the old finally
     * block closed the FileSystem before the stream it had opened) and the
     * charset is pinned to UTF-8 instead of the platform default.
     *
     * @param filePath Hadoop-resolvable path to the mapping file
     * @return map of tile id -> "lc01,lc02"; empty (or partial) on error
     */
    private static HashMap<Long, String> loadTileMap(String filePath) {
        HashMap<Long, String> map = new HashMap<>();
        try (FileSystem fs = FileSystem.newInstance(URI.create(filePath), new Configuration());
             FSDataInputStream is = fs.open(new Path(filePath));
             BufferedReader reader =
                     new BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8))) {
            String v;
            while ((v = reader.readLine()) != null) {
                parseLine(v, map);
            }
        } catch (Exception e) {
            // Best-effort load preserved from the original: log and return what we have.
            e.printStackTrace();
        }
        return map;
    }

    /**
     * Reads the tile mapping from a local file (same line format as
     * {@link #loadTileMap(String)}).
     *
     * @param filePath local filesystem path
     * @return map of tile id -> "lc01,lc02"; empty (or partial) on error
     */
    private static HashMap<Long, String> loadLocalTileMap(String filePath) {
        HashMap<Long, String> map = new HashMap<>();
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(new FileInputStream(filePath), StandardCharsets.UTF_8))) {
            String v;
            while ((v = reader.readLine()) != null) {
                parseLine(v, map);
            }
        } catch (Exception e) {
            // Best-effort load preserved from the original: log and return what we have.
            e.printStackTrace();
        }
        return map;
    }

    /**
     * Builds the tile mapping from a Spark SQL query.
     * <p>
     * The hand-rolled anonymous Iterator inside mapPartitionsToPair is
     * replaced by the equivalent per-row mapToPair lambda; the collected
     * result is identical.
     *
     * @param spark session used to execute {@code sql}
     * @param sql   query producing columns tile_id, lc01_id, lc02_id
     * @return map of tile id -> "lc01_id,lc02_id" ("0" substituted for nulls)
     */
    private static HashMap<Long, String> loadTileMap(SparkSession spark, String sql) {
        Dataset<Row> tileDs = spark.sql(sql);
        JavaPairRDD<Long, String> tileRDD = tileDs.toJavaRDD().mapToPair(row -> {
            long tile = Long.parseLong(String.valueOf(Optional.ofNullable(row.getAs("tile_id")).orElse("0")));
            String lc01 = String.valueOf(Optional.ofNullable(row.getAs("lc01_id")).orElse("0"));
            String lc02 = String.valueOf(Optional.ofNullable(row.getAs("lc02_id")).orElse("0"));
            return new Tuple2<>(tile, lc01 + "," + lc02);
        });
        // collectAsMap keeps the last value per duplicate key, as before.
        return new HashMap<>(tileRDD.collectAsMap());
    }

}
