package com.yk.mr.util;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaSparkContext;

import java.io.*;
import java.net.URI;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

/**
 * Created by Limit on 2017/8/18.
 */
public class SparkFileIO {

    /** Eagerly created shared instance; obtain it via {@link #getSparkFileIo()}. */
    private static final SparkFileIO sparkFileIo = new SparkFileIO();

    private final JavaSparkContext javaSparkContext;
    private final Configuration configuration;

    /**
     * Creates a local-mode Spark context and a default Hadoop configuration.
     *
     * <p>NOTE(review): kept public for backward compatibility, but every instance
     * starts its own {@code JavaSparkContext}; prefer {@link #getSparkFileIo()}.
     */
    public SparkFileIO() {
        SparkConf conf = new SparkConf()
                .setAppName("spark io")
                .setMaster("local");
        this.javaSparkContext = new JavaSparkContext(conf);
        this.configuration = new Configuration();
    }

    /**
     * Copies {@code inputStream} to the file {@code outFilePath + outFileName},
     * overwriting any existing file. The input stream is always closed.
     *
     * @param inputStream source data; closed when this method returns
     * @param outFilePath destination directory URI (e.g. an HDFS path prefix)
     * @param outFileName destination file name, appended verbatim to the path
     * @throws IOException if the copy fails — previously swallowed with
     *                     printStackTrace(), leaving callers unaware of failures
     */
    public void sparkIOUpload(InputStream inputStream, String outFilePath, String outFileName) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(outFilePath), this.configuration);
        try (InputStream in = inputStream;
             BufferedOutputStream outputStream =
                     new BufferedOutputStream(fs.create(new Path(outFilePath + outFileName), true))) {
            byte[] buffer = new byte[1024];
            int len;
            while ((len = in.read(buffer)) != -1) {
                outputStream.write(buffer, 0, len);
            }
        }
    }

    /**
     * Copies the file {@code inFilePath + inFileName} into {@code outputStream}.
     * The output stream is always closed.
     *
     * @param inFilePath   source directory URI
     * @param inFileName   source file name, appended verbatim to the path
     * @param outputStream destination; closed when this method returns
     * @throws IOException if the copy fails — previously swallowed with
     *                     printStackTrace(), leaving callers unaware of failures
     */
    public void sparkIODownload(String inFilePath, String inFileName, OutputStream outputStream) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(inFilePath), this.configuration);
        try (OutputStream out = outputStream;
             BufferedInputStream inputStream =
                     new BufferedInputStream(fs.open(new Path(inFilePath + inFileName)))) {
            byte[] buffer = new byte[1024];
            int len;
            while ((len = inputStream.read(buffer)) != -1) {
                out.write(buffer, 0, len);
            }
        }
    }

    /**
     * Deletes the file {@code filePath + fileName}. Delegates to
     * {@link #sparkDeleteFile(String)}.
     *
     * @throws IOException if the filesystem cannot be reached or closed
     */
    public void sparkDeleteFile(String filePath, String fileName) throws IOException {
        sparkDeleteFile(filePath + fileName);
    }

    /**
     * Deletes the file (or directory tree) at {@code file} immediately.
     *
     * <p>The original used {@code deleteOnExit}, which only schedules removal for
     * when the FileSystem is closed and reports nothing on failure;
     * {@code delete(path, true)} removes the path right away.
     *
     * @throws IOException if the filesystem cannot be reached or closed
     */
    public void sparkDeleteFile(String file) throws IOException {
        Path path = new Path(file);
        try (FileSystem fs = FileSystem.get(URI.create(file), this.configuration)) {
            fs.delete(path, true);
        }
    }

    /**
     * Reads every file under {@code file} via {@code wholeTextFiles} and parses
     * lines of the form {@code (topic,v1,v2,...)} into a map of
     * topic → list of integer values, preserving first-seen topic order.
     *
     * <p>Bug fix: the original {@code put(topic, new ArrayList<>())} discarded
     * values collected from an earlier line with the same topic; this version
     * merges them via {@code computeIfAbsent}.
     *
     * @param file path or glob understood by {@code wholeTextFiles}
     * @return topic → parsed integer values (insertion-ordered)
     */
    public Map<String, List<Integer>> sparkAnalysisAppData(String file) {
        /* 1. Collect map<file path, file contents>. */
        JavaPairRDD<String, String> javaPairRDD = javaSparkContext.wholeTextFiles(file);
        Map<String, String> map = javaPairRDD.collectAsMap();
        /* 2. Output map, insertion-ordered. */
        Map<String, List<Integer>> valueMap = new LinkedHashMap<>();
        /* 3. Parse each line: first token is the topic, the rest are integers. */
        for (Map.Entry<String, String> entry : map.entrySet()) {
            for (String line : entry.getValue().split("\n")) {
                if (line.isEmpty()) {
                    continue; // blank lines would otherwise create a bogus "" topic
                }
                String topic = null;
                for (String word : line.replace("(", "").replace(")", "").split(",")) {
                    if (topic == null) {
                        topic = word;
                        valueMap.computeIfAbsent(topic, k -> new ArrayList<>());
                    } else {
                        valueMap.get(topic).add(Integer.valueOf(word));
                    }
                }
            }
        }
        return valueMap;
    }

    /** Stops the underlying Spark context; the instance is unusable afterwards. */
    public void close() {
        this.javaSparkContext.close();
    }

    /** @return the shared eagerly-initialized instance */
    public static SparkFileIO getSparkFileIo() {
        return sparkFileIo;
    }
}
