package com.example.hadoop.others;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;

public class Helper {

    /** URI of the HDFS namenode every helper method talks to. */
    private static final String HDFS_PATH = "hdfs://hadoop000:9000";

    /** User name used for all HDFS operations. */
    private static final String USER_NAME = "zhangxinsen";

    /** HDFS location of the current k-means centroid file. */
    private static final String CENTERS_FILE = "/cluster/kmeans/centers.txt";

    /** Local file the synthetic clustering samples are written to. */
    private static final String LOCAL_DATA_FILE = "cluster.data.txt";

    /** Default number of clusters used when generating sample data. */
    private static final int DEFAULT_CLUSTERS = 4;

    /**
     * Builds a Hadoop configuration whose default filesystem is the
     * configured HDFS cluster.
     *
     * @return a fresh {@link Configuration} pointing at {@code HDFS_PATH}
     */
    public static Configuration getConfiguration() {
        Configuration configuration = new Configuration();
        configuration.set("fs.defaultFS", HDFS_PATH);
        return configuration;
    }

    /**
     * Opens a {@link FileSystem} handle on the configured HDFS cluster,
     * authenticated as {@code USER_NAME}.
     *
     * @return an open filesystem handle; the caller is responsible for closing it
     * @throws URISyntaxException   if the HDFS URI is malformed
     * @throws IOException          if the filesystem cannot be reached
     * @throws InterruptedException if the connection attempt is interrupted
     */
    public static FileSystem getFileSystem() throws URISyntaxException, IOException, InterruptedException {
        // Set the property before connecting; the original code set it after
        // FileSystem.get(), where it could no longer influence this handle.
        System.setProperty("HADOOP_USER_NAME", USER_NAME);
        Configuration configuration = getConfiguration();
        return FileSystem.get(new URI(HDFS_PATH), configuration, USER_NAME);
    }

    /**
     * Generates a local file of synthetic 2-D samples drawn from
     * {@code DEFAULT_CLUSTERS} clusters; see {@link #generateData(int, boolean, int)}.
     *
     * @param nSamples       number of samples to generate
     * @param isNewGenerated whether to (re)generate the file; {@code false} is a no-op
     * @throws IOException if the file cannot be (re)created or written
     */
    public static void generateData(int nSamples, boolean isNewGenerated) throws IOException {
        generateData(nSamples, isNewGenerated, DEFAULT_CLUSTERS);
    }

    /**
     * Generates a local file ({@code cluster.data.txt}) of synthetic 2-D
     * samples grouped around {@code k} random cluster centers. Each line has
     * the form {@code index,x,y}. An existing file is deleted first.
     *
     * @param nSamples       number of samples to generate (must be &gt;= k)
     * @param isNewGenerated whether to (re)generate the file; {@code false} is a no-op
     * @param k              number of clusters to scatter the samples around
     * @throws IOException if the old file cannot be deleted or the new one written
     */
    public static void generateData(int nSamples, boolean isNewGenerated, int k) throws IOException {
        if (!isNewGenerated) {
            return;
        }
        if (k <= 0 || nSamples < k) {
            throw new IllegalArgumentException(
                    "need nSamples >= k > 0, got nSamples=" + nSamples + ", k=" + k);
        }
        File file = new File(LOCAL_DATA_FILE);
        if (file.exists()) {
            System.out.println("data file will be new generated");
            if (!file.delete()) {
                // The original ignored the delete() result and carried on.
                throw new IOException("unable to delete existing data file: " + file);
            }
        } else {
            System.out.println("cluster data file will be generated");
        }

        // Spread the samples as evenly as possible over the k clusters; the
        // last cluster absorbs the rounding remainder.
        int perCluster = nSamples / k;
        int[] nums = new int[k];
        for (int i = 0; i < k - 1; i++) {
            nums[i] = perCluster;
        }
        nums[k - 1] = nSamples - perCluster * (k - 1);

        Random random = new Random();
        double[][] data = new double[nSamples][2];
        int cIndex = 0;
        for (int i = 0; i < k; i++) {
            // Each cluster center lies in [0, 10) x [0, 10); its samples are
            // jittered by +/- 0.5 on each axis.
            double centerX = random.nextDouble() * 10;
            double centerY = random.nextDouble() * 10;
            for (int j = 0; j < nums[i]; j++) {
                data[cIndex][0] = centerX + random.nextDouble() - 0.5;
                data[cIndex][1] = centerY + random.nextDouble() - 0.5;
                cIndex++;
            }
        }

        // Swap k random samples to the front: initCenters() takes the first k
        // lines as the initial centroids, so this keeps them from all coming
        // out of the first cluster. (Intentionally not a full shuffle.)
        for (int i = 0; i < k; i++) {
            int index = random.nextInt(nSamples);
            double[] tmp = data[i];
            data[i] = data[index];
            data[index] = tmp;
        }

        // try-with-resources: the original stream leaked on a write failure,
        // and also printed getBytes() (the byte array's toString) per line.
        try (FileOutputStream outputStream = new FileOutputStream(file)) {
            for (int i = 0; i < nSamples; i++) {
                String line = String.format("%d,%f,%f\n", i, data[i][0], data[i][1]);
                outputStream.write(line.getBytes(StandardCharsets.UTF_8));
            }
        }
    }

    /**
     * Initialises the k-means centroids by copying the first {@code k} lines
     * of the data file into the centroid file on HDFS (replication 1).
     *
     * @param k          number of clusters
     * @param fileSystem HDFS filesystem handle
     * @param dataPath   HDFS path of the clustering data file
     * @throws IOException if the data file has fewer than k lines or I/O fails
     */
    public static void initCenters(int k, FileSystem fileSystem, Path dataPath) throws IOException {
        Path centerPath = new Path(CENTERS_FILE);
        try (FSDataInputStream in = fileSystem.open(dataPath);
             BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
             FSDataOutputStream out = fileSystem.create(centerPath, true)) {
            fileSystem.setReplication(centerPath, (short) 1);
            for (int i = 0; i < k; i++) {
                String line = reader.readLine();
                if (line == null) {
                    throw new IOException("data file has fewer than " + k + " lines: " + dataPath);
                }
                // Plain UTF-8 bytes: writeUTF() would prepend a 2-byte binary
                // length prefix that corrupts the file for text readers.
                out.write(String.format("%s\n", line).getBytes(StandardCharsets.UTF_8));
            }
            out.flush();
        }
    }

    /**
     * Replaces the centroid file with the centers produced by one k-means
     * iteration. Each line of {@code dataPath} is expected to be a
     * whitespace-separated key/value pair; only the value part (index 1 after
     * the split) is kept.
     *
     * @param k          number of clusters
     * @param fileSystem HDFS filesystem handle
     * @param dataPath   HDFS path of the iteration's result file
     * @throws IOException if the result has fewer than k lines or I/O fails
     */
    public static void refreshCenters(int k, FileSystem fileSystem, Path dataPath) throws IOException {
        Path centerPath = new Path(CENTERS_FILE);
        try (FSDataInputStream in = fileSystem.open(dataPath);
             BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
             FSDataOutputStream out = fileSystem.create(centerPath, true)) {
            fileSystem.setReplication(centerPath, (short) 1);
            for (int i = 0; i < k; i++) {
                String line = reader.readLine();
                if (line == null) {
                    // The original dereferenced null here and threw an NPE.
                    throw new IOException("iteration output has fewer than " + k + " lines: " + dataPath);
                }
                // Keep only the value part of the "key<ws>value" line; write
                // plain UTF-8 bytes instead of writeUTF() (see initCenters).
                out.write(String.format("%s\n", line.split("\\s")[1]).getBytes(StandardCharsets.UTF_8));
            }
            out.flush();
        }
    }

    /**
     * Reads the centroid file from HDFS and returns the centers as an
     * {@code [n][2]} array of (x, y) points.
     *
     * <p>NOTE(review): each line is assumed to carry at least three
     * comma-separated fields with x at index 1 and y at index 2 (the
     * {@code index,x,y} data-file format) — confirm this matches the
     * reducer output that refreshCenters() copies in.
     *
     * @param fileSystem HDFS filesystem handle
     * @return one {@code {x, y}} row per line of the centroid file
     * @throws IOException if the centroid file cannot be read
     */
    public static double[][] getCenters(FileSystem fileSystem) throws IOException {
        Path path = new Path(CENTERS_FILE);
        List<double[]> centers = new ArrayList<>();
        // Single pass: the original opened and read the file twice (once to
        // count lines, once to parse), costing an extra HDFS round trip.
        try (FSDataInputStream in = fileSystem.open(path);
             BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                String[] parts = line.split(",");
                centers.add(new double[] {Double.parseDouble(parts[1]), Double.parseDouble(parts[2])});
            }
        }
        return centers.toArray(new double[0][]);
    }

    /**
     * Squared Euclidean distance between the L2-normalised (unit-length)
     * versions of two vectors; parallel vectors therefore have distance 0
     * regardless of magnitude.
     *
     * @param x first vector
     * @param y second vector
     * @return the squared distance between x/||x|| and y/||y||, or
     *         {@link Double#MAX_VALUE} if the lengths differ or either
     *         vector is all zeros
     */
    public static double calDistance(double[] x, double[] y) {
        if (x.length != y.length) {
            return Double.MAX_VALUE;
        }
        double xSq = 0.0;
        double ySq = 0.0;
        for (int i = 0; i < x.length; i++) {
            xSq += x[i] * x[i];
            ySq += y[i] * y[i];
        }
        // The L2 norm is the square root of the sum of squares; the original
        // divided by the *squared* norm, which is not L2 normalisation.
        double xNorm = Math.sqrt(xSq);
        double yNorm = Math.sqrt(ySq);
        if (xNorm == 0.0 || yNorm == 0.0) {
            // A zero vector has no direction to normalise; previously this
            // divided by zero and returned NaN.
            return Double.MAX_VALUE;
        }
        double distance = 0.0;
        for (int i = 0; i < x.length; i++) {
            double d = x[i] / xNorm - y[i] / yNorm;
            distance += d * d;
        }
        return distance;
    }
}
