package com.example;

import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException;
import java.util.*;

public class KNNReducer extends Reducer<Text, Text, Text, Text> {
    /** Feature vectors of "train" records, accumulated across reduce() calls. */
    private final List<List<Double>> trainFeatures = new ArrayList<>();
    /** Class labels parallel to {@code trainFeatures} (same index = same record). */
    private final List<Integer> trainLabels = new ArrayList<>();
    /** Feature vectors of test records awaiting classification in cleanup(). */
    private final List<List<Double>> testFeatures = new ArrayList<>();
    /** Number of nearest neighbours to vote; configurable via "knn.k" (default 3). */
    private int k = 3;

    /**
     * Reads the neighbour count from the job configuration so K can be tuned
     * per job without recompiling. Falls back to 3 when "knn.k" is unset,
     * preserving the previous hard-coded behaviour.
     */
    @Override
    protected void setup(Context context) {
        k = context.getConfiguration().getInt("knn.k", 3);
    }

    /**
     * Buffers every record in memory: records under the "train" key contribute
     * a feature vector plus a trailing integer label; records under any other
     * key are treated as test data and only their feature vector is kept.
     *
     * <p>Value format: comma-separated doubles, with the last column holding
     * the integer class label for training records. The last column of test
     * records is never parsed, so test rows may carry a placeholder or a
     * held-out label without causing a {@link NumberFormatException}.
     *
     * @param key     either "train" or a test-partition key
     * @param values  CSV records grouped under that key
     * @param context Hadoop task context (output is emitted in cleanup, not here)
     */
    @Override
    public void reduce(Text key, Iterable<Text> values, Context context) {
        // The key is fixed for the whole call; decide once, not per record.
        boolean isTrain = key.toString().equals("train");
        for (Text value : values) {
            String[] parts = value.toString().split(",");
            List<Double> features = new ArrayList<>(Math.max(parts.length - 1, 0));
            for (int i = 0; i < parts.length - 1; i++) {
                features.add(Double.parseDouble(parts[i]));
            }
            if (isTrain) {
                // Only training rows are guaranteed a numeric label in the
                // last column; parsing it unconditionally (as before) would
                // crash on label-less test rows.
                trainLabels.add(Integer.parseInt(parts[parts.length - 1]));
                trainFeatures.add(features);
            } else {
                testFeatures.add(features);
            }
        }
    }

    /**
     * Classifies each buffered test vector against the buffered training set.
     *
     * <p>NOTE(review): intentionally unimplemented in this revision — the plan
     * (per the original comments) is to normalize features, compute distances,
     * and emit the majority label of the {@code k} nearest neighbours.
     *
     * @throws IOException          if emitting results fails
     * @throws InterruptedException if the task is interrupted while writing
     */
    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        // Normalize features, calculate distances, and predict labels
        // ...
    }

    // Additional helper methods for normalization, distance calculation, and voting
}


