package com.transport.analysis.traffic;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Reducer that aggregates per-key traffic records.
 *
 * <p>Input values are expected to be tab-separated pairs; only the first field
 * (a boarding-passenger count) is consumed. For each key this reducer emits
 * the total boarding count and the average count per record, tab-separated.
 *
 * <p>Output value format: {@code totalPassengersOn<TAB>averageCrowdLevel}
 * where the average is formatted with two decimal places.
 */
public class TrafficReducer extends Reducer<Text, Text, Text, Text> {

    // Reused across reduce() calls to avoid per-record allocation (standard Hadoop idiom).
    private final Text result = new Text();

    /**
     * Sums the boarding-passenger counts for one key and emits the total plus
     * the per-record average.
     *
     * @param key     composite key (appears to be lineId/stationId/hour — see output comment)
     * @param values  tab-separated records; field 0 must be a parseable long
     * @param context MapReduce context used to emit the result
     * @throws IOException          propagated from context.write
     * @throws InterruptedException propagated from context.write
     */
    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {

        long totalPassengersOn = 0;
        long recordCount = 0;

        for (Text val : values) {
            // "\\t" is a regex here, matching a single tab character.
            String[] parts = val.toString().split("\\t");
            // Expecting two tab-separated fields. NOTE(review): parts[1] is never
            // read — if it carries a crowd-level value, the average below may be
            // meant to use it instead of the passenger count. TODO confirm with mapper.
            if (parts.length == 2) {
                try {
                    totalPassengersOn += Long.parseLong(parts[0]);
                    recordCount++; // Only well-formed records contribute to the average.
                } catch (NumberFormatException e) {
                    // Skip malformed records rather than failing the whole reduce task.
                    System.err.println("Error parsing number from value: " + val.toString());
                }
            }
        }

        // Average passengers per record; 0.0 when no valid records were seen.
        float averageCrowdLevel = 0.0f;
        if (recordCount > 0) {
            averageCrowdLevel = (float) totalPassengersOn / recordCount;
        }

        // Output: ((lineId, stationId, hour), totalPassengersOn, averageCrowdLevel)
        // BUGFIX: was "\\t" in a string literal, which emitted a literal
        // backslash-t instead of a tab, breaking the TSV output format.
        result.set(totalPassengersOn + "\t" + String.format("%.2f", averageCrowdLevel));
        context.write(key, result);
    }
}