package com.car_test;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class GenderAnalysisCombined {

    // ====== 1. Upload a local file to HDFS ======
    /**
     * Copies a local file onto HDFS, connecting as user "root".
     *
     * @param localFilePath source path on the local filesystem
     * @param hdfsFilePath  destination path on HDFS
     * @throws IOException          if the connection or copy fails
     * @throws URISyntaxException   if the hard-coded HDFS URI is malformed
     * @throws InterruptedException if the FileSystem lookup is interrupted
     */
    public static void uploadFileToHDFS(String localFilePath, String hdfsFilePath)
            throws IOException, URISyntaxException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.128.130:8020");
        // try-with-resources: the original called fs.close() unconditionally
        // after the copy, leaking the handle when copyFromLocalFile threw.
        try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.128.130:8020"), conf, "root")) {
            fs.copyFromLocalFile(new Path(localFilePath), new Path(hdfsFilePath));
            System.out.println("文件上传成功！" + localFilePath + " -> " + hdfsFilePath);
        }
    }

    // ====== 2. Mapper ======
    /**
     * Emits (gender, 1) for every CSV row whose 39th column (index 38) is
     * exactly "男性" or "女性"; all other rows are silently ignored.
     */
    public static class GenderAnalysisMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        private static final IntWritable one = new IntWritable(1);
        // reused across map() calls to avoid per-record allocation
        private final Text genderKey = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            // Split with limit -1 so trailing empty columns are kept: the
            // default split() discards them, which made rows with empty
            // columns after the gender field fail the length check below.
            // NOTE(review): assumes a plain comma-separated file with no
            // quoted fields containing commas — confirm against the data.
            String[] fields = value.toString().split(",", -1);
            if (fields.length >= 39) {
                String gender = fields[38].trim();
                if ("男性".equals(gender) || "女性".equals(gender)) {
                    genderKey.set(gender);
                    context.write(genderKey, one);
                }
            }
        }
    }

    // ====== 3. Reducer ======
    /**
     * Emits each gender's share of all counted records.
     *
     * <p>Defect fixed: the original summed the values into {@code count} and
     * incremented {@code total} once per value — but every value is 1, so
     * {@code count == total} and the emitted ratio was always 1.0. A single
     * {@code reduce()} call only sees one key and cannot know the grand
     * total, so this version buffers the per-gender counts and writes
     * count / grandTotal from {@code cleanup()}. Requires all keys to reach
     * one reducer task (the job's default of a single reducer).
     */
    public static class GenderAnalysisReducer extends Reducer<Text, IntWritable, Text, DoubleWritable> {
        // insertion-ordered so the output order matches key arrival order
        private final Map<String, Long> counts = new LinkedHashMap<>();
        private long grandTotal = 0L;

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            long count = 0;
            for (IntWritable val : values) {
                count += val.get();
            }
            // copy the key's contents: Hadoop reuses the Text instance
            counts.put(key.toString(), count);
            grandTotal += count;
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            for (Map.Entry<String, Long> entry : counts.entrySet()) {
                // guard against an empty input producing 0/0 = NaN
                double ratio = grandTotal == 0 ? 0.0 : (double) entry.getValue() / grandTotal;
                context.write(new Text(entry.getKey()), new DoubleWritable(ratio));
            }
        }
    }

    // ====== 4. Driver ======
    /**
     * Configures and runs the gender-analysis job, blocking until it
     * completes; exits the JVM with status 1 if the job fails.
     *
     * @param inputPath  HDFS path of the input CSV
     * @param outputPath HDFS path for the job output; deleted first if it
     *                   already exists, since MapReduce refuses to start a
     *                   job whose output directory is present
     * @throws Exception if the job cannot be submitted or is interrupted
     */
    public static void runMapReduceJob(String inputPath, String outputPath)
            throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.128.130:8020");

        // Remove a stale output directory so reruns do not die with
        // FileAlreadyExistsException. The FileSystem is intentionally not
        // closed here: FileSystem.get returns a JVM-wide cached instance
        // that the submitted job also relies on.
        Path out = new Path(outputPath);
        FileSystem fs = FileSystem.get(conf);
        if (fs.exists(out)) {
            fs.delete(out, true);
        }

        Job job = Job.getInstance(conf, "Gender Analysis");
        job.setJarByClass(GenderAnalysisCombined.class);

        job.setInputFormatClass(TextInputFormat.class);
        TextInputFormat.addInputPath(job, new Path(inputPath));

        job.setMapperClass(GenderAnalysisMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // One reducer sees every gender key, which the ratio computation
        // relies on (this is also the MapReduce default — pinned here so a
        // cluster-side config override cannot silently break the result).
        job.setNumReduceTasks(1);
        job.setReducerClass(GenderAnalysisReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(DoubleWritable.class);

        job.setOutputFormatClass(TextOutputFormat.class);
        TextOutputFormat.setOutputPath(job, out);

        // Exit only on failure: the original called System.exit(0) on
        // success as well, which prevented any caller code after this
        // method from ever running.
        if (!job.waitForCompletion(true)) {
            System.exit(1);
        }
    }

    // ====== 5. Main method ======
    /**
     * Uploads the local CSV to HDFS, then runs the analysis job.
     * Exits with status 1 if either step fails.
     */
    public static void main(String[] args) {
        try {
            // Step 1: upload the data file to HDFS
            String localFilePath = "D:/car/data.csv";
            String hdfsFilePath = "hdfs://192.168.128.130:8020/input/car/data.csv";
            uploadFileToHDFS(localFilePath, hdfsFilePath);

            // Step 2: submit the MapReduce job to analyze the data
            String outputPath = "hdfs://192.168.128.130:8020/output/gender_analysis";
            runMapReduceJob(hdfsFilePath, outputPath);

        } catch (Exception e) {
            // Defect fixed: the original printed the trace but still exited
            // with status 0, hiding failures from scripts and schedulers.
            e.printStackTrace();
            System.exit(1);
        }
    }
}
