package com.mapreduce.partitioner;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

// Map phase
/**
 * Map phase: parses one comma-separated employee record per input line and
 * emits (department number, Employee) so downstream partitioner/reducer can
 * total salaries per department.
 *
 * Expected record layout, e.g. {@code 7369,SMITH,CLERK,7902,1980/12/17,800,,20}:
 * [0]=empno, [1]=ename, [2]=job, [3]=mgr, [4]=hiredate, [5]=sal, [6]=comm, [7]=deptno.
 * The mgr and comm fields may be empty in the source data.
 */
public class SalaryTotalMapper extends Mapper<LongWritable, Text, IntWritable, Employee> {
    // Reused across map() calls to avoid per-record allocation (standard Hadoop idiom).
    private final IntWritable k2 = new IntWritable(); // k2: department number
    private final Employee emp = new Employee();      // v2: one employee record

    /**
     * Parses a single input line into the reusable {@link Employee} and writes
     * (deptno, employee) to the context.
     *
     * @param key     byte offset of the line in the input split (unused)
     * @param value   one CSV employee record
     * @param context Hadoop output collector
     * @throws IOException          on output failure
     * @throws InterruptedException if the task is interrupted
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // 1. Convert v1 from Text to String.
        String line = value.toString();
        // 2. Tokenize on commas.
        String[] datas = line.split(",");

        // 3. Populate the reusable Employee from the parsed fields.
        emp.setEmpno(Integer.parseInt(datas[0]));
        emp.setEname(datas[1]);
        emp.setJob(datas[2]);
        try {
            emp.setMgr(Integer.parseInt(datas[3]));
        } catch (NumberFormatException e) {
            // The manager id may be empty (e.g. the president has no manager);
            // parseInt then throws, so fall back to -1 as a sentinel value.
            emp.setMgr(-1);
        }
        emp.setHiredate(datas[4]);
        emp.setSal(Integer.parseInt(datas[5]));
        try {
            emp.setComm(Integer.parseInt(datas[6]));
        } catch (NumberFormatException e) {
            // Commission may be empty in the data; treat missing commission as 0.
            emp.setComm(0);
        }
        emp.setDeptno(Integer.parseInt(datas[7]));

        k2.set(emp.getDeptno()); // department number is k2

        // 4. Emit (k2, v2).
        context.write(k2, emp);
    }
}
