package com.huan.table;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileSplit;

import java.io.IOException;

/**
 * Map side of a reduce-side join between the order table and the product table.
 *
 * <p>Each input record is tagged with its source table via {@code flag}
 * ("order" or "pd") and emitted keyed by product id (pid), so that matching
 * order and product records arrive together in the same reduce call.
 */
public class TableMapper extends Mapper<LongWritable, Text, Text, TableBean> {

    /** Name of the file backing this split; decides which table we are reading. */
    private String name;

    // Reused across map() calls to avoid a per-record allocation.
    // Safe because Hadoop serializes the value at context.write() time.
    private final TableBean tableBean = new TableBean();
    private final Text outKey = new Text();

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
        // Identify the source table from the split's file name (e.g. "order.txt" vs "pd.txt").
        FileSplit inputSplit = (FileSplit) context.getInputSplit();
        name = inputSplit.getPath().getName();
    }

    /**
     * Tags one tab-separated record with its table of origin and emits it keyed by pid.
     *
     * @param key     byte offset of the line within the split (unused)
     * @param value   one tab-separated line of either table
     * @param context output sink; receives (pid, tagged bean)
     */
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Both tables are tab-separated; split once for either branch.
        String[] fields = value.toString().split("\t");

        if (name.startsWith("order")) {
            // Order table layout: id \t pid \t amount
            tableBean.setId(fields[0]);
            tableBean.setPid(fields[1]);
            tableBean.setAmount(Integer.parseInt(fields[2]));
            tableBean.setName("");
            tableBean.setFlag("order");
            outKey.set(fields[1]);
        } else {
            // Product table layout: pid \t name
            tableBean.setId("");
            tableBean.setPid(fields[0]);
            tableBean.setAmount(0);
            tableBean.setName(fields[1]);
            tableBean.setFlag("pd");
            outKey.set(fields[0]);
        }
        // Key by pid so order and product records co-locate at the reducer.
        context.write(outKey, tableBean);
    }
}
