package ReduceJoin;

import org.apache.hadoop.io.Writable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

/**
 * Join bean for the reduce-side join: carries fields from both the order table
 * and the product table through the shuffle, with {@code flag} identifying the
 * source table of each record.
 *
 * <p>Hadoop instantiates this class reflectively via the no-arg constructor and
 * then calls {@link #readFields(DataInput)}, so the serialized field order in
 * {@link #write(DataOutput)} and {@link #readFields(DataInput)} must match exactly.
 */
public class TableBean implements Writable {
    private String order_id;
    private String p_id;
    private int amount;
    // Distinguishes which of the two tables this record came from.
    private String flag;

    private String name;

    /**
     * No-arg constructor required by Hadoop's serialization framework.
     * Fields are initialized to empty strings because {@link DataOutput#writeUTF}
     * throws NullPointerException on null — a bean emitted with any field unset
     * would otherwise crash during serialization.
     */
    public TableBean() {
        this.order_id = "";
        this.p_id = "";
        this.flag = "";
        this.name = "";
    }

    /**
     * Fully-populated constructor.
     *
     * @param order_id order identifier (order table)
     * @param p_id     product identifier (join key)
     * @param amount   quantity ordered
     * @param flag     source-table marker (e.g. "order" or "pd")
     * @param name     product name (product table)
     */
    public TableBean(String order_id, String p_id, int amount, String flag, String name) {
        this.order_id = order_id;
        this.p_id = p_id;
        this.amount = amount;
        this.flag = flag;
        this.name = name;
    }

    public String getOrder_id() {
        return order_id;
    }

    public void setOrder_id(String order_id) {
        this.order_id = order_id;
    }

    public String getP_id() {
        return p_id;
    }

    public void setP_id(String p_id) {
        this.p_id = p_id;
    }

    public int getAmount() {
        return amount;
    }

    public void setAmount(int amount) {
        this.amount = amount;
    }

    public String getFlag() {
        return flag;
    }

    public void setFlag(String flag) {
        this.flag = flag;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    /**
     * Serializes the bean. Field order here is the wire contract and MUST stay
     * in sync with {@link #readFields(DataInput)}: order_id, p_id, amount, name, flag.
     */
    @Override
    public void write(DataOutput dataOutput) throws IOException {
        dataOutput.writeUTF(order_id);
        dataOutput.writeUTF(p_id);
        dataOutput.writeInt(amount);
        dataOutput.writeUTF(name);
        dataOutput.writeUTF(flag);
    }

    /**
     * Deserializes the bean, reading fields in the same order {@link #write(DataOutput)}
     * wrote them.
     */
    @Override
    public void readFields(DataInput dataInput) throws IOException {
        this.order_id = dataInput.readUTF();
        this.p_id = dataInput.readUTF();
        this.amount = dataInput.readInt();
        this.name = dataInput.readUTF();
        this.flag = dataInput.readUTF();
    }

    /**
     * Tab-separated output line: order_id, product name, amount — the final
     * joined record format written by the reducer.
     */
    @Override
    public String toString() {
        return order_id + '\t' + name + '\t' + amount;
    }
}
