package com.zhanghe.study.mapreduce.join;

import org.apache.hadoop.io.Writable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

/**
 * @author zh
 * @date 2021/3/30 11:26
 */
/**
 * Join bean carrying one record from either the order table or the product table.
 * Implements Hadoop's {@link Writable} so it can be shuttled between map and reduce.
 *
 * <p>Contract notes:
 * <ul>
 *   <li>The no-arg constructor is required by Hadoop's reflection-based deserialization.</li>
 *   <li>{@link #write(DataOutput)} and {@link #readFields(DataInput)} must read/write the
 *       fields in exactly the same order.</li>
 * </ul>
 */
public class TableBean implements Writable {
    /** Order id. */
    private String id;
    /** Product id (the join key). */
    private String pid;
    /** Product name. */
    private String name;
    /** Quantity ordered. */
    private int amount;
    /** Marks which source file/table this record came from. */
    private String flag;

    /**
     * No-arg constructor required by the Hadoop serialization framework.
     * String fields are initialized to "" because {@link DataOutput#writeUTF(String)}
     * throws {@link NullPointerException} on null — an unpopulated bean must still
     * be serializable.
     */
    public TableBean(){
        super();
        this.id = "";
        this.pid = "";
        this.name = "";
        this.flag = "";
    }

    /**
     * Fully-populating constructor.
     *
     * @param id     order id
     * @param pid    product id (join key)
     * @param name   product name
     * @param amount quantity ordered
     * @param flag   source table marker
     */
    public TableBean(String id,String pid,String name,int amount,String flag){
        super();
        this.id = id;
        this.pid = pid;
        this.name = name;
        this.amount = amount;
        this.flag = flag;
    }

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getPid() {
        return pid;
    }

    public void setPid(String pid) {
        this.pid = pid;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getAmount() {
        return amount;
    }

    public void setAmount(int amount) {
        this.amount = amount;
    }

    public String getFlag() {
        return flag;
    }

    public void setFlag(String flag) {
        this.flag = flag;
    }

    /**
     * Tab-separated output form used as the job's final record text.
     * Deliberately omits {@code flag}, which is only an internal routing marker.
     */
    @Override
    public String toString() {
        return id + "\t" + pid + "\t" + name + "\t" + amount;
    }

    /**
     * Serializes the bean. Field order must mirror {@link #readFields(DataInput)}.
     *
     * @param out sink supplied by the Hadoop framework
     * @throws IOException on any underlying write failure
     */
    @Override
    public void write(DataOutput out) throws IOException {
        out.writeUTF(id);
        out.writeUTF(pid);
        out.writeUTF(name);
        out.writeInt(amount);
        out.writeUTF(flag);
    }

    /**
     * Deserializes the bean. Field order must mirror {@link #write(DataOutput)}.
     *
     * @param in source supplied by the Hadoop framework
     * @throws IOException on any underlying read failure
     */
    @Override
    public void readFields(DataInput in) throws IOException {
        id = in.readUTF();
        pid = in.readUTF();
        name = in.readUTF();
        amount = in.readInt();
        flag = in.readUTF();
    }
}
