package mrdemo011;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

/**
 * Composite value type for an employee/department reduce-side join.
 *
 * <p>Note: every field of this "generic" record type needs a non-null default,
 * because during the map phase only some fields are populated, and Hadoop
 * serialization ({@code writeUTF}) cannot handle a null value.
 */
public class EmpData implements Writable{

	// Employee number
	private String empno = "";
	// Employee name
	private String empname = "";
	// Department number (join key)
	private String deptno = "";
	// Department name
	private String deptname = "";

	// Record-type marker: 0 = employee record, 1 = department record
	private int flag;

	public String getEmpno() {
		return empno;
	}

	public void setEmpno(String empno) {
		this.empno = empno;
	}

	public String getEmpname() {
		return empname;
	}

	public void setEmpname(String empname) {
		this.empname = empname;
	}

	public String getDeptno() {
		return deptno;
	}

	public void setDeptno(String deptno) {
		this.deptno = deptno;
	}

	public String getDeptname() {
		return deptname;
	}

	public void setDeptname(String deptname) {
		this.deptname = deptname;
	}

	public int getFlag() {
		return flag;
	}

	public void setFlag(int flag) {
		this.flag = flag;
	}

	/** No-arg constructor required by Hadoop's Writable reflection machinery. */
	public EmpData() {
		super();
	}

	/**
	 * Serializes all fields. The field order here MUST stay in sync with
	 * {@link #readFields(DataInput)} — Hadoop deserializes positionally.
	 */
	@Override
	public void write(DataOutput out) throws IOException {
		out.writeUTF(this.empno);
		out.writeUTF(this.empname);
		out.writeUTF(this.deptno);
		out.writeUTF(this.deptname);
		out.writeInt(this.flag);
	}

	/**
	 * Deserializes all fields in the exact order written by
	 * {@link #write(DataOutput)}.
	 */
	@Override
	public void readFields(DataInput in) throws IOException {
		this.empno =  in.readUTF();
		this.empname = in.readUTF();
		this.deptno = in.readUTF();
		this.deptname = in.readUTF();
		this.flag = in.readInt();
	}

	/**
	 * Human-readable form used for job output/debugging.
	 * Fix: the original string was unterminated — it opened "EmpData [" but
	 * never appended the closing bracket.
	 * NOTE(review): flag is intentionally not included, matching the original
	 * output fields — confirm downstream output format before adding it.
	 */
	@Override
	public String toString() {
		return "EmpData [empno=" + empno + ", empname=" + empname + ", deptno="
				+ deptno + ", deptname=" + deptname + "]";
	}

}
