package db.read;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.ipc.Client;
import org.apache.hadoop.mapred.lib.db.DBWritable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * A record mapped to a database row with columns {@code id}, {@code name} and
 * {@code intro}. Implements {@link Writable} so Hadoop can serialize it between
 * tasks, and {@link DBWritable} so it can be read from / written to a JDBC
 * database by the DB input/output formats.
 */
public class DBRecord implements Writable, DBWritable {

    private String id;
    private String name;
    private String intro;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getIntro() {
        return intro;
    }

    public void setIntro(String intro) {
        this.intro = intro;
    }

    /** Populates this record from the current row of a JDBC {@link ResultSet}. */
    @Override
    public void readFields(ResultSet set) throws SQLException {
        this.id = set.getString("id");
        this.name = set.getString("name");
        this.intro = set.getString("intro");
    }

    /**
     * Binds this record's fields to a JDBC {@link PreparedStatement}.
     * Parameter order (1=id, 2=name, 3=intro) must match the column order of
     * the SQL statement configured on the job.
     */
    @Override
    public void write(PreparedStatement pst) throws SQLException {
        pst.setString(1, id);
        pst.setString(2, name);
        pst.setString(3, intro);
    }

    /**
     * Deserializes the fields from Hadoop's wire format, in the same order
     * that {@link #write(DataOutput)} emits them.
     */
    @Override
    public void readFields(DataInput in) throws IOException {
        this.id = Text.readString(in);
        this.name = Text.readString(in);
        this.intro = Text.readString(in);
    }

    /**
     * Serializes the fields into Hadoop's wire format; the order must mirror
     * {@link #readFields(DataInput)}.
     */
    @Override
    public void write(DataOutput out) throws IOException {
        Text.writeString(out, this.id);
        Text.writeString(out, this.name);
        Text.writeString(out, this.intro);
    }

    /** The space-separated line ultimately written to HDFS as the output value. */
    @Override
    public String toString() {
        return this.id + " " + this.name + " " + this.intro;
    }
}