package cn.doitedu.rtdw.data_sync;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;

public class Job03_页面信息维表导入hbase {

    /** Column family used by the dim_page_info table. */
    private static final byte[] FAMILY = "f".getBytes(StandardCharsets.UTF_8);
    private static final byte[] COL_PAGE_TYPE = "page_type".getBytes(StandardCharsets.UTF_8);
    private static final byte[] COL_PAGE_SERVICE = "page_service".getBytes(StandardCharsets.UTF_8);

    /** Rows buffered per batch put — one RPC per batch instead of one per row. */
    private static final int BATCH_SIZE = 1000;

    /**
     * Loads the page-info dimension table from {@code data/page_info.csv}
     * into the HBase table {@code dim_page_info}.
     *
     * <p>Each CSV line is expected to be {@code rowkey,page_type,page_service},
     * e.g. {@code /content/article/,文章页,内容服务}. Blank or malformed lines
     * (fewer than 3 fields, or an empty row key) are skipped.
     *
     * @param args unused
     * @throws Exception if the HBase connection, table access, or file read fails
     */
    public static void main(String[] args) throws Exception {

        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "doitedu:2181");

        // try-with-resources closes reader, table and connection in reverse
        // order even on failure — the original leaked all three if an
        // exception was thrown mid-load.
        try (Connection hbaseConn = ConnectionFactory.createConnection(conf);
             Table table = hbaseConn.getTable(TableName.valueOf("dim_page_info"));
             // Read the file explicitly as UTF-8: FileReader's no-charset
             // constructor uses the platform default, which would corrupt the
             // Chinese field values on some systems.
             BufferedReader br = new BufferedReader(new InputStreamReader(
                     new FileInputStream("data/page_info.csv"), StandardCharsets.UTF_8))) {

            List<Put> batch = new ArrayList<>(BATCH_SIZE);

            String line;
            while ((line = br.readLine()) != null) {
                String[] fields = line.split(",");

                // Guard against blank/malformed lines — the original indexed
                // fields[1]/fields[2] unconditionally and would crash with
                // ArrayIndexOutOfBoundsException.
                if (fields.length < 3 || fields[0].isEmpty()) {
                    continue;
                }

                Put putData = new Put(fields[0].getBytes(StandardCharsets.UTF_8));
                putData.addColumn(FAMILY, COL_PAGE_TYPE, fields[1].getBytes(StandardCharsets.UTF_8));
                putData.addColumn(FAMILY, COL_PAGE_SERVICE, fields[2].getBytes(StandardCharsets.UTF_8));

                batch.add(putData);
                if (batch.size() >= BATCH_SIZE) {
                    table.put(batch);
                    batch.clear();
                }
            }

            // Flush any remaining rows that didn't fill a full batch.
            if (!batch.isEmpty()) {
                table.put(batch);
            }
        }
    }
}
