package com.irt.rootcloud.realtime.gketl.bolt;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Timer;

import backtype.storm.task.TopologyContext;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

import com.irt.rootcloud.realtime.gketl.model.SchemaModel;
import com.irt.rootcloud.realtime.gketl.util.CopyUtils;
import com.irt.rootcloud.realtime.gketl.util.Utils;
/**
 * Classifies incoming tuples according to the schema definitions loaded from
 * the relational database (table {@code rd_model}, refreshed periodically).
 * NOTE(review): an earlier note said the schemas came from Redis, but this
 * bolt actually reads them via JDBC — confirm which source is authoritative.
 *
 * @author Ken.zhang
 * @date 2017-1-18 2:28:05 PM
 */
public class SubEtlBolt extends BaseBasicBolt{

	private static final long serialVersionUID = -6819502865042976541L;

	/** Cached schema rows, refreshed every 3 minutes; published atomically, reads guarded by {@code this}. */
	private volatile List<SchemaModel> list = null;
	/** DB settings (Db_Driver, Db_Url, Db_UserName, Db_Password); serialized with the bolt. */
	private Properties properties;
	// JDBC handles are not serializable — they must be transient and are
	// rebuilt in prepare() after the bolt is deserialized on a worker.
	private transient Connection conn = null;
	private transient PreparedStatement ps_res = null;
	// Per-instance (was static, which let several bolt instances in the same
	// worker JVM clobber each other's timer); cancelled in cleanup().
	private transient Timer timer;

	/**
	 * @param properties database connection configuration, carried through
	 *                   topology serialization to every worker.
	 */
	public SubEtlBolt(Properties properties){
		this.properties = properties;
	}

	@Override
	public void cleanup() {
		// Stop the periodic refresh and release JDBC resources
		// (previously both were leaked on shutdown).
		if (timer != null) {
			timer.cancel();
			timer = null;
		}
		if (ps_res != null) {
			try {
				ps_res.close();
			} catch (SQLException e) {
				e.printStackTrace();
			}
			ps_res = null;
		}
		if (conn != null) {
			try {
				conn.close();
			} catch (SQLException e) {
				e.printStackTrace();
			}
			conn = null;
		}
		super.cleanup();
	}

	@Override
	public void prepare(Map stormConf, TopologyContext context) {
		super.prepare(stormConf, context);
		// Open the JDBC connection and prepare the schema query.
		initDbCon();
		// Load the schema list once up front so execute() never sees null.
		querySchema();
		// Schedule the periodic re-load to pick up database changes.
		checkStatus();
	}

	/**
	 * Opens the JDBC connection from {@link #properties} and prepares the
	 * statement selecting all active ({@code st = 1}) schema rows.
	 */
	private void initDbCon(){
		try {
			String dbDriver = properties.getProperty("Db_Driver");
			String dbUrl = properties.getProperty("Db_Url");
			String dbUserName = properties.getProperty("Db_UserName");
			String dbPassword = properties.getProperty("Db_Password");
			Class.forName(dbDriver);
			conn = DriverManager.getConnection(dbUrl, dbUserName, dbPassword);
			String sql_res = "select sn as subtype,cls as cols,fil as cfilter from rd_model where st = 1";
			ps_res = conn.prepareStatement(sql_res);
		} catch (Exception e) {
			// NOTE(review): a failed connection leaves ps_res null and every
			// later querySchema() call will NPE — consider failing fast here.
			e.printStackTrace();
		}
	}

	/**
	 * Reloads the schema list from the database. Builds a complete local list
	 * first and publishes it in one assignment, so readers never observe a
	 * half-filled list. The ResultSet is now closed (it leaked every refresh).
	 */
	private void querySchema(){
		List<SchemaModel> fresh = new ArrayList<SchemaModel>();
		ResultSet rs = null;
		try {
			rs = ps_res.executeQuery();
			while (rs.next()) {
				SchemaModel sm = new SchemaModel();
				sm.setTopicname(rs.getString("subtype"));
				sm.setCols(rs.getString("cols"));
				sm.setFilter(rs.getString("cfilter"));
				fresh.add(sm);
			}
			synchronized (this) {
				list = fresh;
			}
		} catch (SQLException e) {
			e.printStackTrace();
		} finally {
			if (rs != null) {
				try {
					rs.close();
				} catch (SQLException e) {
					e.printStackTrace();
				}
			}
		}
	}

	/**
	 * Schedules {@link #querySchema()} every 3 minutes so database schema
	 * changes are picked up without restarting the topology.
	 */
	private void checkStatus(){
		timer = new Timer(true); // daemon: don't block worker JVM shutdown
		timer.schedule(new java.util.TimerTask() {
			@Override
			public void run() {
				querySchema();
			}
		}, 0, 3 * 60 * 1000);
	}

	/**
	 * Attaches a deep copy of the current schema list to each incoming tuple
	 * and re-emits (tenant, deviceType, schemas, payload) downstream.
	 */
	@Override
	public void execute(Tuple tuple, BasicOutputCollector collector) {
		String tenant = (String) tuple.getValueByField(Utils.CHANNEL_TENANT);
		String deviceType = (String) tuple
				.getValueByField(Utils.CHANNEL_DEVICETYPE);
		String dtjson = (String) tuple.getValueByField(Utils.FIELDCHANNEL_BOLT);
		synchronized (this) {
			// Deep copy so downstream bolts cannot mutate the shared cache.
			List<SchemaModel> listCopy = CopyUtils.deepCopyList(list);
			collector.emit(new Values(tenant, deviceType, listCopy, dtjson));
		}
	}

	@Override
	public void declareOutputFields(OutputFieldsDeclarer declare) {
		declare.declare(new Fields(Utils.SUB_CHANNEL_TENANT, Utils.SUB_CHANNEL_DEVICETYPE, Utils.SUB_CHANNEL_FIELDSUB, Utils.SUB_FIELDCHANNEL_BOLT));
	}
}
