package net.wicp.tams.common.binlog.dump.handlerConsumer;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.lmax.disruptor.WorkHandler;

import lombok.extern.slf4j.Slf4j;
import net.wicp.tams.common.apiext.CollectionUtil;
import net.wicp.tams.common.apiext.jdbc.JdbcAssit;
import net.wicp.tams.common.binlog.dump.MainDump;
import net.wicp.tams.common.binlog.dump.bean.Dump;
import net.wicp.tams.common.binlog.dump.bean.DumpEvent;
import net.wicp.tams.common.jdbc.DruidAssit;

/****
 * Disruptor worker that loads base-table rows for a primary-key range and
 * attaches them to the {@link DumpEvent}.
 *
 * JDK 1.5 spec notes: 1. GC may auto-close a Statement and Connection;
 * 2. closing a Statement closes its ResultSet; 3. closing a Connection does
 * not necessarily close its Statements.
 *
 * @author 偏锋书生
 *
 *         2018年4月26日
 */
@Slf4j
public class BaseDataHander implements WorkHandler<DumpEvent> {
	private Connection connection;
	// Keyed by "db.tb": the live PreparedStatement, its SQL text, and its fetch
	// size — SQL/fetch-size are retained so statements can be re-prepared after
	// a reconnect (see initConn()).
	private final Map<String, PreparedStatement> stmtmap = new HashMap<String, PreparedStatement>();
	private final Map<String, String> sqlmap = new HashMap<String, String>();
	private final Map<String, Integer> duanmap = new HashMap<String, Integer>();
	private final String formatestr = "%s.%s";

	/**
	 * Prepares one parameterized range query per dump table:
	 * {@code select `cols` <from-clause> and pk >= ? and pk <= ?}.
	 *
	 * @param dumps tables to dump; each contributes one cached PreparedStatement
	 * @throws SQLException if the connection cannot be obtained or a statement
	 *                      cannot be prepared
	 */
	public BaseDataHander(Dump[] dumps) throws SQLException {
		this.connection = DruidAssit.getConnection(MainDump.globleDatasourceName);
		for (Dump dump : dumps) {
			String arrayJoin = CollectionUtil.arrayJoin(dump.getNeedCols(), "`,`");
			String sql = String.format("select `%s` %s and %s >=? and %s<=?", arrayJoin, dump.packFromstr(),
					dump.getPrimarys()[0], dump.getPrimarys()[0]);
			String key = String.format(formatestr, dump.getDb(), dump.getTb());
			sqlmap.put(key, sql);
			duanmap.put(key, dump.getNumDuan());
			PreparedStatement statement = this.connection.prepareStatement(sql);
			// Fetch size limits rows buffered per round trip for large ranges.
			statement.setFetchSize(dump.getNumDuan());
			stmtmap.put(key, statement);
		}
	}

	/**
	 * Queries the rows in [beginId, endId] for the event's table and stores them
	 * on the event as a list of column-name/value maps. Null column values are
	 * omitted from each row map.
	 */
	@Override
	public void onEvent(DumpEvent event) throws Exception {
		Thread.currentThread().setName("BaseDataHanderThread");
		initConn();
		PreparedStatement stmt = stmtmap.get(String.format(formatestr, event.getDump().getDb(), event.getDump().getTb()));
		// Reuse the statement just fetched instead of looking it up again.
		JdbcAssit.setPreParam(stmt, event.getBeginId(), event.getEndId());
		List<Map<String, String>> datas = new ArrayList<Map<String, String>>();
		// try-with-resources guarantees the ResultSet is closed even if reading
		// a row throws (the original leaked it on that path).
		try (ResultSet rs = stmt.executeQuery()) {
			while (rs.next()) {
				Map<String, String> datamap = new HashMap<>();
				for (String colName : event.getDump().getNeedCols()) {
					String valuestr = rs.getString(colName);
					if (valuestr != null) {
						datamap.put(colName, valuestr);
					}
				}
				datas.add(datamap);
			}
		}
		event.setDatas(datas);
	}

	/**
	 * Ensures the connection is alive, retrying once per second until it is.
	 * On reconnect, stale statements are closed (best-effort) and re-prepared
	 * on the new connection with their original SQL and fetch size.
	 */
	private void initConn() {
		while (true) {
			try {
				if (this.connection == null || connection.isClosed()) {
					this.connection = DruidAssit.getConnection(MainDump.globleDatasourceName);
					for (Map.Entry<String, PreparedStatement> entry : stmtmap.entrySet()) {
						// Close the old statement first. Bug fix: the original
						// tested isClosed() instead of !isClosed(), so it only
						// ever "closed" statements that were already closed.
						PreparedStatement old = entry.getValue();
						try {
							if (old != null && !old.isClosed()) {
								old.close();
							}
						} catch (SQLException closeEx) {
							// Best-effort: the owning connection is gone anyway.
							log.warn("failed to close stale statement", closeEx);
						}
						PreparedStatement statement = this.connection.prepareStatement(this.sqlmap.get(entry.getKey()));
						statement.setFetchSize(duanmap.get(entry.getKey()));
						entry.setValue(statement);
					}
				}
				break;
			} catch (Exception e) {
				log.error("数据库连接不上", e);
				try {
					Thread.sleep(1000);
				} catch (InterruptedException e2) {
					// Preserve interrupt status instead of swallowing it.
					Thread.currentThread().interrupt();
				}
			}
		}
	}

}
