package com.fangzhu.dao;

import java.math.BigDecimal;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map.Entry;

import org.apache.commons.lang.StringUtils;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.jdbc.core.BatchPreparedStatementSetter;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.support.rowset.SqlRowSet;
import org.springframework.jdbc.support.rowset.SqlRowSetMetaData;
import org.springframework.stereotype.Service;

import com.alibaba.fastjson.JSON;
import com.fangzhu.dto.RiderTrail;

@Service
public class RiderTrailDao {
	// Legacy container-managed wiring, kept for reference; the template is
	// currently looked up manually below.
	// @Resource(name = "phoenixJdbcTemplate")
	// protected JdbcTemplate jdbcTemplate;

	// NOTE(review): a fresh ApplicationContext is created for every DAO
	// instance and is never closed — presumably so the class works outside a
	// Spring container; prefer the @Resource injection above when running
	// inside one. TODO confirm intended wiring.
	private ApplicationContext ctx = new ClassPathXmlApplicationContext(
			"classpath:/spring/app-context-*.xml");
	private JdbcTemplate jdbcTemplate = (JdbcTemplate) ctx
			.getBean("phoenixJdbcTemplate");

	// Hadoop client dependency when connecting from a Windows 7 dev machine;
	// this setting has no effect when running on Linux.
	static {
		System.setProperty("hadoop.home.dir", "D:/tools/hadoop");
	}

	/**
	 * Demo method: builds five hard-coded {@link RiderTrail} rows and
	 * batch-upserts them into DP.RIDER_TRAIL through Phoenix.
	 *
	 * Failures are caught and logged; nothing is rethrown.
	 */
	public void batchInsert() {
		final List<RiderTrail> result = new ArrayList<RiderTrail>();
		for (int i = 1; i < 6; i++) {
			RiderTrail riderTrail = new RiderTrail();
			riderTrail.setUserId(i);
			riderTrail.setTimestamp(System.currentTimeMillis());
			riderTrail.setLeaveTm(123456);
			riderTrail.setLatitude(331212);
			riderTrail.setLongitude(433312);
			riderTrail.setUserName("kobe");
			riderTrail.setOrderId(321);
			riderTrail.setStatus("leave");
			result.add(riderTrail);
			System.out.println(i);
		}
		try {
			System.out.println("start persist ridertrail data.....");
			jdbcTemplate
					.batchUpdate(
							"UPSERT INTO DP.RIDER_TRAIL(USER_ID,TIMESTAMP,USER_NAME,LNG,LAT,ORDER_ID,STATUS,LEAVE_TM) VALUES(?,?,?,?,?,?,?,?)",
							new BatchPreparedStatementSetter() {
								@Override
								public void setValues(PreparedStatement ps,
										int i) throws SQLException {
									// Parameter order must follow the column
									// list in the UPSERT above.
									RiderTrail riderTrail = result.get(i);
									ps.setInt(1, riderTrail.getUserId());
									ps.setLong(2, riderTrail.getTimestamp());
									ps.setString(3, riderTrail.getUserName());
									ps.setInt(4, riderTrail.getLongitude());
									ps.setInt(5, riderTrail.getLatitude());
									ps.setInt(6, riderTrail.getOrderId());
									ps.setString(7, riderTrail.getStatus());
									ps.setLong(8, riderTrail.getLeaveTm());
								}

								@Override
								public int getBatchSize() {
									return result.size();
								}
							});
			System.out.println("end persist ridertrail data.....");
		} catch (Exception e) {
			System.out.println("error");
			// Was e.getMessage() only — keep the full stack trace so
			// connection/SQL failures are diagnosable.
			e.printStackTrace();
		}
	}

	/**
	 * Smoke-test query helper: prints the column names of REDCLIFF.TEST and
	 * dumps the first three rows of dp.shop_order mapped onto
	 * {@link RiderTrail} as JSON.
	 */
	public void get() {
		// "limit 0" returns no rows but still exposes result-set metadata.
		String sql = "select * from REDCLIFF.TEST limit 0";
		SqlRowSet sqlRowSet = this.jdbcTemplate.queryForRowSet(sql);
		SqlRowSetMetaData sqlRsmd = sqlRowSet.getMetaData();
		for (String columnName : sqlRsmd.getColumnNames()) {
			System.out.println(columnName);
		}

		// Params list is currently empty; kept so conditions can be appended
		// to the query later.
		List<Object> params = new ArrayList<Object>();
		StringBuilder querySql = new StringBuilder(
				"select * from dp.shop_order limit 3 ");
		try {
			// toArray(new Object[size]) already returns Object[]; the previous
			// explicit (Object[]) cast was redundant.
			Object[] paramObjects = params.toArray(new Object[params.size()]);
			List<RiderTrail> result = jdbcTemplate.query(querySql.toString(),
					paramObjects, new BeanPropertyRowMapper<RiderTrail>(
							RiderTrail.class));
			System.out.println(JSON.toJSON(result));
		} catch (Exception e) {
			System.out.println("error");
			// Keep the full stack trace instead of just the message.
			e.printStackTrace();
		}
	}

	/**
	 * Batch-upserts canal binlog records into the given Phoenix/HBase table,
	 * dynamically declaring any MySQL columns the HBase table does not yet
	 * have (Phoenix dynamic-column syntax: "NAME TYPE" in the column list).
	 *
	 * @param table        fully qualified Phoenix table name, e.g.
	 *                     "redcliff.test". NOTE(review): concatenated into the
	 *                     SQL — identifiers cannot be bound as parameters, so
	 *                     this must come from trusted configuration, never
	 *                     user input.
	 * @param canalRecords JSON-serialized CanalRecord strings
	 */
	public void batchInsertCanalRecord(String table,
			final List<String> canalRecords) {
		// Read the HBase table structure via a zero-row query.
		String sql = "select * from " + table + " limit 0";
		SqlRowSet sqlRowSet = this.jdbcTemplate.queryForRowSet(sql);
		SqlRowSetMetaData sqlRsmd = sqlRowSet.getMetaData();
		List<String> sqlColumnNames = new ArrayList<String>();
		Collections.addAll(sqlColumnNames, sqlRsmd.getColumnNames());
		if (!sqlColumnNames.contains("BINLOGTM")) {
			sqlColumnNames.add("BINLOGTM");
		}
		// columnNames drives parameter binding (plain names only);
		// sqlColumnNames additionally carries "NAME TYPE" dynamic-column
		// entries used in the upsert's column list.
		final List<String> columnNames = new ArrayList<String>(sqlColumnNames);

		// Union of the MySQL columns seen in the records with the HBase ones.
		for (String record : canalRecords) {
			CanalRecord canalRecord = CanalRecordUtils
					.string2CanalRecord(record);
			for (Entry<String, String> entry : canalRecord.getColumnsType()
					.entrySet()) {
				String column = entry.getKey().toUpperCase();
				String columnType = mysqlColumnsType2HBaseType(entry
						.getValue());
				String dynamicColumn = column + " " + columnType;
				if (!sqlColumnNames.contains(column)
						&& !sqlColumnNames.contains(dynamicColumn)) {
					sqlColumnNames.add(dynamicColumn);
				}
				if (!columnNames.contains(column)) {
					columnNames.add(column);
				}
			}
		}

		// Build "upsert into TABLE(COL, ...) values(?,?,...)". The List
		// toString() "[a, b]" is reshaped into the "(a, b)" column list.
		String sqlColumnsString = sqlColumnNames.toString().replace("[", "(")
				.replace("]", ")");
		StringBuilder argTag = new StringBuilder();
		for (int i = 0; i < sqlColumnNames.size(); i++) {
			argTag.append(i == 0 ? "?" : ",?");
		}
		String upsertSQL = "upsert into " + table + sqlColumnsString
				+ " values(" + argTag + ")";
		System.out.println(upsertSQL);

		// Batch execution; per-row binding is delegated to
		// setCanalRecordSQLValue.
		jdbcTemplate.batchUpdate(upsertSQL, new BatchPreparedStatementSetter() {
			@Override
			public void setValues(PreparedStatement ps, int i)
					throws SQLException {
				setCanalRecordSQLValue(ps, i, canalRecords, columnNames);
			}

			@Override
			public int getBatchSize() {
				return canalRecords.size();
			}
		});
	}

	/**
	 * Batch-deletes rows matched by each record's primary key whose stored
	 * binlog timestamp is older than the record's execute time:
	 * "delete from T where K1=? and K2=? and binlogtm&lt;?".
	 *
	 * @param table        fully qualified Phoenix table name (trusted input
	 *                     only — it is concatenated into the SQL)
	 * @param canalRecords JSON-serialized CanalRecord strings; the key-column
	 *                     list is taken from the first record
	 */
	public void batchDeleteCanalRecord(String table,
			final List<String> canalRecords) {
		CanalRecord canalRecord = CanalRecordUtils
				.string2CanalRecord(canalRecords.get(0));
		final List<String> keysList = new ArrayList<String>();
		Collections.addAll(keysList, canalRecord.getKeys().split(","));
		// BINLOGTM is always the last condition and uses "<" instead of "=".
		keysList.add("BINLOGTM");
		StringBuilder deleteSqlSB = new StringBuilder();
		deleteSqlSB.append("delete from ").append(table).append(" where ");
		for (String key : keysList) {
			if (!"BINLOGTM".equals(key)) {
				// BUG FIX: the original appended "=? and" with no trailing
				// space, producing invalid SQL ("k1=? andk2=?") for composite
				// keys. Single-key output is unchanged.
				deleteSqlSB.append(key).append("=? and ");
			} else {
				deleteSqlSB.append("binlogtm<?");
			}
		}
		jdbcTemplate.batchUpdate(deleteSqlSB.toString(),
				new BatchPreparedStatementSetter() {
					@Override
					public void setValues(PreparedStatement ps, int i)
							throws SQLException {
						setCanalRecordSQLValue(ps, i, canalRecords, keysList);
					}

					@Override
					public int getBatchSize() {
						return canalRecords.size();
					}

				});
	}

	/**
	 * Binds one canal record's column values onto a PreparedStatement,
	 * converting each value according to its MySQL column type.
	 *
	 * @param ps           statement to bind; parameter index j corresponds to
	 *                     columnNames.get(j - 1)
	 * @param i            index of the record in canalRecords
	 * @param canalRecords JSON-serialized CanalRecord strings
	 * @param columnNames  columns to bind; "BINLOGTM" is bound from the
	 *                     record's binlog execute time
	 * @return the same PreparedStatement, for chaining
	 */
	public PreparedStatement setCanalRecordSQLValue(PreparedStatement ps,
			int i, List<String> canalRecords, List<String> columnNames) {
		CanalRecord canalRecord = CanalRecordUtils
				.string2CanalRecord(canalRecords.get(i));
		HashMap<String, String> afterColumn = canalRecord.getAfterColumn();
		HashMap<String, String> columnTypes = canalRecord.getColumnsType();
		long binlogExecuteTm = canalRecord.getExecuteTime();
		try {
			// JDBC parameters are 1-based.
			for (int j = 1; j <= columnNames.size(); j++) {
				String columnName = columnNames.get(j - 1).toLowerCase();
				// Removed a leftover per-column debug println here.
				if ("binlogtm".equals(columnName)) {
					ps.setLong(j, binlogExecuteTm);
					continue;
				}
				String rawType = columnTypes.get(columnName);
				// ROBUSTNESS: a column present in HBase but absent from this
				// record's binlog metadata previously caused an uncaught
				// NullPointerException; bind NULL instead.
				if (rawType == null) {
					ps.setNull(j, Types.NULL);
					continue;
				}
				// Strip the length suffix: "bigint(11)" -> "BIGINT".
				String columnType = rawType.split("\\(")[0].toUpperCase();
				// HashMap.get already returns null for absent keys, so the
				// previous containsKey pre-check was redundant.
				String value = afterColumn.get(columnName);
				if (StringUtils.isNotBlank(value)) {
					if ("TINYINT".equals(columnType)) {
						ps.setByte(j, Byte.parseByte(value));
					} else if ("SMALLINT".equals(columnType)) {
						ps.setShort(j, Short.parseShort(value));
					} else if ("MEDIUMINT".equals(columnType)
							|| "INT".equals(columnType)
							|| "INTEGER".equals(columnType)) {
						ps.setInt(j, Integer.parseInt(value));
					} else if ("BIGINT".equals(columnType)
							|| "TIMESTAMP".equals(columnType)) {
						ps.setLong(j, Long.parseLong(value));
					} else if ("FLOAT".equals(columnType)) {
						ps.setFloat(j, Float.parseFloat(value));
					} else if ("DOUBLE".equals(columnType)) {
						ps.setDouble(j, Double.parseDouble(value));
					} else if ("DECIMAL".equals(columnType)) {
						ps.setBigDecimal(j, new BigDecimal(value));
					} else {
						// DATETIME and any unrecognized type are bound as
						// text (identical branches in the original merged).
						ps.setString(j, value);
					}
				} else {
					ps.setNull(j, Types.NULL);
				}
			}
		} catch (SQLException e) {
			// Deliberately swallowed: the method declares no throws clause,
			// so callers see the failure when the batch update executes.
			e.printStackTrace();
		}
		return ps;
	}

	/**
	 * Maps a MySQL column type declaration (e.g. "bigint(11)", "decimal(10,2)")
	 * to the corresponding Phoenix/HBase type name. Unknown types map to
	 * VARCHAR.
	 *
	 * Made static: the method uses no instance state, and existing
	 * instance-style call sites still compile unchanged.
	 *
	 * @param mysqlColumnType MySQL type string, optionally with a length or
	 *                        precision suffix in parentheses
	 * @return the Phoenix type name, always upper-case
	 */
	public static String mysqlColumnsType2HBaseType(String mysqlColumnType) {
		// Strip any "(...)" suffix and normalize case: "int(11)" -> "INT".
		String mysqlType = mysqlColumnType.split("\\(")[0].toUpperCase();
		if ("MEDIUMINT".equals(mysqlType) || "INT".equals(mysqlType)
				|| "INTEGER".equals(mysqlType)) {
			return "INTEGER";
		} else if ("BIGINT".equals(mysqlType)
				|| "TIMESTAMP".equals(mysqlType)) {
			return "BIGINT";
		} else if ("TINYINT".equals(mysqlType) || "SMALLINT".equals(mysqlType)
				|| "FLOAT".equals(mysqlType) || "DOUBLE".equals(mysqlType)
				|| "DECIMAL".equals(mysqlType) || "DATETIME".equals(mysqlType)) {
			// These MySQL names are passed through unchanged (the original
			// listed each in its own identical branch).
			return mysqlType;
		} else {
			return "VARCHAR";
		}
	}

	/**
	 * Smoke test: executes two upserts and one delete against redcliff.test
	 * in a single JDBC batch.
	 */
	public void batchTest() {
		String[] statements = new String[] {
				"upsert into redcliff.test(id,name,indate,num) values(21,'fz','2016-05-20 15:25:43',3)",
				"upsert into redcliff.test(id,name,indate,num) values(23,'fz','2016-05-20 15:25:43',3)",
				"delete from redcliff.test where id=21" };
		this.jdbcTemplate.batchUpdate(statements);
	}

	/**
	 * Ad-hoc entry point for exercising the DAO against a live Phoenix
	 * cluster; the canal-record samples feed the commented-out calls.
	 */
	public static void main(String[] args) {
		String insertRecord = "{\"afterColumn\":{\"id\":\"98\",\"name\":\"ff\",\"indate\":\"2016-05-16 15:21:43\",\"num\":\"23\",\"salary\":\"6666.00\"},\"beforeColumn\":{},\"database\":\"redcliff\",\"dmlType\":\"insert\",\"executeTime\":1463385768000,\"table\":\"test\",\"updateFields\":\"\",\"columnsType\":{\"id\":\"bigint(11)\",\"name\":\"varchar(128)\",\"indate\":\"datetime\",\"num\":\"int(11)\",\"salary\":\"decimal(10,2)\"},\"keys\":\"id\"}";
		String insertRecord2 = "{\"afterColumn\":{\"id\":\"99\",\"name\":\"fgg\",\"indate\":\"2016-05-16 15:21:43\",\"num\":\"23\",\"salary\":\"6666.121\"},\"beforeColumn\":{},\"database\":\"redcliff\",\"dmlType\":\"insert\",\"executeTime\":1463385766000,\"table\":\"test\",\"updateFields\":\"\",\"columnsType\":{\"id\":\"bigint(11)\",\"name\":\"varchar(128)\",\"indate\":\"datetime\",\"num\":\"int(11)\",\"salary\":\"decimal(10,2)\"},\"keys\":\"id\"}";
		List<String> records = new ArrayList<String>();
		records.add(insertRecord);
		records.add(insertRecord2);
		RiderTrailDao dao = new RiderTrailDao();
		// dao.batchInsertCanalRecord("redcliff.test", records);
		// dao.batchDeleteCanalRecord("redcliff.test", records);
		dao.batchTest();
	}

}
