package com.mvc.framework.dao;

import java.io.Serializable;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Properties;

import org.apache.log4j.Logger;
import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.MappingException;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.SessionImplementor;
import org.hibernate.engine.TransactionHelper;
import org.hibernate.id.Configurable;
import org.hibernate.id.IdentifierGenerationException;
import org.hibernate.id.PersistentIdentifierGenerator;
import org.hibernate.mapping.Table;
import org.hibernate.type.Type;
import org.hibernate.util.PropertiesHelper;

/**
 * A database table backed primary key generator.
 * Parameters are as follows — table: the table name, column: the column name,
 * cache: the number of identifiers reserved per database round trip.
 * @author pubx
 */
public class TableCachedGenerator extends TransactionHelper
	implements PersistentIdentifierGenerator, Configurable {

	/** Seed value inserted into the generator table by {@link #sqlCreateStrings(Dialect)}. */
	private static final int DEFAULT_MAX_LO = 10000;

	/* COLUMN and TABLE should be renamed but it would break the public API */
	/** The column parameter */
	public static final String COLUMN = "column";

	/** Default column name */
	public static final String DEFAULT_COLUMN_NAME = "next_id";

	/** The table parameter */
	public static final String TABLE = "table";

	/** Default table name */
	public static final String DEFAULT_TABLE_NAME = "hibernate_unique_key";

	/** Default number of identifiers reserved per database round trip. */
	public static final int DEFAULT_CACHE_SIZE = 20;

	/** The max_lo parameter: initial in-memory high watermark. */
	public static final String MAX_LO = "max_lo";

	/** The cache parameter: how many identifiers to reserve per select/update round trip. */
	public static final String CACHE = "cache";

	// NOTE(review): hi is never assigned anywhere in this class, so it is always 0
	// and generate() effectively returns lo alone — confirm whether a subclass or
	// other code is expected to set it.
	private int hi;
	private int lo;      // next identifier to hand out
	private int maxLo;   // highest identifier reserved from the table so far
	private int cache;   // reservation block size

	private static final Logger LOGGER = Logger.getLogger(TableCachedGenerator.class);

	private String tableName;
	private String columnName;
	private String query;   // "select <column> from <table> ... for update"
	private String update;  // "update <table> set <column> = ? where <column> = ?"

	/** No-arg constructor; {@link #configure(Type, Properties, Dialect)} must be called before use. */
	public TableCachedGenerator() {
	}

	/** Convenience constructor that configures the generator immediately. */
	public TableCachedGenerator(Type type, Properties params, Dialect dialect) {
		configure(type, params, dialect);
	}

	/**
	 * Generate the next identifier, reserving a new block of {@code cache} values
	 * from the database table whenever the current block is exhausted.
	 *
	 * @param session the session, used to obtain a new transaction for the table access
	 * @param object the entity being saved (unused)
	 * @return the next identifier (an autoboxed {@code int})
	 * @throws HibernateException if the hi value cannot be obtained
	 */
	public synchronized Serializable generate(SessionImplementor session, Object object)
		throws HibernateException {
		if (maxLo < 1) {
			// First use (or misconfigured max_lo): reserve a block from the table.
			maxLo = ( (Integer) doWorkInNewTransaction(session) ).intValue();
			// keep the behavior consistent even for boundary usages:
			// FIX — the original discarded the result of this retry, leaving
			// maxLo == 0 and handing out non-positive identifiers below.
			if (maxLo == 0) {
				maxLo = ( (Integer) doWorkInNewTransaction(session) ).intValue();
			}
			lo = maxLo - cache + 1;
		}
		if (lo > maxLo) {
			// Current block exhausted: reserve the next block.
			maxLo = ( (Integer) doWorkInNewTransaction(session) ).intValue();
			lo = maxLo - cache + 1;
			LOGGER.debug("new hi value: " + maxLo);
		}
		return hi + lo++;
	}

	/**
	 * DDL to create and seed the generator table.
	 * NOTE(review): the insert seeds both columns with DEFAULT_MAX_LO and the
	 * second column (next_object_id) is not otherwise referenced here — confirm
	 * it is consumed elsewhere.
	 */
	public String[] sqlCreateStrings(Dialect dialect) {
		return new String[] {
			dialect.getCreateTableString() + " " + tableName + " ( " + columnName + " " + dialect.getTypeName(Types.INTEGER)
						+ ",next_object_id " + dialect.getTypeName(Types.INTEGER) + " )",
			"insert into " + tableName + " values ( "  + DEFAULT_MAX_LO + " , "  + DEFAULT_MAX_LO + " )"
		};
	}

	/** DDL to drop the generator table, honoring the dialect's "if exists" placement. */
	public String[] sqlDropStrings(Dialect dialect) {
		StringBuffer sqlDropString = new StringBuffer( "drop table " );
		if ( dialect.supportsIfExistsBeforeTableName() ) {
			sqlDropString.append( "if exists " );
		}
		sqlDropString.append( tableName ).append( dialect.getCascadeConstraintsString() );
		if ( dialect.supportsIfExistsAfterTableName() ) {
			sqlDropString.append( " if exists" );
		}
		return new String[] { sqlDropString.toString() };
	}

	/** The table name uniquely identifies this generator for schema export. */
	public Object generatorKey() {
		return tableName;
	}

	/**
	 * Atomically read the current hi value and advance it by {@code cache}.
	 * The select-then-conditional-update loop retries while another process
	 * wins the race (update matches zero rows), which keeps the
	 * read-modify-write atomic even with no transaction or read-committed
	 * isolation.
	 *
	 * @param conn the JDBC connection supplied by {@link TransactionHelper}
	 * @param sql unused (required by the {@link TransactionHelper} contract)
	 * @return the hi value read before the increment
	 * @throws SQLException if the select or update fails
	 */
	public Serializable doWorkInCurrentTransaction(Connection conn, String sql) throws SQLException {
		int result;
		int rows;
		do {
			PreparedStatement qps = conn.prepareStatement(query);
			try {
				ResultSet rs = qps.executeQuery();
				if ( !rs.next() ) {
					String err = "could not read a hi value - you need to populate the table: " + tableName;
					LOGGER.error(err);
					throw new IdentifierGenerationException(err);
				}
				result = rs.getInt(1);
				rs.close();
			}
			catch (SQLException sqle) {
				LOGGER.error("could not read a hi value", sqle);
				throw sqle;
			}
			finally {
				// Closing the statement also releases its ResultSet.
				qps.close();
			}

			PreparedStatement ups = conn.prepareStatement(update);
			try {
				ups.setInt( 1, result + cache );
				ups.setInt( 2, result );
				rows = ups.executeUpdate();
			}
			catch (SQLException sqle) {
				LOGGER.error("could not update hi value in: " + tableName, sqle);
				throw sqle;
			}
			finally {
				ups.close();
			}
		}
		while (rows==0);
		return Integer.valueOf(result);
	}

	/**
	 * Read the table/column/cache/max_lo parameters and pre-build the
	 * select-for-update and conditional-update SQL.
	 *
	 * @throws MappingException per the {@link Configurable} contract
	 */
	public void configure(Type type, Properties params, Dialect dialect) throws MappingException {
		tableName = PropertiesHelper.getString(TABLE, params, DEFAULT_TABLE_NAME);
		columnName = PropertiesHelper.getString(COLUMN, params, DEFAULT_COLUMN_NAME);
		cache = PropertiesHelper.getInt(CACHE, params, DEFAULT_CACHE_SIZE);
		String schemaName = params.getProperty(SCHEMA);
		String catalogName = params.getProperty(CATALOG);

		// Only qualify the table name if the mapping did not already do so.
		if ( tableName.indexOf( '.' )<0 ) {
			tableName = Table.qualify( catalogName, schemaName, tableName );
		}

		query = "select " +
			columnName +
			" from " +
			dialect.appendLockHint(LockMode.UPGRADE, tableName) +
			dialect.getForUpdateString();

		update = "update " +
			tableName +
			" set " +
			columnName +
			" = ? where " +
			columnName +
			" = ?";
		maxLo = PropertiesHelper.getInt(MAX_LO, params, 0);
		lo = maxLo + 1; // so we "clock over" on the first invocation
    }
}