/**
 * 
 */
package gopi.labs.database.postgresql;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.List;
import java.util.Properties;

import gopi.labs.database.DataStream;
import gopi.labs.database.DataStreamMapping;
import gopi.labs.exceptions.InvalidDataRecord;

/**
 * HSQLDB-backed implementation of {@link DataStream}: persists data-stream
 * metadata (the stream row, its attributes, and its column mappings) into
 * the {@code metadata} schema via JDBC.
 *
 * <p>NOTE(review): the class is named Hsqldb but lives in the
 * {@code ...postgresql} package — confirm which backend is intended.
 *
 * @author GS4
 */
public class HsqldbDataStream extends DataStream {

	/** HSQLDB error code raised on a unique-constraint violation. */
	private static final int HSQLDB_CONSTRAINT_VIOLATION = -104;

	public HsqldbDataStream(String name) {
		super(name);
	}

	/**
	 * Loads connection parameters from {@code config.properties} in the
	 * current working directory.
	 *
	 * @throws IOException if the file is missing or unreadable
	 */
	@Override
	public void readParams() throws IOException {
		this.properties = new Properties();
		// try-with-resources: the original leaked the stream on every call.
		try (InputStream is = new FileInputStream("config.properties")) {
			properties.load(is);
		}
	}

	/**
	 * Creates the metadata rows for this stream: the stream row itself, its
	 * attributes, and any column mappings not already present; commits at
	 * the end.
	 *
	 * @throws SQLException       on any database error
	 * @throws InvalidDataRecord  if a stream with the same
	 *                            cloud/model/name already exists
	 */
	@Override
	public void create() throws SQLException, InvalidDataRecord {
		// Parameterized queries replace the original string concatenation,
		// which was injectable and broke on values containing quotes.
		try (PreparedStatement ps = this.connection
				.prepareStatement("select count(*) from metadata.data_stream "
						+ "where name = ? and cloud_name = ? and data_model_name = ?")) {
			ps.setString(1, this.getName());
			ps.setString(2, this.getCloudSpace());
			ps.setString(3, this.getDataModel());
			try (ResultSet rs = ps.executeQuery()) {
				// try-with-resources also fixes the original leak: it threw
				// before closing rs/stmt when the stream already existed.
				if (rs.next() && rs.getInt(1) != 0) {
					throw new InvalidDataRecord(this.getCloudSpace() + "."
							+ this.getDataModel() + "." + this.getName()
							+ " already exists");
				}
			}
		}

		try (PreparedStatement ps = this.connection
				.prepareStatement("insert into metadata.data_stream "
						+ "(cloud_name, data_model_name, name, password) "
						+ "values (?, ?, ?, ?)")) {
			ps.setString(1, this.getCloudSpace());
			ps.setString(2, this.getDataModel());
			ps.setString(3, this.getName());
			ps.setString(4, this.getPassword());
			ps.execute();
		}

		insertUpdateAttributes(this.getAttributes());

		for (DataStreamMapping mapping : this.getDataStreamMapping()) {
			if (!checkDataStreamMappingExists(mapping)) {
				insertMapping(mapping);
			}
		}
		this.connection.commit();
	}

	/**
	 * Inserts one column-mapping row for this stream. Shared by
	 * {@link #create()} and {@link #alter()}, which previously duplicated
	 * the same six-column insert.
	 */
	private void insertMapping(DataStreamMapping mapping) throws SQLException {
		try (PreparedStatement ps = this.connection
				.prepareStatement("insert into metadata.data_stream_mapping "
						+ "(cloud_name, data_model_name, name, "
						+ "src_column_name, dst_column_name, position) "
						+ "values (?, ?, ?, ?, ?, ?)")) {
			ps.setString(1, this.getCloudSpace());
			ps.setString(2, this.getDataModel());
			ps.setString(3, this.getName());
			ps.setString(4, mapping.getSrcColumnName());
			ps.setString(5, mapping.getDstColumnName());
			// The original inserted position as a quoted literal; setObject
			// keeps working whatever type getPosition() returns.
			ps.setObject(6, mapping.getPosition());
			ps.execute();
		}
	}

	/**
	 * @return {@code true} when a mapping for the given source column is
	 *         already recorded for this stream
	 * @throws SQLException on any database error
	 */
	private boolean checkDataStreamMappingExists(
			DataStreamMapping dataStreamMapping) throws SQLException {
		try (PreparedStatement ps = this.connection
				.prepareStatement("select count(*) from metadata.data_stream_mapping "
						+ "where cloud_name = ? and data_model_name = ? "
						+ "and name = ? and src_column_name = ?")) {
			ps.setString(1, this.getCloudSpace());
			ps.setString(2, this.getDataModel());
			ps.setString(3, this.getName());
			ps.setString(4, dataStreamMapping.getSrcColumnName());
			try (ResultSet rs = ps.executeQuery()) {
				// count(*) always yields one row; any positive count means
				// the mapping exists (original tested == 1 exactly).
				return rs.next() && rs.getInt(1) > 0;
			}
		}
	}

	/**
	 * Re-synchronizes this stream's metadata: updates attributes, then
	 * replaces all column mappings (delete + re-insert) and commits.
	 *
	 * @throws SQLException       on any database error other than a
	 *                            tolerated duplicate-row violation
	 * @throws InvalidDataRecord  propagated from attribute handling
	 */
	@Override
	public void alter() throws SQLException, InvalidDataRecord {
		insertUpdateAttributes(this.getAttributes());

		try (PreparedStatement ps = this.connection
				.prepareStatement("delete from metadata.data_stream_mapping "
						+ "where cloud_name = ? and data_model_name = ? and name = ?")) {
			ps.setString(1, this.getCloudSpace());
			ps.setString(2, this.getDataModel());
			ps.setString(3, this.getName());
			ps.execute();
		}

		for (DataStreamMapping mapping : this.getDataStreamMapping()) {
			try {
				insertMapping(mapping);
			} catch (SQLException e) {
				// Preserved original behavior: tolerate duplicate rows
				// (unique-constraint violation), rethrow everything else.
				// The debug println that dumped the full SQL is removed.
				if (e.getErrorCode() != HSQLDB_CONSTRAINT_VIOLATION) {
					throw e;
				}
				System.out.println(e.getMessage());
			}
		}

		this.connection.commit();
	}

	/**
	 * Deletes this stream's attributes, column mappings, and the stream row
	 * itself, then commits.
	 *
	 * @throws SQLException on any database error
	 */
	@Override
	public void drop() throws SQLException {
		deleteScoped("delete from metadata.data_stream_attributes "
				+ "where name = ? and data_model_name = ? and cloud_name = ?");
		// BUG FIX: the original mapping delete omitted the name filter, so
		// dropping one stream wiped the mappings of EVERY stream in the
		// same cloud/data-model.
		deleteScoped("delete from metadata.data_stream_mapping "
				+ "where name = ? and data_model_name = ? and cloud_name = ?");
		deleteScoped("delete from metadata.data_stream "
				+ "where name = ? and data_model_name = ? and cloud_name = ?");

		this.connection.commit();
	}

	/**
	 * Executes a delete whose placeholders are, in order:
	 * name, data_model_name, cloud_name.
	 */
	private void deleteScoped(String sql) throws SQLException {
		try (PreparedStatement ps = this.connection.prepareStatement(sql)) {
			ps.setString(1, this.getName());
			ps.setString(2, this.getDataModel());
			ps.setString(3, this.getCloudSpace());
			ps.execute();
		}
	}

}