package cn.edu.cuit.elena.nati.shell;

import java.io.BufferedReader;
import java.io.IOException;
import java.sql.SQLException;

import cn.edu.cuit.elena.common.Identifiable;
import cn.edu.cuit.elena.common.SackConstants;
import cn.edu.cuit.elena.db.DataSource;
import cn.edu.cuit.elena.db.Schema;
import cn.edu.cuit.elena.db.Schema.Column;
import cn.edu.cuit.elena.db.Schema.Table;
import cn.edu.cuit.elena.hdfs.HDFSManager;
import cn.edu.cuit.elena.trace.Tracer;

/**
 * HiveShell represents a Hive shell session. It carries the environment variables and designated
 * working directory required by the Hive CLI, and translates object-level operations (creating
 * tables, loading data) into shell command lines executed through the inherited {@link Shell}.
 * */
public class HiveShell
    extends Shell
{
    /**
     * Fragments of Hive statements used to assemble command lines.
     * <p>
     * To be continued...
     * */
    public static final String LOAD = "LOAD DATA INPATH ";
    public static final String OVERWRITE_INTO_TABLE = " OVERWRITE INTO TABLE ";
    public static final String SELECT = "SELECT ";
    public static final String FROM = " FROM ";
    public static final String CREATE = "CREATE EXTERNAL TABLE ";
    public static final String HIVE = "./hive";
    public static final String EMBEDD = "-e";
    // NOTE(review): the constant name keeps the historical misspelling ("FEILD") because it is part
    // of the public interface; do not rename without auditing all callers.
    public static final String FEILD_DELIMITER = "ROW FORMAT DELIMITED FIELDS TERMINATED BY "
        + SackConstants.FIELDS_DELIMITER;

    // Identifies the importing user; its identifier is the first component of every Hive table name.
    private Identifiable identifiable;
    // Schema of the source database; its name is the second component of every Hive table name.
    private Schema schema;

    /**
     * Creates a shell bound to a user and a data source.
     *
     * @param dataSource
     *            the database whose schema is loaded and mirrored into Hive.
     * @param identifiable
     *            should be <code>UserContext</code>.
     * @throws SQLException
     *             if the schema cannot be loaded from the data source.
     * */
    public HiveShell(DataSource dataSource, Identifiable identifiable) throws SQLException
    {
        super( null, SackConstants.HIVE_BIN, new String[] { SackConstants.HADOOP_HOME } );
        this.identifiable = identifiable;
        this.schema = dataSource.loadSchema();
    }

    /**
     * Mostly for testing and inheriting.
     * */
    protected HiveShell()
    {
        super( null, null, null );
    }

    /**
     * Creates an external Hive table corresponding to the given database table. The Hive table
     * name is a qualified name built from the importing user, the source database and the original
     * table name.
     *
     * <pre>
     *  For example:
     *      There is a table called "TABLE" in a database named "DATABASE" which
     *      was imported by user "USER"; the Hive table name is then: <strong>"USER_DATABASE_TABLE"</strong>
     * </pre>
     *
     * @param table
     *            the database table to mirror in Hive.
     * @throws SQLException
     * @throws IOException
     * */
    public void createTable( Table table ) throws SQLException, IOException
    {
        StringBuilder ddl = new StringBuilder();
        ddl.append( CREATE );
        ddl.append( qualifiedTableName( table ) );
        ddl.append( "(" );

        // Separator scheme avoids the trailing comma (the old deleteCharAt crashed on a table
        // with no columns).
        String separator = "";
        for( String columnName : table.getColumnsOrder() )
        {
            String dataType = table.getColumns().get( columnName ).getProperty( "TYPE_NAME" );
            ddl.append( separator );
            ddl.append( columnName + " " + toHiveType( dataType ) );
            separator = ",";
        }

        ddl.append( ")" );
        ddl.append( FEILD_DELIMITER );
        // The external table points at the table's data directory under the Hive root on HDFS.
        ddl.append( " LOCATION '" + HDFSManager.HIVE_ROOT + hdfsPathForTable( table ) + "'" );

        command = new String[] { HIVE, EMBEDD, ddl.toString() };

        Tracer.debugTrace( getClass(), "HiveShell create table statement : " + command[0] + command[1] + command[2],
            null );

        setCommand( command );
        runCommand();
    }

    /**
     * Maps a JDBC type name onto the corresponding Hive data type.
     *
     * @throws IllegalArgumentException
     *             if the type has no Hive mapping yet (unchecked, as before).
     * */
    private static String toHiveType( String dataType )
    {
        if( dataType.equalsIgnoreCase( "int" ) )
        {
            return "INT";
        }
        if( dataType.equalsIgnoreCase( "varchar" ) )
        {
            return "STRING";
        }
        //TODO other data types...
        throw new IllegalArgumentException( "HiveShell: incompatible data type: " + dataType );
    }

    /** @return the HDFS directory (relative to the Hive root) holding the table's data files. */
    private String hdfsPathForTable( Table table )
    {
        return "/" + identifiable.getIdentifier() + "/" + schema.getName() + "/" + table.getName();
    }

    /** @return the qualified Hive table name, i.e. USER_DATABASE_TABLE. */
    private String qualifiedTableName( Table table )
    {
        return identifiable.getIdentifier() + "_" + schema.getName() + "_" + table.getName();
    }

    /**
     * Loads the table's data files from HDFS into the matching Hive table, provided the table's
     * HDFS directory exists; otherwise the failure is only traced.
     *
     * @param table
     *            the database table whose HDFS data should be loaded into Hive.
     * @throws IOException
     * */
    public void loadDataFromHDFS( Table table ) throws IOException
    {
        String relatedHdfsPathForTable = hdfsPathForTable( table );
        String hiveTableName = qualifiedTableName( table );
        if( exists( HDFSManager.HDFS_ROOT + HDFSManager.HIVE_ROOT + relatedHdfsPathForTable ) )
        {
            loadData( HDFSManager.HIVE_ROOT + relatedHdfsPathForTable, hiveTableName );
        }
        else
        {
            //TODO if the path does not exist, what should happen next?
            Tracer.debugTrace( getClass(), "HiveShell load Data from HDFS : Cannot load data from hdfs due to "
                + HDFSManager.HDFS_ROOT + HDFSManager.HIVE_ROOT + relatedHdfsPathForTable + " does not exist!", null );
        }
    }

    /**
     * Runs one Hive LOAD DATA command per file found under the given HDFS directory.
     * <p>
     * NOTE(review): every iteration uses OVERWRITE_INTO_TABLE, so when the directory contains
     * more than one file each load replaces the previous one and only the last file's rows
     * survive — confirm whether files after the first should use plain "INTO TABLE" instead.
     * */
    private void loadData( String hdfsPath, String tableName ) throws IOException
    {
        String[] paths = HDFSManager.instance().getAllFiles( hdfsPath );

        for( String path : paths )
        {
            path = HDFSManager.toQualifiedPath( path );
            String[] command = new String[] { HIVE, EMBEDD, LOAD + "'" + path + "'" + OVERWRITE_INTO_TABLE + tableName };
            Tracer.debugTrace( getClass(), "HiveShell load data command : " + command[0] + command[1] + command[2],
                null );

            setCommand( command );
            runCommand();
        }
    }

    /** @return whether the given path exists on HDFS (delegates to HDFSManager's API). */
    private boolean exists( String path )
    {
        return HDFSManager.instance().exsits( path );
    }

    @Override
    protected String[] getExecString()
    {
        return command;
    }

    /**
     * Echoes every line of the Hive process output to stdout.
     * */
    @Override
    protected void parseExecResult( BufferedReader lines ) throws IOException
    {
        String line;
        while( ( line = lines.readLine() ) != null )
        {
            System.out.println( line );
        }
    }
}
