package cn.edu.cuit.elena.nati.shell;

import java.io.BufferedReader;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Map.Entry;

import cn.edu.cuit.elena.common.Identifiable;
import cn.edu.cuit.elena.common.SackConstants;
import cn.edu.cuit.elena.db.DataSource;
import cn.edu.cuit.elena.db.Schema;
import cn.edu.cuit.elena.db.Schema.Table;
import cn.edu.cuit.elena.trace.Tracer;

/**
 * Shell wrapper that builds and runs Sqoop command lines for moving data
 * between a JDBC {@link DataSource} and HDFS.
 *
 * <p>Import commands are written under
 * {@code HDFS_HIVE_TABLES_ROOT_DIR/<identifier>/<database>/<table>}.</p>
 */
public class SqoopShell
    extends Shell
{

    /**
     * Sqoop import control arguments. See the
     * <a href="http://archive.cloudera.com/cdh/3/sqoop/SqoopUserGuide.html">Sqoop User Guide</a>.
     *
     * <pre>
     * --append  Append data to an existing dataset in HDFS
     * --as-avrodatafile    Imports data to Avro Data Files
     * --as-sequencefile    Imports data to SequenceFiles
     * --as-textfile    Imports data as plain text (default)
     * --boundary-query &lt;statement&gt;     Boundary query to use for creating splits
     * --columns &lt;col,col,col…&gt;     Columns to import from table
     * --direct     Use direct import fast path
     * --direct-split-size &lt;n&gt;  Split the input stream every n bytes when importing in direct mode
     * --inline-lob-limit &lt;n&gt;   Set the maximum size for an inline LOB
     * -m,--num-mappers &lt;n&gt;     Use n map tasks to import in parallel
     * -e,--query &lt;statement&gt;   Import the results of statement.
     * --split-by &lt;column-name&gt;     Column of the table used to split work units
     * --table &lt;table-name&gt;     Table to read
     * --target-dir &lt;dir&gt;   HDFS destination dir
     * --warehouse-dir &lt;dir&gt;    HDFS parent for table destination
     * --where &lt;where clause&gt;   WHERE clause to use during import
     * -z,--compress    Enable compression
     * --compression-codec &lt;c&gt;  Use Hadoop codec (default gzip)
     * --null-string &lt;null-string&gt;  The string to be written for a null value for string columns
     * --null-non-string &lt;null-string&gt;  The string to be written for a null value for non-string columns
     * </pre>
     * */
    public static final String IMPORT = "./sqoop import ";
    public static final String CONNECT = " --connect ";
    public static final String USERNAME = " --username ";
    public static final String PASSWORD = " --password ";
    public static final String TABLE = " --table ";
    public static final String LOCATION = " --target-dir ";
    public static final String NON_PK = " -m 1";
    public static final String APPEND = " --append";
    public static final String FIELD_DELIMITER = " --fields-terminated-by ";

    /** Source database the Sqoop commands read from / write to. */
    private final DataSource dataSource;
    /** Supplies the per-user path segment under the HDFS warehouse root. */
    private final Identifiable identifier;

    /**
     * Creates a shell bound to the given database and user identity.
     *
     * @param dataSource   database whose tables are imported/exported
     * @param identifiable supplies the HDFS sub-directory for this user
     */
    public SqoopShell(DataSource dataSource, Identifiable identifiable)
    {
        super( null, SackConstants.SQOOP_BIN, new String[] { SackConstants.HADOOP_HOME } );

        this.dataSource = dataSource;
        identifier = identifiable;
    }

    /**
     * Mostly, use to testing.
     * */
    protected SqoopShell()
    {
        this( null, null );
    }

    @Override
    protected String[] getExecString()
    {
        return command;
    }

    /**
     * Echoes every line of the Sqoop process output to stdout.
     */
    @Override
    protected void parseExecResult( BufferedReader lines ) throws IOException
    {
        String line;
        while( ( line = lines.readLine() ) != null )
        {
            System.out.println( line );
        }
    }

    /**
     * Imports the named table into HDFS.
     *
     * @param tableName name of a table in the data source's schema
     * @throws IllegalArgumentException if the schema contains no such table
     *                                  (previously an opaque NullPointerException)
     * @throws SQLException if loading the schema fails
     * @throws IOException  if running the Sqoop command fails
     */
    public void importTableToHDFS( String tableName ) throws SQLException, IOException
    {
        Table table = dataSource.loadSchema().getTables().get( tableName );
        if( table == null )
        {
            throw new IllegalArgumentException( "No such table in schema: " + tableName );
        }
        importTableToHDFS( table );
    }

    private void setCommandLine( String... strings )
    {
        this.command = strings;
    }

    /**
     * Builds and runs a {@code sqoop import} command for one table.
     *
     * <p>Target path layout: root dir + /username/database name/table name.
     * Uses {@code -m 1} so tables without a primary key can be imported,
     * and {@code --append} so repeated runs add to the existing dataset.</p>
     *
     * @throws SQLException if loading the schema fails
     * @throws IOException  if running the Sqoop command fails
     */
    public void importTableToHDFS( Table table ) throws SQLException, IOException
    {
        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append( IMPORT );

        //Set JDBC connection
        stringBuilder.append( CONNECT );
        stringBuilder.append( dataSource.getUrl() );

        //Table name to be imported
        stringBuilder.append( TABLE );
        stringBuilder.append( table.getName() );

        //Root dir of HIVE tables in HDFS : /user/quintus/
        stringBuilder.append( LOCATION );
        stringBuilder.append( SackConstants.HDFS_HIVE_TABLES_ROOT_DIR );

        //Child dir, root dir + /username/database name/table name
        stringBuilder.append( "/" + identifier.getIdentifier() );
        stringBuilder.append( "/" + dataSource.loadSchema().getName() + "/" );
        stringBuilder.append( table.getName() );

        //Database username and password
        //NOTE(review): password lands on the command line and in the trace log —
        //consider --password-file / keeping it out of debug traces.
        stringBuilder.append( USERNAME );
        stringBuilder.append( dataSource.getUserName() );
        stringBuilder.append( PASSWORD );
        stringBuilder.append( dataSource.getPassWord() );

        //Non-primary key import model
        stringBuilder.append( NON_PK );

        //Append
        stringBuilder.append( APPEND );

        stringBuilder.append( FIELD_DELIMITER + SackConstants.FIELDS_DELIMITER );

        String commands = stringBuilder.toString();

        setCommandLine( commands );

        Tracer.debugTrace( getClass(), "SqoopShell Command : " + commands, null );
        runCommand();
    }

    /**
     * Imports every table of the data source's schema into HDFS, one
     * Sqoop invocation per table.
     *
     * @throws SQLException if loading the schema fails
     * @throws IOException  if running a Sqoop command fails
     */
    public void importDataSourceToHDFS() throws SQLException, IOException
    {
        Schema schema = dataSource.loadSchema();
        for( Entry<String, Table> entry : schema.getTables().entrySet() )
        {
            Table table = entry.getValue();
            importTableToHDFS( table );
        }
    }

    /**
     * Builds and runs a {@code sqoop export} command that writes the data
     * under {@code hdfsPath} back into the given database table.
     *
     * @param hdfsPath  HDFS directory holding the exported data
     * @param tableName destination table in the data source
     * @throws IOException if running the Sqoop command fails
     */
    public void exportToDatabase( String hdfsPath, String tableName ) throws IOException
    {

        StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append( "./sqoop export --connect " );
        stringBuilder.append( dataSource.getUrl() );
        stringBuilder.append( " --username " );
        stringBuilder.append( dataSource.getUserName() );
        stringBuilder.append( " --password " );
        stringBuilder.append( dataSource.getPassWord() );
        stringBuilder.append( " --table " );
        stringBuilder.append( tableName );
        stringBuilder.append( " --export-dir " );
        stringBuilder.append( hdfsPath );
        //BUGFIX: the original appended " --fields-terminated-by \'" — the flag
        //followed by a lone unbalanced quote and no delimiter value, producing a
        //malformed command. Use the same delimiter the import path uses.
        stringBuilder.append( FIELD_DELIMITER + SackConstants.FIELDS_DELIMITER );

        String commands = stringBuilder.toString();

        Tracer.debugTrace( SqoopShell.class, "Export command : " + commands, null );

        setCommand( commands );
        runCommand();
    }
}
