package cn.edu.cuit.elena.transaction.query;

import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;

import cn.edu.cuit.elena.common.Configuration;
import cn.edu.cuit.elena.common.Identifiable;
import cn.edu.cuit.elena.common.Result;
import cn.edu.cuit.elena.common.SackConstants;
import cn.edu.cuit.elena.db.DataSource;
import cn.edu.cuit.elena.db.DataSourceFactory;
import cn.edu.cuit.elena.hdfs.HDFSManager;
import cn.edu.cuit.elena.nati.shell.SqoopShell;
import cn.edu.cuit.elena.trace.Tracer;
import cn.edu.cuit.elena.transaction.controller.IdentifiedRunnable;
import cn.edu.cuit.elena.transaction.controller.Interpreter;
import cn.edu.cuit.elena.transaction.controller.TaskMonitor;

/**
 * Submits Hive queries and Sqoop-based Hive exports to a shared thread pool
 * and registers each submitted task with the {@link TaskMonitor} so its
 * progress can be tracked by identifier.
 *
 * <p>Thread-safety: intended to be used as a process-wide singleton obtained
 * via {@link #getInstance(TaskMonitor)}.
 */
public class HiveQueryExecutor
{
    // volatile so a lazily-created instance is safely published to other threads.
    private static volatile HiveQueryExecutor hiveQueryExecutor;

    private final TaskMonitor taskMonitor;

    // Unbounded cached pool: each submitted query/export gets its own worker thread.
    private final ThreadPoolExecutor server =
        (ThreadPoolExecutor) Executors.newCachedThreadPool( Executors.defaultThreadFactory() );

    private HiveQueryExecutor(TaskMonitor taskMonitor)
    {
        this.taskMonitor = taskMonitor;
    }

    /**
     * Returns the shared executor, creating it on first use.
     *
     * <p>BUG FIX: the original implementation never assigned the newly created
     * instance to the static field, so the field stayed {@code null} forever and
     * every call constructed a fresh executor (and a fresh thread pool). Creation
     * is now synchronized and the instance is cached. The {@code taskMonitor}
     * argument is only used on the very first call, as before.
     *
     * @param taskMonitor monitor used to track tasks (effective on first call only)
     * @return the single shared {@code HiveQueryExecutor}
     */
    public static synchronized HiveQueryExecutor getInstance( TaskMonitor taskMonitor )
    {
        if( hiveQueryExecutor == null )
        {
            hiveQueryExecutor = new HiveQueryExecutor( taskMonitor );
        }
        return hiveQueryExecutor;
    }

    /**
     * Submits an asynchronous export of an HDFS path into an external database
     * table via Sqoop and registers the task with the monitor.
     *
     * @param taskId    identifier under which the Sqoop shell reports itself
     * @param hdfsPath  HDFS directory to export
     * @param username  database user
     * @param password  database password
     * @param url       JDBC URL of the target database
     * @param driver    JDBC driver class name
     * @param tableName target table name
     * @throws Exception if the export task cannot be constructed
     */
    public void doExport( String taskId, String hdfsPath, String username, String password, String url, String driver,
        String tableName ) throws Exception
    {
        HiveExportTaskImp task = new HiveExportTaskImp( taskId, hdfsPath, username, password, url, driver, tableName );
        server.submit( task );
        taskMonitor.monitoringTask( task );
    }

    /**
     * Submits an asynchronous HQL query and registers the task with the monitor.
     *
     * @param taskId identifier of the task for later progress/result lookup
     * @param hql    the HQL statement to execute
     */
    public void doQuery( String taskId, String hql )
    {
        Tracer.debugTrace( HiveQueryExecutor.class, "Statement : " + hql, null );
        HiveIdentifiableRunnableImp task = new HiveIdentifiableRunnableImp( taskId, hql );
        server.submit( task );
        taskMonitor.monitoringTask( task );
    }

    /**
     * Monitorable task that runs a single HQL statement through
     * {@link HiveBaseDao} and exposes the resulting {@link ResultSet}.
     *
     * <p>Progress semantics: 0 = queued/running, 100 = completed, -1 = failed.
     */
    public class HiveIdentifiableRunnableImp
        implements IdentifiedRunnable
    {
        String identifier;
        String hql;
        // NOTE(review): this ResultSet (and its underlying statement/connection)
        // is never closed here — presumably the consumer of getResult() closes
        // it; confirm, otherwise this leaks JDBC resources.
        ResultSet resultSet;
        volatile boolean isCompleted = false;
        volatile int progress = 0;
        volatile String submitTime;
        // NOTE(review): HQL appears to be unused (getHQL() returns 'hql');
        // kept only because the class is public and the field may be referenced
        // elsewhere in the package.
        volatile String HQL;
        volatile String[] output;

        public HiveIdentifiableRunnableImp(String identifier, String hql)
        {
            this.identifier = identifier;
            this.hql = hql;
            submitTime = String.valueOf( System.currentTimeMillis() );
        }

        @Override
        public String getIdentifier()
        {
            return identifier;
        }

        @Override
        public void run()
        {
            // Wake any thread waiting on this task object for the task to start.
            synchronized( this )
            {
                notifyAll();
            }

            Tracer.debugTrace( HiveIdentifiableRunnableImp.class, "Starting query", null );
            try
            {
                HiveBaseDao hiveBaseDao = HiveBaseDao.get();
                resultSet = hiveBaseDao.query( hql );
            }
            catch( Exception e )
            {
                Tracer.fatalTrace( HiveIdentifiableRunnableImp.class, "Fatal Exception : " + e.getMessage(), e );
                // Failure convention: progress -1, error message as output,
                // isCompleted stays false.
                progress = -1;
                output = new String[] { e.getMessage() };
                return;
            }
            Tracer.debugTrace( HiveIdentifiableRunnableImp.class, "Completed query", null );
            isCompleted = true;
            progress = 100;
        }

        @Override
        public int getProgression()
        {
            return progress;
        }

        /** @return a {@link Result} whose information payload is the raw {@link ResultSet}. */
        @Override
        public Result getResult()
        {
            return new Result()
            {
                @Override
                public Object getInfomation()
                {
                    return resultSet;
                }
            };
        }

        /** @return the failure message(s), or a single empty string if none. */
        @Override
        public String[] getOutput()
        {
            if( output == null )
            {
                return new String[] { "" };
            }
            else
            {
                return output;
            }
        }

        @Override
        public boolean isCompleted()
        {
            return isCompleted;
        }

        /** @return submission time as epoch milliseconds rendered to a string. */
        public String getSubmitTime()
        {
            return submitTime;
        }

        public String getHQL()
        {
            return hql;
        }
    }

    /**
     * Monitorable task that exports an HDFS path to an external database table
     * through {@link SqoopShell}.
     *
     * <p>Progress semantics: 0 = queued/running, 100 = completed, -1 = failed
     * (consistent with {@link HiveIdentifiableRunnableImp}).
     */
    public class HiveExportTaskImp
        implements IdentifiedRunnable
    {
        String hdfsPath;
        String username;
        String password;
        String url;
        String driver;
        String id;
        String taskId;
        // Connection parameters exposed back to the caller via getResult().
        Map<String, String> temp;
        String tableName;
        // FIX: written by the pool thread, read by the monitor thread —
        // must be volatile for visibility (matches the query task above).
        volatile boolean isCompleted = false;
        volatile int progress = 0;
        SqoopShell sqoopShell;

        public HiveExportTaskImp(String taskId, String hdfsPath, String username, String password, String url,
            String driver, String tableName) throws Exception
        {
            this.taskId = taskId;
            // Identifier is the creation timestamp, not the external taskId.
            this.id = String.valueOf( System.currentTimeMillis() );
            this.hdfsPath = hdfsPath;
            this.username = username;
            this.password = password;
            this.url = url;
            this.driver = driver;
            this.tableName = tableName;
            temp = new HashMap<String, String>();
        }

        @Override
        public String getIdentifier()
        {
            return id;
        }

        @Override
        public void run()
        {
            // Wake any thread waiting on this task object for the task to start.
            synchronized( this )
            {
                this.notifyAll();
            }

            temp.put( "username", username );
            temp.put( "password", password );
            temp.put( "url", url );

            try
            {
                Map<String, String> configuration = new HashMap<String, String>();
                configuration.put( SackConstants.USER_NAME, username );
                configuration.put( SackConstants.PASS_WORD, password );
                configuration.put( SackConstants.URL, url );
                configuration.put( SackConstants.DRIVER, driver );

                DataSource dataSource = DataSourceFactory.createDataSourceByConfiguration( Configuration
                    .create( configuration ) );
                // The Sqoop shell reports under the external taskId, not this.id.
                sqoopShell = new SqoopShell( dataSource, new Identifiable()
                {
                    @Override
                    public String getIdentifier()
                    {
                        return taskId;
                    }
                } );

                sqoopShell.exportToDatabase( hdfsPath, tableName );
            }
            catch( Exception e )
            {
                Tracer.fatalTrace( HiveExportTaskImp.class, "Hive Export failed : " + e.getMessage(), e );
                // FIX: the original fell through and reported progress = 100 /
                // isCompleted = true even on failure; follow the same failure
                // convention as the query task instead.
                progress = -1;
                return;
            }

            isCompleted = true;
            progress = 100;
        }

        @Override
        public int getProgression()
        {
            return progress;
        }

        /** @return a {@link Result} whose information payload is the connection-parameter map. */
        @Override
        public Result getResult()
        {
            return new Result()
            {
                @Override
                public Object getInfomation()
                {
                    return temp;
                }
            };
        }

        /** @return the Sqoop shell's output, or {@code null} if the shell was never created. */
        @Override
        public String[] getOutput()
        {
            if( sqoopShell == null )
            {
                return null;
            }
            return sqoopShell.getOutput();
        }

        @Override
        public boolean isCompleted()
        {
            return isCompleted;
        }

    }
}
