package cn.edu.cuit.elena.web.service;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintWriter;
import java.io.Writer;
import java.net.URI;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.ConcurrentHashMap;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

import cn.edu.cuit.elena.common.SackConstants;
import cn.edu.cuit.elena.hdfs.HDFSManager;
import cn.edu.cuit.elena.trace.Tracer;
import cn.edu.cuit.elena.transaction.controller.IdentifiedRunnable;
import cn.edu.cuit.elena.transaction.query.HiveQueryExecutor;
import cn.edu.cuit.elena.transaction.query.HiveQueryExecutor.HiveExportTaskImp;
import cn.edu.cuit.elena.transaction.system.UserContext;
import cn.edu.cuit.elena.web.System;
import cn.edu.cuit.elena.web.service.ServiceServlet.ServiceConstants;
import cn.edu.cuit.elena.web.tool.JsonHelper;

/**
 * Servlet backing the Hive export UI. Dispatches on the {@code kind} request
 * parameter:
 * <ul>
 * <li>{@code export} — start an asynchronous Hive-to-JDBC export task;</li>
 * <li>{@code tasks} — return the current/retired export tasks as JSON,
 * suppressing the response when nothing changed since the last poll;</li>
 * <li>{@code refresh} — forget the per-user JSON cache;</li>
 * <li>{@code exporttolocal} — copy an HDFS file (or the first file of a table
 * directory) to a local file on the server.</li>
 * </ul>
 */
public class ExportServlet
    extends HttpServlet
{

    private static final long serialVersionUID = 3848806007088160668L;

    /**
     * HDFS name-node URI used when exporting to a local file.
     * TODO: read from configuration instead of hard-coding localhost.
     */
    private static final String HDFS_URI = "hdfs://localhost:8020/";

    /**
     * Last task-list JSON sent to each user, keyed by
     * {@code UserContext.getIdentifier()}. Lets the "tasks" poll skip writing
     * the response body when nothing changed. A {@link ConcurrentHashMap} is
     * used because a single servlet instance serves concurrent requests.
     */
    Map<String, String> taskCache = new ConcurrentHashMap<String, String>();

    /**
     * Delegates GET requests to
     * {@link #doPost(HttpServletRequest, HttpServletResponse)}.
     *
     * @param request
     *            the request send by the client to the server
     * @param response
     *            the response send by the server to the client
     * @throws ServletException
     *             if an error occurred
     * @throws IOException
     *             if an error occurred
     */
    public void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException
    {
        doPost( request, response );
    }

    /**
     * Entry point for all export-related requests; see the class comment for
     * the supported {@code kind} values.
     *
     * @param request
     *            the request send by the client to the server
     * @param response
     *            the response send by the server to the client
     * @throws ServletException
     *             if an error occurred
     * @throws IOException
     *             if an error occurred
     */
    public void doPost( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException
    {
        response.setContentType( "text/html" );
        PrintWriter out = response.getWriter();
        String kind = request.getParameter( "kind" );
        // NOTE(review): userContext is assumed non-null for "tasks"/"refresh"/
        // "exporttolocal"; an unauthenticated request would NPE here as in the
        // original code — confirm the container guarantees a logged-in session.
        UserContext userContext = (UserContext) request.getSession().getAttribute( ServiceConstants.USERCONTEXT.mark() );
        if( "export".equals( kind ) )
        {
            handleExport( request, out );
        }
        else if( "tasks".equals( kind ) )
        {
            handleTasks( out, userContext );
        }
        else if( "refresh".equals( kind ) )
        {
            // Forget the cached JSON so the next "tasks" poll always answers.
            // (ConcurrentHashMap rejects null values, so remove() replaces the
            // original put(key, null); taskCache.get() still yields null.)
            taskCache.remove( userContext.getIdentifier() );
        }
        else if( "exporttolocal".equals( kind ) )
        {
            handleExportToLocal( request, out, userContext );
        }
    }

    /**
     * Starts an asynchronous Hive export task and answers "1" on successful
     * submission, or {@code SackConstants.FAILURE} when submission throws.
     */
    private void handleExport( HttpServletRequest request, PrintWriter out )
    {
        String username = request.getParameter( "username" );
        String password = request.getParameter( "password" );
        String db = request.getParameter( "db" );
        String url = request.getParameter( "url" );
        String table = request.getParameter( "table" );
        String path = request.getParameter( "hdfspath" );
        String driver = resolveDriver( db );

        HiveQueryExecutor hiveQueryExecutor = System.getHiveQueryExecutor();
        // Task id is only millisecond-unique; collisions are possible under
        // concurrent submissions.
        String taskId = String.valueOf( java.lang.System.currentTimeMillis() );
        try
        {
            hiveQueryExecutor.doExport( taskId, path, username, password, url, driver, table );
            out.write( "1" );
        }
        catch( Exception e )
        {
            Tracer.fatalTrace( ExportServlet.class, "Export task failed : " + e.getMessage(), e );
            out.write( SackConstants.FAILURE );
        }
        finally
        {
            out.flush();
            out.close();
        }
    }

    /**
     * Maps a database kind to its JDBC driver class name. Only MySQL is
     * actually wired up; the remaining entries are unfinished placeholders
     * carried over from the original implementation.
     */
    private String resolveDriver( String db )
    {
        if( "mysql".equalsIgnoreCase( db ) )
        {
            return "com.mysql.jdbc.Driver";
        }
        if( "oracle".equalsIgnoreCase( db ) || "db2".equalsIgnoreCase( db ) || "sqlserver".equalsIgnoreCase( db )
            || "sybase".equalsIgnoreCase( db ) )
        {
            // TODO: real driver class names were never filled in.
            return "........";
        }
        return "";
    }

    /**
     * Serializes all running and retired Hive export tasks to JSON and writes
     * it to the response, unless the JSON is identical to what this user got
     * on the previous poll (then only the stream is closed).
     */
    private void handleTasks( PrintWriter out, UserContext userContext )
    {
        List<Map<String, String>> list = new LinkedList<Map<String, String>>();
        collectExportTasks( System.getSystemTaskMonitor().getTasks(), list );
        collectExportTasks( System.getSystemTaskMonitor().getRetiredTasks(), list );

        String json = JsonHelper.getNewInstance().toJson( list );
        String cached = taskCache.get( userContext.getIdentifier() );
        if( cached == null || !cached.equals( json ) )
        {
            Tracer.debugTrace( ExportServlet.class, "console json : " + json, null );
            taskCache.put( userContext.getIdentifier(), json );
            out.write( json );
            out.flush();
        }
        out.close();
    }

    /**
     * Appends one summary map per {@code HiveExportTaskImp} found in
     * {@code tasks} to {@code list}; other task types are skipped.
     */
    @SuppressWarnings( "unchecked" )
    private void collectExportTasks( Map<String, IdentifiedRunnable> tasks, List<Map<String, String>> list )
    {
        for( Entry<String, IdentifiedRunnable> entry : tasks.entrySet() )
        {
            IdentifiedRunnable task = entry.getValue();
            if( task instanceof HiveExportTaskImp )
            {
                Map<String, String> result = (Map<String, String>) task.getResult().getInfomation();
                Map<String, String> temp = new HashMap<String, String>();
                temp.put( "username", result.get( "username" ) );
                // SECURITY: the clear-text JDBC password is echoed back to the
                // client here (as in the original code) — review whether the
                // UI really needs it.
                temp.put( "password", result.get( "password" ) );
                temp.put( "url", result.get( "url" ) );
                temp.put( "taskId", entry.getKey() );
                temp.put( "progress", String.valueOf( task.getProgression() ) );
                list.add( temp );
            }
        }
    }

    /**
     * Copies an HDFS file to a local file on the server. {@code exportflag}
     * "1" names the HDFS file directly ({@code hdfspath}); "2" exports the
     * first data file of a table directory ({@code tablepath}). An empty
     * {@code count} means "copy everything".
     */
    private void handleExportToLocal( HttpServletRequest request, PrintWriter out, UserContext userContext )
    {
        String exportFlag = request.getParameter( "exportflag" );
        String count = request.getParameter( "count" );
        String filePath = request.getParameter( "filepath" );
        String hdfsPath;
        if( "1".equals( exportFlag ) )
        {
            hdfsPath = request.getParameter( "hdfspath" );
        }
        else if( "2".equals( exportFlag ) )
        {
            // Table export: take the first data file under the table directory.
            String[] files = HDFSManager.instance().getAllFiles( request.getParameter( "tablepath" ) );
            hdfsPath = files[0];
        }
        else
        {
            return;
        }

        try
        {
            if( count == null || count.equals( "" ) )
            {
                readStream( out, hdfsPath, filePath, userContext );
            }
            else
            {
                readStream( out, hdfsPath, filePath, count, userContext );
            }
            Tracer.debugTrace( ExportServlet.class, "export to local success ", null );
        }
        catch( Exception e )
        {
            Tracer.fatalTrace( ExportServlet.class, "export to local exception : " + e.getMessage(), e );
        }
    }

    /**
     * Copies at most {@code count} lines of the HDFS file at {@code hdfsPath}
     * to the local file {@code filePath}, then writes "1" to {@code writer}.
     * The response {@code writer} is always closed.
     *
     * @throws Exception
     *             on parse, HDFS or local I/O failure
     */
    private void readStream( Writer writer, String hdfsPath, String filePath, String count, UserContext userContext )
        throws Exception
    {
        // Parse once instead of on every loop iteration (the original
        // re-parsed the count per line).
        copyFromHdfs( writer, hdfsPath, filePath, Long.parseLong( count.trim() ), count, userContext );
    }

    /**
     * Copies the whole HDFS file at {@code hdfsPath} to the local file
     * {@code filePath}, then writes "1" to {@code writer}. The response
     * {@code writer} is always closed.
     *
     * @throws Exception
     *             on HDFS or local I/O failure
     */
    public void readStream( Writer writer, String hdfsPath, String filePath, UserContext userContext ) throws Exception
    {
        copyFromHdfs( writer, hdfsPath, filePath, Long.MAX_VALUE, "all", userContext );
    }

    /**
     * Shared implementation for both {@code readStream} variants: streams up
     * to {@code maxRows} lines from HDFS into a local file, prefixed with a
     * comment header. Unlike the original code, the local writer is closed
     * (it used to leak), and the "1" success marker is written only after the
     * copy actually completed — a failure no longer reports success.
     */
    private void copyFromHdfs( Writer writer, String hdfsPath, String filePath, long maxRows, String countLabel,
        UserContext userContext )
        throws Exception
    {
        FSDataInputStream in = null;
        BufferedReader bufferedReader = null;
        BufferedWriter bufferedWriter = null;
        try
        {
            FileSystem hdfs = FileSystem.get( new URI( HDFS_URI ), new Configuration() );
            in = hdfs.open( new Path( hdfsPath ) );
            // NOTE(review): reader and writer both use the platform default
            // charset, as the original did — confirm that is intended.
            bufferedReader = new BufferedReader( new InputStreamReader( in ) );
            bufferedWriter = new BufferedWriter( new FileWriter( new File( filePath ) ) );
            bufferedWriter
                .write( "/*\n        Elena Export Tools \n        Power by Apache.Hadoop\n        Export from: "
                    + hdfsPath + "\n        Export rows count: " + countLabel + "\n        Export time: "
                    + new java.util.Date() + "\n        Export user: " + userContext.getUserName()
                    + "\n        Field terminated by: '\n*/\n" );
            String line;
            long rows = 0;
            while( rows < maxRows && ( line = bufferedReader.readLine() ) != null )
            {
                rows++;
                bufferedWriter.write( line + "\n" );
            }
            bufferedWriter.flush();
            // Report success only after the copy completed.
            writer.write( "1" );
            writer.flush();
        }
        finally
        {
            if( bufferedWriter != null )
            {
                try
                {
                    bufferedWriter.close();
                }
                catch( IOException ignored )
                {
                    // best effort; data was already flushed on the success path
                }
            }
            if( bufferedReader != null )
            {
                try
                {
                    bufferedReader.close();
                }
                catch( IOException ignored )
                {
                    // best effort
                }
            }
            IOUtils.closeStream( in );
            writer.close();
        }
    }

}
