package com.jch.dm11_pipeline.example;

import com.jch.dm11_pipeline.reusable.*;
import java.io.File;
import java.io.IOException;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.*;

// Scheduled data-synchronization task (example source).
/**
 * Scheduled task that synchronizes subscription records: it reads the records
 * from the database, writes them into data files, transfers those files to a
 * set of FTP servers in parallel, and finally backs the files up. The three
 * stages are wired together as a {@code SimplePipeline}.
 */
public class DataSyncTask implements Runnable {

    /**
     * Entry point: builds and initializes the pipeline, streams every
     * subscription record through it, and always shuts the pipeline down
     * afterwards so its worker threads are released.
     */
    @Override
    public void run() {
        SimplePipeline<RecordSaveTask, String> pipeline = buildPipeline();
        pipeline.init(pipeline.newDefaultPipeContext());

        // try-with-resources: closing the Connection also releases the
        // PreparedStatement and ResultSet created in qryRecords, so nothing
        // leaks and no empty catch block is needed for the close call.
        try (Connection dbConn = getConnection()) {
            processRecords(qryRecords(dbConn), pipeline);
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Shut down in a finally block so the pipeline's worker threads
            // are released even if the database work above failed.
            pipeline.shutdown(360, TimeUnit.SECONDS);
        }
    }

    /**
     * Queries all subscription records ordered by operation time.
     * <p>
     * The PreparedStatement is intentionally not closed here: the caller is
     * still iterating the returned ResultSet. Both are released when the
     * owning Connection is closed in {@link #run()}.
     *
     * @param dbConn an open database connection; switched to read-only here
     * @return a forward-only, read-only cursor over the subscriptions table
     * @throws SQLException if the query fails
     */
    private ResultSet qryRecords(Connection dbConn) throws SQLException {
        dbConn.setReadOnly(true);
        PreparedStatement ps = dbConn.prepareStatement(
                "select id,productId,packageId,msisdn,operationTime,operationType," +
                        "effectiveDate,dueDate from subscriptions order by operationTime",
                ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
        return ps.executeQuery();
    }

    /**
     * Opens a connection to the local HSQLDB test instance.
     * <p>
     * {@code Class.forName} is kept for compatibility with pre-JDBC-4
     * drivers; JDBC 4+ drivers are located via the service-loader mechanism
     * and would not need it.
     *
     * @throws ClassNotFoundException if the HSQLDB driver is not on the classpath
     * @throws SQLException           if the connection cannot be established
     */
    private static Connection getConnection() throws ClassNotFoundException, SQLException {
        Class.forName("org.hsqldb.jdbc.JDBCDriver");
        return DriverManager.getConnection("jdbc:hsqldb:hsql://127.0.0.1:9001/viscent-test", "SA", "");
    }

    /**
     * Maps the current row of the cursor to a {@code Record} instance.
     *
     * @param rs a cursor positioned on a valid row of the subscriptions query
     * @throws SQLException if a column cannot be read
     */
    private static Record makeRecordFrom(ResultSet rs) throws SQLException {
        Record record = new Record();
        record.setId(rs.getInt("id"));
        record.setProductId(rs.getString("productId"));
        record.setPackageId(rs.getString("packageId"));
        record.setMsisdn(rs.getString("msisdn"));
        record.setOperationTime(rs.getTimestamp("operationTime"));
        record.setOperationType(rs.getInt("operationType"));
        record.setEffectiveDate(rs.getTimestamp("effectiveDate"));
        record.setDueDate(rs.getTimestamp("dueDate"));
        return record;
    }

    /**
     * Immutable input message for the file-saving stage. Exactly one of
     * {@link #records} and {@link #recordDay} is non-null, depending on which
     * constructor was used: a non-null {@code records} means "append these
     * records to file {@code targetFileIndex}", a non-null {@code recordDay}
     * means "finish the file for that day".
     */
    private static class RecordSaveTask {
        // Records to write; null when this task only finalizes a day's file.
        public final Record[] records;
        // Index of the target data file within the day.
        public final int targetFileIndex;
        // Day key (yyMMdd) to finalize; null when records are being written.
        public final String recordDay;

        public RecordSaveTask(Record[] records, int targetFileIndex) {
            this.records = records;
            this.targetFileIndex = targetFileIndex;
            this.recordDay = null;
        }

        public RecordSaveTask(String recordDay, int targetFileIndex) {
            this.records = null;
            this.targetFileIndex = targetFileIndex;
            this.recordDay = recordDay;
        }
    }

    /**
     * Assembles the three-stage pipeline: save records to files, transfer the
     * files to the FTP servers, back the transferred files up.
     */
    @SuppressWarnings("unchecked")
    private SimplePipeline<RecordSaveTask, String> buildPipeline() {

        /*
         * Each pipe can be initialized right after its upstream pipe rather
         * than all pipes concurrently, so a single helper thread is enough to
         * drive the initialization and any follow-up tasks it produces: those
         * tasks are executed one after another by the single worker thread of
         * this executor.
         */
        final ExecutorService helperExecutor = Executors.newSingleThreadExecutor();
        final SimplePipeline<RecordSaveTask, String> pipeline = new SimplePipeline<RecordSaveTask, String>(helperExecutor);

        // Stage 1: turn database records into data files.
        Pipe<RecordSaveTask, File> stageSaveFile = new AbstractPipe<RecordSaveTask, File>() {
            @Override
            protected File doProcess(RecordSaveTask task) throws PipeException {
                final RecordWriter recordWriter = RecordWriter.getInstance();
                final Record[] records = task.records;
                File file;
                if (null == records) {
                    // No records: this task just finalizes the day's file.
                    file = recordWriter.finishRecords(task.recordDay, task.targetFileIndex);
                } else {
                    try {
                        file = recordWriter.write(records, task.targetFileIndex);
                    } catch (IOException e) {
                        throw new PipeException(this, task, "Failed to save records.", e);
                    }
                }
                return file;
            }
        };

        /*
         * These pipes all perform I/O. To stay thread-safe without locking
         * (and the context switches locks would cause), each pipe runs on a
         * single worker thread. If a pipe were switched to a thread pool,
         * both thread safety and deadlock would have to be re-examined.
         */
        pipeline.addAsWorkerThreadBasedPipe(stageSaveFile, 1);

        final String[][] ftpServerConfigs = retrieveFTPServConf();
        final ThreadPoolExecutor ftpExecutorService = new ThreadPoolExecutor(1, ftpServerConfigs.length, 60, TimeUnit.SECONDS,
                new ArrayBlockingQueue<Runnable>(100), new RejectedExecutionHandler() {

            // Caller-blocks saturation policy: when the queue is full, block
            // the submitting thread on put() instead of discarding the task.
            @Override
            public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
                if (!executor.isShutdown()) {
                    try {
                        executor.getQueue().put(r);
                    } catch (InterruptedException e) {
                        // Restore the interrupt flag so the caller can
                        // observe the interruption.
                        Thread.currentThread().interrupt();
                    }
                }
            }
        });
        // Stage 2: transfer each produced data file to all configured hosts.
        Pipe<File, File> stageTransferFile = new AbstractParallelPipe<File, File, File>(new SynchronousQueue<File>(), ftpExecutorService) {

            final Future<FTPClientUtil>[] ftpClientUtilHolders = new Future[ftpServerConfigs.length];

            @Override
            public void init(PipeContext pipeContext) {
                super.init(pipeContext);
                String[] ftpServerConfig;
                // Kick off asynchronous connection setup for every FTP server;
                // each config row is {host, user, password}.
                for (int i = 0; i < ftpServerConfigs.length; i++) {
                    ftpServerConfig = ftpServerConfigs[i];
                    ftpClientUtilHolders[i] = FTPClientUtil.newInstance(ftpServerConfig[0], ftpServerConfig[1], ftpServerConfig[2]);
                }
            }

            @Override
            protected List<Callable<File>> buildTasks(File file) throws Exception {
                // One upload sub-task per FTP server for the same file.
                List<Callable<File>> tasks = new LinkedList<Callable<File>>();
                for (Future<FTPClientUtil> ftpClientUtilHolder : ftpClientUtilHolders) {
                    tasks.add(new ParallelTask(ftpClientUtilHolder, file));
                }
                return tasks;
            }

            @Override
            protected File combineResults(List<Future<File>> subTaskResults) throws Exception {
                if (0 == subTaskResults.size()) {
                    return null;
                }
                // All sub-tasks uploaded the same file, so the first result
                // (which also surfaces any upload failure via get()) is enough.
                File file = null;
                file = subTaskResults.get(0).get();
                return file;
            }

            @Override
            public void shutdown(long timeout, TimeUnit unit) {
                super.shutdown(timeout, unit);
                ftpExecutorService.shutdown();
                try {
                    ftpExecutorService.awaitTermination(timeout, unit);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag; shutdown continues so the
                    // FTP connections below are still released.
                    Thread.currentThread().interrupt();
                }
                for (Future<FTPClientUtil> ftpClientUtilHolder : ftpClientUtilHolders) {
                    try {
                        ftpClientUtilHolder.get().disconnect();
                    } catch (InterruptedException | ExecutionException e) {
                        e.printStackTrace();
                    }
                }
            }

            // Uploads one file to one FTP server; the Future resolves to the
            // lazily-connected client created in init().
            class ParallelTask implements Callable<File> {

                public final Future<FTPClientUtil> ftpUtilHodler;
                public final File file2Transfer;

                public ParallelTask(Future<FTPClientUtil> ftpUtilHodler, File file2Transfer) {
                    this.ftpUtilHodler = ftpUtilHodler;
                    this.file2Transfer = file2Transfer;
                }

                @Override
                public File call() throws Exception {
                    File transferedFile = null;
                    ftpUtilHodler.get().upload(file2Transfer);
                    transferedFile = file2Transfer;
                    return transferedFile;
                }
            }
        };

        pipeline.addAsWorkerThreadBasedPipe(stageTransferFile, 1);

        // Stage 3: back up the files that were transferred.
        Pipe<File, Void> stageBackupFile = new AbstractPipe<File, Void>() {
            @Override
            protected Void doProcess(File input) throws PipeException {
                RecordWriter.backupFile(input);
                return null;
            }

            @Override
            public void shutdown(long timeout, TimeUnit unit) {
                // NOTE(review): super.shutdown is not invoked here, unlike in
                // stageTransferFile — confirm against the AbstractPipe
                // contract whether that is intentional.
                // Once every file is backed up, remove the empty directories.
                RecordWriter.purgeDir();
            }
        };
        pipeline.addAsWorkerThreadBasedPipe(stageBackupFile, 1);
        return pipeline;
    }

    /**
     * Returns the FTP server configurations, one {host, user, password} row
     * per server. This is a placeholder stub; the empty inner arrays would
     * cause an ArrayIndexOutOfBoundsException in stageTransferFile.init if
     * used as-is, so real deployments must supply three-element rows.
     */
    private String[][] retrieveFTPServConf() {
        String[][] ftpServerConfigs = new String[][]{{}, {}};
        return ftpServerConfigs;
    }

    /**
     * Streams the query result into the pipeline, chunking records into
     * RECORD_SAVE_CHUNK_SIZE batches and rolling over to a new target file
     * whenever a day boundary is crossed or a file reaches
     * MAX_REXCORDS_PER_FILE records. Any trailing partial batch is flushed
     * after the loop.
     *
     * @param rs       cursor over records ordered by operationTime (the
     *                 day-rollover logic relies on that ordering)
     * @param pipeline sink that the chunked RecordSaveTasks are fed into
     */
    private void processRecords(ResultSet rs, Pipeline<RecordSaveTask, String> pipeline) throws Exception {
        Record record;
        Record[] records = new Record[Config.RECORD_SAVE_CHUNK_SIZE];
        int targetFileIndex = 0;
        int nextTargetFileIndex = 0;
        int recordCountInTheDay = 0;
        int recordCountInTheFile = 0;
        String recordDay = null;
        String lastRecordDay = null;
        // Local instance, so SimpleDateFormat's lack of thread safety is not
        // an issue here; the yyMMdd string doubles as the day key.
        SimpleDateFormat sdf = new SimpleDateFormat("yyMMdd");
        while (rs.next()) {
            record = makeRecordFrom(rs);
            lastRecordDay = recordDay;
            recordDay = sdf.format(record.getOperationTime());
            if (recordDay.equals(lastRecordDay)) {
                // Same day: accumulate into the current chunk.
                records[recordCountInTheFile] = record;
                recordCountInTheDay++;
            } else {
                // Day boundary crossed: flush (or finalize) the previous
                // day's file before starting the new day's first chunk.
                if (null != lastRecordDay) {
                    if (recordCountInTheFile >= 1) {
                        pipeline.process(new RecordSaveTask(Arrays.copyOf(records, recordCountInTheFile), targetFileIndex));
                    } else {
                        pipeline.process(new RecordSaveTask(lastRecordDay, targetFileIndex));
                    }
                    // The previous chunk has been handed off; reuse the buffer
                    // for the new day's records.
                    records[0] = record;
                    recordCountInTheFile = 0;
                } else {
                    records[0] = record;
                }
                recordCountInTheDay = 1;
            }
            if (nextTargetFileIndex == targetFileIndex) {
                recordCountInTheFile++;
                // Chunk full: hand it to the pipeline and start a new one.
                if (0 == (recordCountInTheFile % Config.RECORD_SAVE_CHUNK_SIZE)) {
                    pipeline.process(new RecordSaveTask(Arrays.copyOf(records, recordCountInTheFile), targetFileIndex));
                    recordCountInTheFile = 0;
                }
            }
            // Roll over to the next file within the day once the per-file
            // record limit is reached.
            nextTargetFileIndex = (recordCountInTheDay) / Config.MAX_REXCORDS_PER_FILE;
            if (nextTargetFileIndex > targetFileIndex) {
                // NOTE(review): this branch uses "> 1" while the day-rollover
                // branch above uses ">= 1" — confirm whether a single pending
                // record should also be flushed here.
                if (recordCountInTheFile > 1) {
                    pipeline.process(new RecordSaveTask(Arrays.copyOf(records, recordCountInTheFile), targetFileIndex));
                } else {
                    pipeline.process(new RecordSaveTask(recordDay, targetFileIndex));
                }
                recordCountInTheFile = 0;
                targetFileIndex = nextTargetFileIndex;
            } else if (nextTargetFileIndex < targetFileIndex) {
                targetFileIndex = nextTargetFileIndex;
            }
        }
        // Flush the trailing partial chunk, if any.
        if (recordCountInTheFile > 0) {
            pipeline.process(new RecordSaveTask(Arrays.copyOf(records, recordCountInTheFile), targetFileIndex));
        }
    }

}
