/*
 * Copyright 2015 - 2016 JPolaris.io
 *
 *    Licensed under the Apache License, Version 2.0 (the "License");
 *    you may not use this file except in compliance with the License.
 *    You may obtain a copy of the License at
 *
 *        http://www.apache.org/licenses/LICENSE-2.0
 *
 *    Unless required by applicable law or agreed to in writing, software
 *    distributed under the License is distributed on an "AS IS" BASIS,
 *    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *    See the License for the specific language governing permissions and
 *    limitations under the License.
 */
package com.lenovo.lmrp.server.eventrecorder.repository;

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import javax.inject.Inject;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.datastax.driver.core.BatchStatement;
import com.datastax.driver.core.Cluster;
import com.datastax.driver.core.HostDistance;
import com.datastax.driver.core.KeyspaceMetadata;
import com.datastax.driver.core.Metrics;
import com.datastax.driver.core.PoolingOptions;
import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.ProtocolOptions.Compression;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.ResultSetFuture;
import com.datastax.driver.core.Session;
import com.lenovo.lmrp.base.ParamNameDef;
import com.lenovo.sdc.framework.util.AttributeBean;
import com.lenovo.sdc.framework.util.exception.ErrorCode;
import com.lenovo.sdc.framework.util.exception.TraceableException;

/**
 * Cassandra data source for the event recorder: owns the cluster connection and
 * session, caches prepared statements, and executes batches asynchronously.
 *
 * @author wujin
 */
public class CassandraDataSource {
    private static final Logger logger = LoggerFactory.getLogger(CassandraDataSource.class);

    private static final String KEYSPACE_NAME = "lmrp";

    private Cluster cluster;
    private Session session;
    // Declared as ExecutorService (not Executor) so close() can shut it down;
    // a cached pool's non-daemon threads would otherwise keep the JVM alive.
    private ExecutorService executor;
    private Map<String, PreparedStatement> cachedCQL;

    /**
     * Connects to the Cassandra cluster listed under {@link ParamNameDef#DB_HOST}
     * (comma-separated host list, default {@code 127.0.0.1}) and opens a session.
     * Connection failures are logged via {@link TraceableException#writeLog};
     * in that case the instance is left with null session/cluster fields.
     *
     * @param params configuration attributes (host list, replication factor)
     */
    @Inject
    public CassandraDataSource(AttributeBean params) {
        // get cassandra cluster server address list from config file
        String hosts = params.getAttribute(ParamNameDef.DB_HOST, "127.0.0.1");

        try {
            // Connection-pool tuning. Suggest moving these to the config file so
            // operators can adjust connection and max-request counts for best performance.
            PoolingOptions poolOp = new PoolingOptions();
            poolOp.setMaxConnectionsPerHost(HostDistance.REMOTE, 5);
            poolOp.setCoreConnectionsPerHost(HostDistance.REMOTE, 5);
            poolOp.setMaxRequestsPerConnection(HostDistance.REMOTE, 256);

            // connect to the cassandra cluster and open a session
            cluster = Cluster.builder()
                    .withCompression(Compression.LZ4)
                    .withPoolingOptions(poolOp)
                    .addContactPoints(hosts.split(","))
                    .build();
            session = cluster.connect();

            // Creating the keyspace on the fly only suits a small demo system; in a large
            // distributed system the IT admin should set up the Cassandra cluster first
            // and configure disk and cluster performance parameters.
            //createSchemaWhenNonExist(params);

            cachedCQL = new ConcurrentHashMap<>();

            executor = Executors.newCachedThreadPool();
        }
        catch (Exception exp) {
            TraceableException.writeLog(ErrorCode.DB_ERROR, "create CassandraDataSource error", exp);
        }
    }

    /**
     * Creates the {@code lmrp} keyspace and its tables from the classpath resource
     * {@code schema.sql} when the keyspace does not exist yet. On failure the
     * half-created keyspace is dropped (best effort) and the original exception
     * is rethrown so the caller can react.
     *
     * @param params configuration attributes; supplies the replication factor
     * @throws Exception if the schema resource is missing or a CQL statement fails
     */
    private void createSchemaWhenNonExist(AttributeBean params) throws Exception {
        KeyspaceMetadata keyspaceMetadata = cluster.getMetadata().getKeyspace(KEYSPACE_NAME);
        if (Objects.nonNull(keyspaceMetadata))
            return;

        // read schema from resource file that is deployed inside the jar;
        // try-with-resources guarantees the reader is closed on every path
        try (BufferedReader in = new BufferedReader(new InputStreamReader(
                Objects.requireNonNull(getClass().getResourceAsStream("schema.sql"),
                        "schema.sql resource not found on classpath"),
                StandardCharsets.UTF_8))) {
            StringBuilder sb = new StringBuilder(2048);
            String line;
            while ((line = in.readLine()) != null) {
                // skip comments and remove space, \r, \r\n at the end of the line
                line = line.trim();
                if (line.isEmpty() || line.startsWith("-- ") || line.startsWith("/*") || line.startsWith("*"))
                    continue;

                // replace replication factor and other storage parameters
                line = line.replaceAll("\\$\\{replicationFactor\\}", params.getAttribute(ParamNameDef.DB_DATA_REPLIC, "1"));
                sb.append(" ").append(line);

                // ';' is the end flag of a cql statement
                if (line.endsWith(";")) {
                    sb.append("\r\n");
                    session.execute(sb.toString());
                    sb.setLength(0);
                }
            }
        }
        catch (Exception exp) {
            logger.warn("create schema failed, system try to drop keyspace", exp);
            try {
                // best-effort cleanup so a later retry starts from a clean state
                session.execute("drop keyspace IF EXISTS lmrp;");
            }
            catch (Exception exp2) {
                logger.warn("clean keyspace failed: {}", exp2.getMessage());
            }
            // propagate the original failure instead of silently swallowing it;
            // the declared 'throws Exception' was previously unreachable
            throw exp;
        }
    }

    /**
     * Binds {@code binds} to the prepared form of {@code cql} and adds the bound
     * statement to {@code batch}. Prepared statements are cached per CQL string.
     *
     * @param batch batch to append to
     * @param cql   CQL text with '?' placeholders
     * @param binds positional bind values
     */
    public void addBatch(BatchStatement batch, String cql, Object... binds) {
        // computeIfAbsent is atomic on ConcurrentHashMap, so concurrent callers
        // no longer race to prepare the same statement twice (old get/put pattern)
        PreparedStatement ps = cachedCQL.computeIfAbsent(cql, session::prepare);
        batch.add(ps.bind(binds));
    }

    /**
     * Executes a batch of cql asynchronously; all commands execute like one transaction.
     * Completion is logged (and the batch cleared) on a background executor thread.
     *
     * @param batch the batch to execute
     */
    public void batchExecute(BatchStatement batch) {
        Runnable task = new LogBatchResult(batch);
        ResultSetFuture future = session.executeAsync(batch);
        future.addListener(task, executor);
    }

    /**
     * Synchronously executes {@code cql} with the given bind values, preparing and
     * caching the statement on first use.
     *
     * @param cql   CQL text with '?' placeholders
     * @param binds positional bind values (may be empty)
     * @return the driver result set
     */
    public ResultSet execute(String cql, Object... binds) {
        // same atomic cache idiom as addBatch(); avoids duplicate prepares
        PreparedStatement ps = cachedCQL.computeIfAbsent(cql, session::prepare);

        if (Objects.isNull(binds) || binds.length == 0)
            return session.execute(ps.bind());
        else
            return session.execute(ps.bind(binds));
    }

    /** @return driver-level metrics for the underlying cluster */
    public Metrics getMetrics() {
        return cluster.getMetrics();
    }

    /**
     * Releases all resources: closes the session and cluster, shuts down the
     * listener executor, and clears the prepared-statement cache. Safe to call
     * even if construction partially failed (null checks throughout).
     */
    public void close() {
        try {
            if (Objects.nonNull(session))
                session.close();

            if (Objects.nonNull(cluster))
                cluster.close();

            // previously leaked: the cached pool's worker threads were never stopped
            if (Objects.nonNull(executor))
                executor.shutdown();

            if (Objects.nonNull(cachedCQL))
                cachedCQL.clear();

            logger.info("close cassandra session");
        }
        catch (Exception exp) {
            // use the class logger instead of printStackTrace()
            logger.warn("close cassandra datasource failed", exp);
        }
    }

    /** Listener that logs a completed batch's size and clears it for reuse. */
    private static class LogBatchResult implements Runnable {
        private final BatchStatement batch;

        public LogBatchResult(BatchStatement batch) {
            this.batch = batch;
        }

        @Override
        public void run() {
            logger.info("commit {} statement", batch.size());
            batch.clear();
        }
    }
}
