package com.zis.starter.datasource;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.alibaba.druid.pool.DruidDataSource;
import com.zis.starter.core.CustomProperties;
import com.zis.starter.core.SnowflakeIdWorker;
import com.zis.starter.ext.DataCacheInterceptor;
import org.beetl.sql.core.ClasspathLoader;
import org.beetl.sql.core.IDAutoGen;
import org.beetl.sql.core.Interceptor;
import org.beetl.sql.core.UnderlinedNameConversion;
import org.beetl.sql.core.db.MySqlStyle;
import org.beetl.sql.ext.DebugInterceptor;
import org.beetl.sql.ext.spring4.BeetlSqlDataSource;
import org.beetl.sql.ext.spring4.SqlManagerFactoryBean;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.annotation.Order;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;

/**
 * Create by wangshen 2018/1/22
 */
@Configuration
@EnableCaching
public class BeetlSqlConfig {

    private static final Logger logger = LoggerFactory.getLogger(BeetlSqlConfig.class);

    @Autowired
    private CustomProperties customProperties;

    /**
     * Builds the primary BeetlSQL {@code SqlManagerFactoryBean} on top of the
     * shared Druid datasource: MySQL dialect, underscore-to-camelCase name
     * conversion, SQL templates loaded from classpath {@code /sql}, a debug
     * interceptor plus a data-cache interceptor for the configured tables, and
     * a snowflake-based id generator registered under the name {@code "pkId"}.
     *
     * @param datasource the Druid datasource bean named {@code "dataSource"}
     * @return the configured factory bean
     * @throws IllegalStateException if workerId or dataCenterId is not configured
     *         (both are required by {@link SnowflakeIdWorker})
     */
    @SuppressWarnings("rawtypes")
    @Bean(name = "sqlManagerFactoryBean")
    @Primary
    public SqlManagerFactoryBean getSqlManagerFactoryBean(@Qualifier("dataSource") DruidDataSource datasource) {
        SqlManagerFactoryBean factory = new SqlManagerFactoryBean();
        BeetlSqlDataSource source = new BeetlSqlDataSource();
        source.setMasterSource(datasource);
        factory.setCs(source);
        factory.setDbStyle(new MySqlStyle());
        // Register the query namespaces (tables) whose results should be cached.
        List<String> cacheList = parseCacheTables(customProperties.getCacheTables());
        factory.setInterceptors(new Interceptor[] { new DebugInterceptor(), new DataCacheInterceptor(cacheList) });
        factory.setNc(new UnderlinedNameConversion());
        factory.setSqlLoader(new ClasspathLoader("/sql"));
        // Register the snowflake primary-key generator. FIX: the original only
        // checked workerId, but the error message (and the SnowflakeIdWorker
        // constructor) also requires dataCenterId — a missing dataCenterId
        // would previously have surfaced as an NPE instead of this message.
        if (customProperties.getWorkerId() == null || customProperties.getDataCenterId() == null) {
            throw new IllegalStateException("未获取到主键生成规则workerId与datacenterId配置");
        }
        // Parameterized logging avoids string concatenation when the level is disabled.
        logger.info(" --->> workerId:{} -- dataCenterId:{}", customProperties.getWorkerId(), customProperties.getDataCenterId());
        SnowflakeIdWorker snowflakeIdWorker = new SnowflakeIdWorker(customProperties.getWorkerId(), customProperties.getDataCenterId());
        Map<String, IDAutoGen> idAutoGenMap = new HashMap<>();
        idAutoGenMap.put("pkId", new IDAutoGen() {
            @Override
            public String nextID(String s) {
                return String.valueOf(snowflakeIdWorker.nextId());
            }
        });
        factory.setIdAutoGens(idAutoGenMap);
        return factory;
    }

    /**
     * Splits the comma-separated {@code cacheTables} property into a mutable list.
     * Returns an empty list when the property is null or empty.
     *
     * <p>FIX: {@code String.split} already returns the whole input when no comma
     * is present, so the original {@code indexOf(",") > 0} special case (which
     * also silently missed a comma at position 0) is unnecessary.
     */
    private static List<String> parseCacheTables(String cacheTables) {
        List<String> cacheList = new ArrayList<>();
        if (cacheTables != null && !cacheTables.isEmpty()) {
            Collections.addAll(cacheList, cacheTables.split(","));
        }
        return cacheList;
    }

    /**
     * Exposes a standalone {@code BeetlSqlDataSource} wrapping the shared Druid
     * datasource as its master source.
     *
     * @param datasource the Druid datasource bean named {@code "dataSource"}
     * @return the BeetlSQL datasource wrapper
     */
    @Bean
    public BeetlSqlDataSource beetlSqlDataSource(@Qualifier("dataSource") DruidDataSource datasource) {
        BeetlSqlDataSource source = new BeetlSqlDataSource();
        source.setMasterSource(datasource);
        return source;
    }

    /**
     * Declares the primary JDBC transaction manager ({@code "txManager"}) bound
     * to the shared Druid datasource.
     *
     * @param datasource the Druid datasource bean named {@code "dataSource"}
     * @return the transaction manager
     */
    @Bean(name = "txManager")
    @Primary
    public DataSourceTransactionManager getDataSourceTransactionManager(@Qualifier("dataSource") DruidDataSource datasource) {
        DataSourceTransactionManager dsm = new DataSourceTransactionManager();
        dsm.setDataSource(datasource);
        return dsm;
    }
}
