package com.paas.demo;

import com.paas.demo.mybatis.mapper.UserInfoMapper;
import com.paas.demo.mybatis.model.UserInfo;
import com.plat.db.PlatShardingAutoConfiguration;
import com.plat.db.keygen.SnowFlakeKeyGenerator;
import com.plat.paas.core.PaasUtilsContextHolder;
import com.plat.paas.core.RedisUtil;
import io.shardingsphere.shardingjdbc.spring.boot.SpringBootConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.ImportAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.jpa.JpaRepositoriesAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
import org.springframework.boot.autoconfigure.transaction.jta.JtaAutoConfiguration;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.ImportResource;

import java.util.Date;

@SpringBootApplication
@EnableAutoConfiguration(exclude = {
        JpaRepositoriesAutoConfiguration.class, // keep Spring Boot from auto-configuring JPA repository beans
        MongoAutoConfiguration.class,           // keep Spring Boot from instantiating a MongoDB client
        JtaAutoConfiguration.class,
        SpringBootConfiguration.class           // sharding config is imported explicitly via @ImportAutoConfiguration below
})
@ImportResource({"classpath:PaasUtilsContext.xml", "classpath*:/thrift/application-thrift-*.xml", "classpath*:/kafka/application-kafka-*.xml"})
@ImportAutoConfiguration({PlatShardingAutoConfiguration.class})
public class PaasDemoApplication {

	private static final Logger logger = LoggerFactory.getLogger(PaasDemoApplication.class);

	/** Redis key used as an atomic counter to hand out snowflake worker ids per instance. */
	private static final String PLAT_WORKER_KEY = "PLAT-DEMO:SNOW_FLAKE_KEY_WORKER_ID";

	/** Snowflake worker-id pool size: legal ids are 0..1023 (10-bit worker id). */
	private static final long MAX_WORKER_ID = 1024L;

	@Autowired
	UserInfoMapper userInfoMapper;

	public static void main(String[] args) {
		ApplicationContext ctx = SpringApplication.run(PaasDemoApplication.class, args);
		// PaasUtilsContextHolder must be handed the context when running under Spring Boot
		PaasUtilsContextHolder.setContext(ctx);
		setWorkerId();
	}

	/**
	 * Assigns this instance a snowflake worker id drawn from a shared Redis counter,
	 * resetting the counter once it exceeds the 10-bit worker-id space.
	 */
	public static void setWorkerId() {
		long workerId = RedisUtil.getIncrement(PLAT_WORKER_KEY);
		if (workerId >= MAX_WORKER_ID) {
			// Counter exhausted the worker-id space: reset it and draw again.
			// NOTE(review): delete + re-increment is not atomic — two instances racing
			// here can still collide or overshoot; consider a Lua script or a
			// SETNX-based allocation scheme for a真正 atomic reset. TODO confirm.
			RedisUtil.deleteItem(PLAT_WORKER_KEY);
			workerId = RedisUtil.getIncrement(PLAT_WORKER_KEY);
		}
		// Defensive clamp: guarantees the id is always in the legal 0..1023 range even
		// if a concurrent increment pushed the counter past the limit again between
		// the reset above and this second draw.
		workerId = workerId % MAX_WORKER_ID;
		SnowFlakeKeyGenerator.setWorkerId(workerId);
		logger.info("test current worker id : {}", workerId);
	}

	/**
	 * Disabled load-test harness: spawns 4 threads, each inserting 1,000,000
	 * {@link UserInfo} rows through the sharded mapper and logging elapsed time.
	 *
	 * NOTE(review): this method is never invoked — the class does not implement
	 * {@code CommandLineRunner}/{@code ApplicationRunner} — and the {@code work}
	 * flag is hard-coded to {@code false}. Kept as-is for manual benchmarking;
	 * flip {@code work} to {@code true} AND wire the class as a runner to use it.
	 */
	public void run(String... args) throws Exception {
		boolean work = false;
		if (!work) {
			return; // benchmark disabled
		}
		for (int i = 0; i < 4; i++) {
			final int threadIndex = i;
			Thread thread = new Thread(new Runnable() {

				@Override
				public void run() {
					logger.info("thread {} completed insert userinfo starting...", threadIndex);

					long startMillis = System.currentTimeMillis();
					for (int j = 0; j < 1000000; j++) {
						UserInfo record = new UserInfo();
						record.setAccount("thread " + threadIndex + "create user account " + j);
						record.setPassword("password " + j);
						record.setUserName("user" + j);
						record.setCreateTime(new Date()); // java.util.Date required by the UserInfo model
						userInfoMapper.insert(record);
					}
					long endMillis = System.currentTimeMillis();
					logger.info("thread {} completed insert userinfo, cost time {}", threadIndex, endMillis - startMillis);
				}
			});

			thread.start();
		}
	}
}
