package org.anyline.simple.metadata;

import org.anyline.adapter.KeyAdapter;
import org.anyline.data.jdbc.adapter.JDBCAdapter;
import org.anyline.entity.DataRow;
import org.anyline.entity.DataSet;
import org.anyline.metadata.*;
import org.anyline.proxy.ServiceProxy;
import org.anyline.service.AnylineService;
import org.anyline.util.ConfigTable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.ComponentScan;

import java.util.*;

@SpringBootApplication


public class MetadataApplication extends SpringBootServletInitializer {
	/** Demo entry point that exercises the anyline metadata / DDL APIs against several databases. */
	private static final Logger log = LoggerFactory.getLogger(MetadataApplication.class);
	// Re-pointed to a datasource-specific service inside check(); starts as the default service bean.
	private static AnylineService service = null;
	// Oracle sequence name used to feed primary keys; stays null for databases with auto-increment.
	private static String seq = null;

	public static void main(String[] args) throws Exception{
		SpringApplication application = new SpringApplication(MetadataApplication.class);
		ConfigurableApplicationContext context = application.run(args);
		ConfigTable.IS_METADATA_AUTO_CHECK_COLUMN_PRIMARY = true;

		service = (AnylineService)context.getBean("anyline.service");
		check(null, "MySQL");
		//check("hive", "Apache Hive");
		//check("hana", "SAP HANA");
		//check("dm8", "DM 8");
		check("pg", "PostgreSQL");
		//check("ms", "SQL Server");
		//check("oracle", "Oracle 11G");
		//check("td", "TDengine");
		//check("db2", "DB2");
	}

	/**
	 * Runs the enabled metadata demos against one datasource.
	 * @param ds datasource key registered with ServiceProxy, or null to keep the default datasource
	 * @param title label printed around the run
	 */
	public static void check(String ds, String title) throws Exception{
		System.out.println("=============================== START " + title + "=========================================");
		if(null != ds) {
			service = ServiceProxy.service(ds);
		}
		seq = null;
		if("oracle".equals(ds)){
			// Oracle 11g has no auto-increment; (re)create a sequence to supply primary key values.
			seq = "SIMPLE_SEQ";
			if(service.querys("USER_SEQUENCES","SEQUENCE_NAME:" + seq).size()>0) {
				service.execute("DROP SEQUENCE " + seq);
			}
			String sql = "CREATE SEQUENCE "+seq+" MINVALUE 0 START WITH 0 NOMAXVALUE INCREMENT BY 1 NOCYCLE CACHE 100";
			service.execute(sql);
		}
		ConfigTable.IS_AUTO_CHECK_METADATA = true;
		ConfigTable.IS_METADATA_IGNORE_CASE = false;
		// Demonstrates listing databases; the returned map is not needed by the checks below.
		service.metadata().databases();
		//schemas();
		//init("CRM_USER");
		//list();
		//json();
		tables();
		//table();
		//column();
		//view();
		//tag();
		//index();
		//function();
		//exception();
		System.out.println("=============================== END " + title + "=========================================");
	}

	/** Parses a JSON array into a DataSet and saves it into crm_user. */
	public static void json(){
		String json = "[{\"id\":1, \"userId\":11,\"userName\":\"张三\"},{\"id\":2, \"userId\":22,\"userName\":\"张三三\"}]";
		DataSet users = DataSet.parseJson(KeyAdapter.KEY_CASE.SRC, json)
				.camel_()  //convert camelCase keys to snake_case
				.setPrimaryKey("id"); //set the primary key
		service.save("crm_user", users);
	}

	/** Builds a DataSet from plain Maps and saves it into crm_user. */
	public static void list(){
		List<Map<String, Object>> list = new ArrayList<>();
		Map<String, Object> m1 = new HashMap<>();
		m1.put("id",1);
		m1.put("userId",11);
		m1.put("userName", "张三");
		list.add(m1);
		Map<String, Object> m2 = new HashMap<>();
		m2.put("id",2);
		m2.put("userId",22);
		m2.put("userName", "张三三");
		list.add(m2);
		ConfigTable.IS_UPPER_KEY = false;
		DataSet users = new DataSet(KeyAdapter.KEY_CASE.SRC, list)
				.camel_()  //convert camelCase keys to snake_case
				.setPrimaryKey("id"); //set the primary key
		service.save("crm_user", users);
	}

	/** Prints all schemas visible to the current datasource. */
	public static void schemas(){
		LinkedHashMap<String, Schema> schemas = service.metadata().schemas();
		System.out.println(schemas);
	}

	/**
	 * Initializes a demo table: drops it if present, recreates it with sample columns
	 * and a unique index, then reloads its metadata.
	 * @param name table name
	 * @return Table metadata reloaded after creation
	 */
	public static Table init(String name) throws Exception{
		// Query the table; false means only look the name up, do not load the full structure.
		Table table = service.metadata().table(name, false);
		// If it already exists, drop and recreate it.
		if(null != table){
			service.ddl().drop(table);
		}
		table = new Table<>(name).setComment("表备注");
		table.addColumn("ID", "bigint").primary(true).autoIncrement(true).setComment("主键");
		table.addColumn("CODE", "varchar(20)").setComment("编号");
		table.addColumn("NAME", "varchar(50)").setComment("名称");
		table.addColumn("QTY", "int").setComment("数量");
		table.addColumn("O_NAME", "varchar(50)").setComment("原列表");
		table.addColumn("SALARY", "decimal(10,2)").setComment("精度").nullable(false);
		table.addColumn("DEL_COL", "varchar(50)").setComment("删除");
		table.addColumn("CREATE_TIME", "datetime").setComment("创建时间").setDefaultValue(JDBCAdapter.SQL_BUILD_IN_VALUE.CURRENT_DATETIME);
		service.ddl().save(table);
		table = service.metadata().table(name);
		Index index = new Index();
		index.addColumn("SALARY");
		index.addColumn("CODE");
		index.setName("IDX_SALARY_CODE_"+name);
		index.setUnique(true);
		index.setTable(table);
		service.ddl().add(index);
		return table;
	}

	/** Lists database functions and stored procedures together with their definitions. */
	public static void function(){
		LinkedHashMap<String, Function> functions = service.metadata().functions();
		for(Function function:functions.values()){
			System.out.println(function.getName());
			System.out.println(function.getDefinition());
			// A list is used because some databases (tables in particular) may emit several DDL statements.
			// After this call the function's ddl property is populated, so ddl/ddls can be read from it directly.
			List<String> ddls = service.metadata().ddl(function);
			System.out.println(function.getDdl());
		}
		LinkedHashMap<String, Procedure> procedures = service.metadata().procedures();
		for(Procedure procedure:procedures.values()){
			System.out.println(procedure.getName());
			System.out.println(procedure.getDefinition());
		}
	}

	/** Demonstrates table creation, metadata-aware insert/save and structure inspection on hr_department. */
	public static void table() throws Exception{
		System.out.println("-------------------------------- start  table  ------------------------------------------");

		ConfigTable.IS_SQL_DELIMITER_OPEN = true;
		Table table = service.metadata().table("hr_department");
		try {
			if(null != table){
				service.ddl().drop(table);
			}
			table = new Table("hr_department");
			table.addColumn("ID", "INT").primary(true).autoIncrement(true);
			table.addColumn("NM", "varchar(50)");
			table.addColumn("REG_TIME", "datetime");
			table.addColumn("DATA_STATUS", "int");
			table.addColumn("QTY", "int");
			service.ddl().create(table);
		}catch (Exception e){
			// Log through the class logger instead of printStackTrace so the failure lands in the app log.
			log.error("create hr_department failed", e);
		}

		List<String> ddls = service.metadata().ddl("hr_department");
		for(String ddl:ddls){
			System.out.println(ddl);
		}
		DataRow row = new DataRow();
		row.put("NM","TEST");
		row.put("AGE","20");
		if(null != seq){
			row.put("ID", "${"+seq+".NEXTVAL}");
		}

		try {
			//AGE 属性在表中不存在,直接插入会SQL异常
		//	service.insert("hr_department", row);
		}catch (Exception e){
			// Pass the Throwable so the stack trace is preserved in the log.
			log.error("AGE 属性在表中不存在,直接插入会SQL异常", e);
		}

		ConfigTable.IS_AUTO_CHECK_METADATA = true;
		// With metadata checking enabled, columns that do not exist in the table are filtered out first.
		row.remove("ID");

		if(null != seq){
			row.put("ID", "${"+seq+".NEXTVAL}");
		}

		service.insert("hr_department", row);
		row.put("TMP_COLUMN","TMP");
		service.save("hr_department", row);

		row.remove("ID");
		if(null != seq){
			row.put("ID", "${"+seq+".NEXTVAL}");
		}

		// Identical table structures are cached for a while, so the physical table is not re-read every time.
		service.insert("hr_department", row);

		row.put("REG_TIME","");						//datetime conversion failures are treated as null
		row.put("AGE",1);							//columns absent from the table do not take part in the update
		row.put("QTY","");							//int conversion failures are treated as null
		row.put("DATA_STATUS","1");					//converted to int
		service.save("hr_department", row);

		DataSet set = new DataSet();
		for(int i=0; i<10; i++){
			DataRow r = new DataRow();
			r.put("NM", "n_"+i);
			set.add(r);
		}
		service.insert("hr_department", set);

		// All table names; fuzzy matching is supported.
		List<String> tables = service.tables();
		System.out.println(tables);
		tables = service.tables(1);
		System.out.println(tables);
		tables = service.tables("bs_%",1);
		System.out.println(tables);
		tables = service.tables("root","bs_%",1);
		System.out.println(tables);

		// All tables (without columns, indexes or other structure details).
		LinkedHashMap<String, Table> tbls = service.metadata().tables();
		// Table structure (without columns, indexes or other structure details).
		table = service.metadata().table("hr_department");
		LinkedHashMap<String, Column> columns = table.getColumns();
		System.out.println(table.getName()+" 属性:");
		for(Column column:columns.values()){
			System.out.println("\t"+column.toString());
		}
		columns= service.metadata().columns(tbls.get("HR_DEPARTMENT"));
		for(Column column:columns.values()){
			System.out.println("\t"+column.toString());
		}
		LinkedHashMap<String, Column> pks = table.primarys();

		System.out.println(table.getName()+" 主键:");
		for(Column column: pks.values()){
			System.out.println("\t"+column.toString());
		}

		LinkedHashMap<String, Index> indexes = table.getIndexes();
		for(Index index:indexes.values()){
			System.out.println(table.getName()+"所引:"+index.getName()+ " 类型:"+index.getType()+ " 唯一:"+index.isUnique()+" 包含列:");
			columns = index.getColumns();
			for(Column column:columns.values()){
				System.out.println("\t"+column.toString());
			}
		}
		System.out.println("-------------------------------- end  table  --------------------------------------------");
	}

	/** Placeholder for tag-related demos. */
	public static void tag() throws Exception{
		System.out.println("-------------------------------- start tag  ----------------------------------------------");
		System.out.println("-------------------------------- end tag  ------------------------------------------------");
	}

	/** Lists the indexes of CRM_USER along with their flags and member columns. */
	public static void index() throws Exception{
		System.out.println("-------------------------------- start index  --------------------------------------------");
		Table table = service.metadata().table("CRM_USER");

		LinkedHashMap<String,Index> indexes = table.getIndexes();
		// Or query them directly.
		indexes = service.metadata().indexes("CRM_USER");
		for(Index item:indexes.values()){
			System.out.println("所引:"+item.getName());
			System.out.println("是否主键:"+item.isPrimary());
			System.out.println("是否物理所引:"+item.isCluster());
			System.out.println("是否唯一:"+item.isUnique());
			LinkedHashMap<String, Column> columns = item.getColumns();
			for(Column column:columns.values()){
				System.out.println("包含列:"+column.getName());
			}
		}
		System.out.println("-------------------------------- end index  ---------------------------------------------");
	}

	/** Creates (if needed) hr_department, then recreates and lists the v_hr_department view. */
	public static void view() throws Exception{
		Table table = service.metadata().table("hr_department");
		if(null == table){
			table = new Table("hr_department");
			table.addColumn("ID", "INT").primary(true).autoIncrement(true);
			table.addColumn("NM", "varchar(50)");
			table.addColumn("REG_TIME", "datetime");
			table.addColumn("DATA_STATUS", "int");
			table.addColumn("QTY", "int");
			table.addColumn("REG_DATE", "DATE");
			service.ddl().create(table);
		}
		View view = service.metadata().view("v_hr_department");
		if(null != view){
			service.ddl().drop(view);
		}
		view = new View("v_hr_department");
		view.setDefinition("SELECT * FROM HR_DEPARTMENT");
		view.setComment("视图备注");
		service.ddl().create(view);
		Map<String,View> views = service.metadata().views();
		System.out.println(views);
	}

	/** Demonstrates the different table-listing overloads, including cross-schema (greedy) lookup. */
	public static void tables() throws Exception{
		//init("HR_EMPLOYEE");
		// Demonstrates the schema/pattern/type overload; the result itself is not inspected here.
		LinkedHashMap<String,Table> ts = service.metadata().tables(null, new Schema("simple"),"test", 1);

		List<Table> list = service.metadata().tables(true, null,  1, 1);
		for(Table table:list){
			System.out.println(table+":"+table.getComment());
		}
		System.out.println("-------------------------------- start tables  --------------------------------------------");
		LinkedHashMap<String,Table> tables = service.metadata().tables();
		for(String key:tables.keySet()){
			Table table = tables.get(key);
			log.warn("table:{}", table.getName());
			log.warn("comment:{}", table.getComment());
		}

		// Tables outside the current schema are not found by default.
		Table table = service.metadata().table("art_comment");
		if(null != table) {
			System.out.println(table.getCatalog() + ":" + table.getSchema() + ":" + table.getName());
		}
		// With greedy=true, tables in other schemas can be found as well.
		table = service.metadata().table(true,"art_comment");
		if(null != table) {
			System.out.println(table.getCatalog() + ":" + table.getSchema() + ":" + table.getName());
		}

		System.out.println("-------------------------------- end tables  ----------------------------------------------");
	}

	/** Prints column metadata for hr_department.ID and every column of b_array. */
	public static void column() throws Exception{
		System.out.println("-------------------------------- start column  -------------------------------------------");
		Column col = service.metadata().column("hr_department","ID");
		if(null != col) {
			log.warn("column: ID, type:{}", col.getFullType());
		}
		LinkedHashMap<String,Column> columns = service.metadata().columns("b_array");
		for(Column column: columns.values()){
			log.warn("column:{}\ttype:{}\tauto increment:{}",column.getName(), column.getFullType(), column.isAutoIncrement()==1);
		}
		System.out.println("-------------------------------- end column  --------------------------------------------");
	}

	/** Placeholder for exception-handling demos. */
	public static void exception() throws Exception{
		System.out.println("-------------------------------- start exception  ----------------------------------------");
		System.out.println("-------------------------------- end  exception  -----------------------------------------");
	}
}
