package cn.sh.library.mgts.service.impl;

import java.io.IOException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.UUID;

import javax.sql.DataSource;

import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

import com.google.gson.Gson;

import cn.sh.library.mgts.bean.DataSyncLog;
import cn.sh.library.mgts.common.CommonUtil;
import cn.sh.library.mgts.common.Constant;
import cn.sh.library.mgts.service.AsyncService;
import cn.sh.library.mgts.service.ConfigService;
import cn.sh.library.mgts.service.IndexService;
import cn.sh.library.mgts.utils.SQLHelper;

@Service
public class AsyncServiceImpl implements AsyncService {

	private static final Logger logger = LoggerFactory.getLogger(AsyncServiceImpl.class);
	private static final Logger loggerBulk = LoggerFactory.getLogger(Constant.LOGGER_BULK_NAME);

	@Autowired
	ConfigService configService;

	@Autowired
	DataSource bibDataSource;

	@Autowired
	@Qualifier("itemDataSource")
	DataSource itemDataSource;

	// NOTE(review): two RestHighLevelClient fields are injected but neither is
	// used in this class; they are kept to avoid changing the wiring — confirm
	// whether one (or both) can be removed.
	@Autowired
	RestHighLevelClient restHighLevelClient;

	@Autowired
	IndexService indexService;

	@Autowired
	RestHighLevelClient highLevelClient;

	@Autowired
	@Qualifier("bulkProcessor")
	BulkProcessor bulkProcessor;

	/**
	 * Asynchronously loads one 1000-record batch of bibliographic MARC rows
	 * (bib# in ((batchth-1)*1000, batchth*1000], excluding staff-only records)
	 * and feeds them to the bulk indexer. Elapsed time and success are written
	 * to the bulk logger.
	 *
	 * @param batchth 1-based batch number selecting the bib# range
	 */
	@Override
	@Async("asyncServiceExecutor") // runs on the configured async executor
	public void bulkBibData(Integer batchth) {
		Connection conn = null;
		List<Map<String, String>> list = null;
		Date start = new Date();
		boolean isSucc = false;
		try {
			// Connection settings come from application.yml.
			conn = bibDataSource.getConnection();
			// batchth is an Integer supplied by our own scheduler, so the
			// arithmetic concatenation below is not attacker-controlled.
			String sqlString = "SELECT  a.bib# as bibno ,tag ,tagord ,indicators ,text, b.cat_type_id as marcType,b.status,b.staff_only\r\n"
					+ "  FROM [olapCirc].[dbo].[_hor_bib] a\r\n"
					+ "  inner join _hor_bib_control b on a.bib# = b.bib#\r\n" + "  where a.bib# > (" + batchth
					+ "-1) * 1000 and a.bib# <= " + batchth + " * 1000  AND b.staff_only!=1 order by a.bib#";
			list = SQLHelper.getResultList(conn, sqlString);
			isSucc = true;
		} catch (SQLException e) {
			loggerBulk.error("bulkBibData failed for batchth={}", batchth, e);
		} finally {
			SQLHelper.closeConnection(conn);
		}

		if (list != null) { // query may have failed; nothing to index then
			bulkBibData(list);
		}
		Date end = new Date();
		loggerBulk.info("batchth:{};time:{};success:{}", batchth, end.getTime() - start.getTime(), isSucc);
	}

	/**
	 * Loads the MARC rows for a single bib number and feeds them to the bulk
	 * indexer.
	 *
	 * @param bibno bibliographic record number (must be numeric)
	 * @return {@code true} if at least one non-staff-only row was found and the
	 *         record is not blocked; {@code false} otherwise (including on
	 *         query failure or a malformed bib number)
	 */
	@Override
	public boolean bibDataByNo(String bibno) {
		// bib# is numeric everywhere else in this class (arithmetic range
		// predicates in bulkBibData); reject anything else so the value cannot
		// be used for SQL injection when concatenated below.
		if (bibno == null || !bibno.matches("\\d+")) {
			loggerBulk.warn("bibDataByNo rejected non-numeric bibno: {}", bibno);
			return false;
		}
		Connection conn = null;
		List<Map<String, String>> list = null;
		try {
			// Connection settings come from application.yml.
			conn = bibDataSource.getConnection();
			String sqlString = "SELECT  a.bib# as bibno ,tag ,tagord ,indicators ,text, b.cat_type_id as marcType,b.status,b.staff_only\r\n"
					+ "  FROM [olapCirc].[dbo].[_hor_bib] a\r\n"
					+ "  inner join _hor_bib_control b on a.bib# = b.bib#\r\n" + "  where a.bib#='" + bibno + "'  AND b.staff_only!=1 order by a.bib#";
			list = SQLHelper.getResultList(conn, sqlString);
		} catch (SQLException e) {
			loggerBulk.error("bibDataByNo failed for bibno={}", bibno, e);
		} finally {
			SQLHelper.closeConnection(conn);
		}

		// Guard against NPE: list stays null when the query above threw.
		if (list == null) {
			return false;
		}
		bulkBibData(list);

		return !list.isEmpty() && !CommonUtil.isBibBlocked(bibno);
	}

	/**
	 * Deletes from the index every bib record marked 'delete' in the update
	 * table within [startTime, endTime).
	 *
	 * NOTE(review): startTime/endTime are concatenated into the SQL string;
	 * they are expected to come from internal schedulers, not user input —
	 * confirm, or switch SQLHelper to parameterized queries.
	 *
	 * @param startTime inclusive lower bound on change_datetime
	 * @param endTime   exclusive upper bound on change_datetime
	 */
	@Override
	public void deleteBib(String startTime, String endTime) {
		Connection conn = null;
		List<Map<String, String>> deleteList = null;

		try {
			conn = bibDataSource.getConnection();
			String deleteSqlString = "SELECT distinct bib# as bibNo \r\n"
					+ "FROM [olapCirc].[dbo].[_hor_bib_update] c \r\n"
					+ "WHERE change_action ='delete'"
					+ " 	and c.change_datetime>= '"+ startTime +"' and c.change_datetime < '" + endTime + "'";
			deleteList = SQLHelper.getResultList(conn, deleteSqlString);

			for (Map<String, String> map : deleteList) {
				String bibNo = map.get("bibNo");
				// args: index name, document id
				DeleteRequest delIndex = new DeleteRequest(Constant.BIB_INDEX_NAME, Constant.BIB_INDEX_ID_PREFIX + bibNo);
				bulkProcessor.add(delIndex);
			}
			bulkProcessor.flush();
		} catch (SQLException e) {
			logger.error("deleteBib failed for range [{}, {})", startTime, endTime, e);
		} finally {
			SQLHelper.closeConnection(conn);
		}
	}

	/**
	 * Rebuilds index entries for the given MARC rows via the bulk processor.
	 *
	 * BUG FIXED: the previous version added the very same IndexRequest to the
	 * bulk processor twice.
	 *
	 * NOTE(review): the request is still built from the literal placeholder
	 * strings "{indexname}"/"{docid}"/"{source}" and never reads updateList —
	 * this looks like an unfinished stub. The real implementation presumably
	 * needs to build one request per record (likely via indexService); confirm
	 * and complete.
	 *
	 * @param updateList rows to (re)index; nothing is submitted when null/empty
	 */
	public void bulkBibData(List<Map<String, String>> updateList) {
		if (updateList == null || updateList.isEmpty()) {
			return; // nothing to index
		}

		IndexRequest addIndex = new IndexRequest().index("{indexname}")
				.id("{docid}").source("{source}", XContentType.JSON);
		bulkProcessor.add(addIndex);
		bulkProcessor.flush();
	}

}
