package io.github.junxworks.qt.modules.data.handler;

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;

import io.github.junxworks.junx.core.executor.StandardThreadExecutor;
import io.github.junxworks.junx.event.EventChannel;
import io.github.junxworks.junx.event.EventContext;
import io.github.junxworks.junx.event.impl.BatchDisruptorEventChannelHandler;
import io.github.junxworks.qt.config.Params;
import io.github.junxworks.qt.modules.data.service.DataStoreService;

/**
 * Event-channel handler that persists event payloads in batches.
 *
 * <p>For each delivered batch of {@link EventContext}s it extracts the payload
 * stored under {@code Params.PARAM_STORE_OBJ} and hands the resulting list to
 * {@link DataStoreService#insertBatch} on a dedicated executor, so persistence
 * never blocks the event-channel dispatch thread. Failures are logged and
 * swallowed (best-effort persistence — events are not re-queued).
 */
public class DataStoreHandler extends BatchDisruptorEventChannelHandler {

	private static final Logger log = LoggerFactory.getLogger(DataStoreHandler.class);

	/** Service that performs the actual batch insert. */
	@Autowired
	private DataStoreService dataService;

	/** Dedicated executor so database writes run off the channel thread. */
	@Autowired
	@Qualifier("dataStoreExecutor")
	private StandardThreadExecutor dataStoreExecutor;

	/** Maximum number of records per insert statement; defaults to 100. */
	private int batchSize = 100;

	public int getBatchSize() {
		return batchSize;
	}

	public void setBatchSize(int batchSize) {
		this.batchSize = batchSize;
	}

	/**
	 * Collects the store-object payload of every event and submits one
	 * asynchronous batch-insert task for the whole batch.
	 *
	 * @param events  events delivered by the disruptor channel; payloads that
	 *                are absent ({@code null}) pass through unchanged —
	 *                NOTE(review): confirm {@code insertBatch} tolerates nulls
	 * @param channel the originating channel (unused here)
	 * @throws Exception declared by the superclass contract; not thrown here
	 */
	@Override
	public void handleEvents(List<EventContext> events, EventChannel channel) throws Exception {
		// flatMap(Stream.of(x)) over a single element is just map.
		final List<Object> objs = events.stream()
				.map(e -> (Object) e.getData(Params.PARAM_STORE_OBJ))
				.collect(Collectors.toList());
		if (objs.isEmpty()) {
			return; // nothing to persist — skip submitting an empty task
		}
		// Snapshot the mutable field so a concurrent setBatchSize() cannot
		// change the value between submission and execution of the task.
		final int size = batchSize;
		dataStoreExecutor.execute(() -> {
			try {
				dataService.insertBatch(objs, size);
			} catch (Exception ex) {
				log.error("存储数据失败", ex);
			}
		});
	}

}
