/*
package com.pie4cloud.pie.job.biz.service.impl;


import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.pie4cloud.pie.jpa.BaseDao;
import com.pie4cloud.pie.jpa.BaseService;
import com.pie4cloud.pie.jpa.page.PageBean;
import com.pie4cloud.pie.jpa.page.PageForm;
import com.pie4cloud.pie.jpa.specification.SimpleSpecificationBuilder;
import com.pie4cloud.pie.jpa.specification.SpecificationOperator;
import com.pie4cloud.pie.bus.api.entity.DataKafConsumer;
import com.pie4cloud.pie.bus.api.feign.RemoteDataKafConsumerService;
import com.pie4cloud.pie.common.core.constant.CacheConstants;
import com.pie4cloud.pie.common.core.constant.SecurityConstants;
import com.pie4cloud.pie.common.core.util.R;

import com.pie4cloud.pie.job.api.entity.JobInfoEntity;
import com.pie4cloud.pie.job.api.entity.KafkaJobRelation;
import com.pie4cloud.pie.job.biz.dao.DispenseDao;
import com.pie4cloud.pie.job.biz.enums.ExecutorBlockStrategyEnum;
import com.pie4cloud.pie.job.biz.service.DispenseService;
import com.pie4cloud.pie.job.biz.service.JobInfoService;
import com.pie4cloud.pie.job.biz.service.KafkaJobRelationService;
import com.pie4cloud.pie.job.biz.service.execute.ExecuteService;
import com.pie4cloud.pie.job.biz.service.execute.impl.ExecuteBaseService;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.stereotype.Service;

*/
/**
 * @program: sod
 * @description: Dispense service implementation - persists dispense records and
 *               manages their scheduled (timing) and realtime Kafka consumers
 *               plus the job relations that link them
 * @author: zzj
 * @create: 2019-12-27 16:55
 **//*

@Service
@RequiredArgsConstructor
public class DispenseServiceImpl extends BaseService<DispenseEntity> implements DispenseService {

	// Worker-thread count used for every dispense Kafka consumer.
	private static final int KAF_WORK_NUM = 4;
	// Executor service that hosts the consumer handlers.
	private static final String EXECUTOR_SERVICE_NAME = "pie-job-executor";
	// Relation-id prefixes that link a dispense record to its Kafka consumer.
	private static final String TIMING_RELATION_PREFIX = "DISPENSE_TIMING_";
	private static final String REAL_RELATION_PREFIX = "DISPENSE_REAL_";

	@Autowired
	private DispenseDao dispenseDao;

	@Override
	public BaseDao<DispenseEntity> getBaseDao() {
		return dispenseDao;
	}

	@Autowired
	private JobInfoService jobInfoService;
	private final RemoteDataKafConsumerService remoteDataKafConsumerService;
	@Autowired
	private KafkaJobRelationService kafkaJobRelationService;

	// Loads a dispense record by primary key.
	// Throws NoSuchElementException when the id does not exist (Optional.get()),
	// matching the original contract callers may rely on.
	@Override
	public DispenseEntity getById(Integer id) {
		return dispenseDao.findById(id).get();
	}

	// Pages dispense records, optionally filtered by triggerStatus,
	// newest (createTime DESC) first. Page size/number are taken from the
	// MyBatis-Plus Page argument and translated to the JPA PageForm.
	public PageBean<DispenseEntity> page(Page page, DispenseEntity dispenseEntity) {
		SimpleSpecificationBuilder specificationBuilder = new SimpleSpecificationBuilder();
		if (null != dispenseEntity.getTriggerStatus()) {
			specificationBuilder.add("triggerStatus", SpecificationOperator.Operator.eq.name(), dispenseEntity.getTriggerStatus());
		}
		Specification specification = specificationBuilder.generateSpecification();
		Sort sort = Sort.by(Sort.Direction.DESC, "createTime");
		PageForm pageForm = new PageForm((int) page.getCurrent(), (int) page.getSize());
		return this.getPage(specification, pageForm, sort);
	}

	// Persists a dispense record and registers its Kafka consumer on the bus.
	// isRealtime == 0: timing mode - consumer creation failures are swallowed
	// (best effort) and the scheduled job is started regardless.
	// isRealtime != 0: realtime mode - consumer creation exceptions propagate
	// (no try/catch in the original realtime branch; preserved).
	public DispenseEntity save(DispenseEntity dispenseEntity) {
		dispenseEntity = dispenseDao.saveNotNull(dispenseEntity);
		if (0 == dispenseEntity.getIsRealtime()) {
			try {
				createConsumer(buildTimingConsumer(dispenseEntity), TIMING_RELATION_PREFIX + dispenseEntity.getId());
			} catch (Exception e) {
				// NOTE(review): best-effort registration kept from the original;
				// replace printStackTrace with proper logging when a logger is available.
				e.printStackTrace();
			}
			jobInfoService.start(dispenseEntity);
		} else {
			createConsumer(buildRealtimeConsumer(dispenseEntity), REAL_RELATION_PREFIX + dispenseEntity.getId());
		}
		return dispenseEntity;
	}

	// Updates a dispense record and reconciles its Kafka consumer(s).
	// Timing mode: persist, push an updated consumer definition, restart the job.
	// Realtime mode: drop any leftover timing consumer, optionally (isHistory == 1)
	// refresh a timing consumer for historical data, then refresh the realtime one.
	@Override
	@CacheEvict(value = "DISPENSE", key = "#dispenseEntity.id")
	public DispenseEntity update(DispenseEntity dispenseEntity) {
		if (0 == dispenseEntity.getIsRealtime()) {
			dispenseEntity = dispenseDao.saveNotNull(dispenseEntity);
			try {
				DataKafConsumer consumer = buildTimingConsumer(dispenseEntity);
				KafkaJobRelation relation = kafkaJobRelationService.getById(TIMING_RELATION_PREFIX + dispenseEntity.getId());
				if (null != relation) {
					// NOTE(review): the original copies consId from the entity, not from
					// the looked-up relation - confirm this is intended.
					consumer.setConsId(dispenseEntity.getConsId());
				}
				updateConsumer(consumer, TIMING_RELATION_PREFIX + dispenseEntity.getId());
			} catch (Exception e) {
				e.printStackTrace();
			}
			jobInfoService.start(dispenseEntity);
		} else {
			// NOTE(review): the realtime branch never calls saveNotNull, so entity
			// field changes are not persisted here - verify against callers.
			KafkaJobRelation timingRelation = kafkaJobRelationService.getById(TIMING_RELATION_PREFIX + dispenseEntity.getId());
			if (null != timingRelation) {
				kafkaJobRelationService.delById(timingRelation.getId());
				remoteDataKafConsumerService.removeById(timingRelation.getConsId(), SecurityConstants.FROM_IN);
			}
			if (dispenseEntity.getIsHistory() == 1) {
				try {
					updateConsumer(buildTimingConsumer(dispenseEntity), TIMING_RELATION_PREFIX + dispenseEntity.getId());
				} catch (Exception e) {
					e.printStackTrace();
				}
			}
			try {
				updateConsumer(buildRealtimeConsumer(dispenseEntity), REAL_RELATION_PREFIX + dispenseEntity.getId());
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
		return dispenseEntity;
	}

	// Deletes a dispense record and tears down its Kafka consumer.
	// Timing mode additionally deletes the DB row and stops the scheduled job.
	// FIX: the original called remoteDataKafConsumerService.removeById(null, ...)
	// when no relation row existed; removal is now guarded like the sibling branch.
	public Boolean removeById(Integer id) {
		DispenseEntity dispenseEntity = dispenseDao.findById(id).get();
		if (0 == dispenseEntity.getIsRealtime()) {
			try {
				removeRelationAndConsumer(TIMING_RELATION_PREFIX + dispenseEntity.getId());
			} catch (Exception e) {
				e.printStackTrace();
			}
			dispenseDao.deleteById(id);
			jobInfoService.stop(id);
		} else {
			// NOTE(review): looks up the TIMING relation even in realtime mode and
			// never deletes the dispense row or the REAL consumer - original
			// behavior preserved; confirm whether REAL_RELATION_PREFIX was intended.
			removeRelationAndConsumer(TIMING_RELATION_PREFIX + dispenseEntity.getId());
		}
		return true;
	}

	// ----- private helpers (deduplicated consumer plumbing) -----

	// Builds the consumer definition for timing (scheduled) dispense:
	// dedicated topic per dispense, shared "DISPENSE" group.
	private DataKafConsumer buildTimingConsumer(DispenseEntity dispenseEntity) {
		DataKafConsumer dataKafConsumer = new DataKafConsumer();
		dataKafConsumer.setWorkNum(KAF_WORK_NUM);
		dataKafConsumer.setTopic("DISPENSE_" + dispenseEntity.getDdataId() + "_" + dispenseEntity.getId());
		dataKafConsumer.setKafGroup("DISPENSE");
		dataKafConsumer.setHandler("dispenseKakHandler");
		dataKafConsumer.setServiceName(EXECUTOR_SERVICE_NAME);
		dataKafConsumer.setTriggerStatus(dispenseEntity.getTriggerStatus());
		return dataKafConsumer;
	}

	// Builds the consumer definition for realtime dispense:
	// source-data topic, dedicated group per dispense.
	private DataKafConsumer buildRealtimeConsumer(DispenseEntity dispenseEntity) {
		DataKafConsumer dataKafConsumer = new DataKafConsumer();
		dataKafConsumer.setWorkNum(KAF_WORK_NUM);
		dataKafConsumer.setTopic(dispenseEntity.getDdataId());
		dataKafConsumer.setKafGroup("DISPENSE_" + dispenseEntity.getId());
		dataKafConsumer.setHandler("dispenseRealTimeKakHandler");
		dataKafConsumer.setServiceName(EXECUTOR_SERVICE_NAME);
		dataKafConsumer.setTriggerStatus(dispenseEntity.getTriggerStatus());
		return dataKafConsumer;
	}

	// Creates the consumer on the remote bus service; on success (code == 0)
	// stores the relationId -> consId mapping for later lookup/removal.
	private void createConsumer(DataKafConsumer dataKafConsumer, String relationId) {
		R<DataKafConsumer> result = remoteDataKafConsumerService.save(dataKafConsumer, SecurityConstants.FROM_IN);
		saveRelationOnSuccess(result, relationId);
	}

	// Updates the consumer on the remote bus service; on success (code == 0)
	// (re)stores the relationId -> consId mapping.
	private void updateConsumer(DataKafConsumer dataKafConsumer, String relationId) {
		R<DataKafConsumer> result = remoteDataKafConsumerService.updateById(dataKafConsumer, SecurityConstants.FROM_IN);
		saveRelationOnSuccess(result, relationId);
	}

	// Persists the relation row when the remote call reported success.
	private void saveRelationOnSuccess(R<DataKafConsumer> result, String relationId) {
		if (0 == result.getCode()) {
			KafkaJobRelation kafkaJobRelation = new KafkaJobRelation();
			kafkaJobRelation.setId(relationId);
			kafkaJobRelation.setConsId(result.getData().getConsId());
			kafkaJobRelationService.save(kafkaJobRelation);
		}
	}

	// Deletes the relation row and the remote consumer it points to.
	// No-op when no relation exists (avoids passing a null consId remotely).
	private void removeRelationAndConsumer(String relationId) {
		KafkaJobRelation kafkaJobRelation = kafkaJobRelationService.getById(relationId);
		if (null != kafkaJobRelation) {
			kafkaJobRelationService.delById(kafkaJobRelation.getId());
			remoteDataKafConsumerService.removeById(kafkaJobRelation.getConsId(), SecurityConstants.FROM_IN);
		}
	}
}

*/
