package com.pig4cloud.pig.admin.task.base;

import com.alibaba.fastjson.JSONObject;
import com.pig4cloud.pig.admin.api.entity.NewsList;
import com.pig4cloud.pig.admin.mapper.NewsListMapper;
import com.pig4cloud.pig.admin.service.HttpUtilService;
import com.pig4cloud.pig.admin.util.SnowflakeIdGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.connection.stream.StreamRecords;
import org.springframework.data.redis.core.RedisTemplate;
import com.pig4cloud.pig.admin.constant.CommonConstant;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.kafka.core.KafkaTemplate;

/**
 * Base class for scheduled news-gathering tasks.
 *
 * <p>Provides shared infrastructure (HTTP client wrapper, Kafka producer,
 * Redis template, news persistence mapper, ID generator) to concrete task
 * implementations, which supply the actual work in {@link #doTask()}.
 */
public abstract class BaseTask {

	// Per-subclass logger so log lines carry the concrete task's class name.
	protected Logger logger = LoggerFactory.getLogger(getClass());

	@Autowired
	protected HttpUtilService httpUtilService;

	// NOTE(review): raw KafkaTemplate — only String payloads are sent here
	// (see pushNewsToRedis), so this could likely be KafkaTemplate<String, String>;
	// left raw to avoid breaking subclasses that may assign/use it differently.
	@Autowired
	protected KafkaTemplate kafkaTemplate;

	@Autowired
	protected StringRedisTemplate redisTemplate;

	@Autowired
	protected NewsListMapper newsListMapper;

	// Snowflake-style unique ID generator; (1, 7) are the node identifiers
	// for this service instance — presumably datacenter/worker IDs, TODO confirm.
	protected SnowflakeIdGenerator idGenerator = new SnowflakeIdGenerator(1, 7);

	/**
	 * Executes the scheduled task. Implemented by each concrete task subclass.
	 */
	public abstract void doTask();

	/**
	 * Persists a news item, tolerating duplicates.
	 *
	 * <p>Best-effort insert: any exception (typically a unique-key violation
	 * for an already-stored news item) is logged and deliberately swallowed
	 * so that one duplicate does not abort the whole gathering run.
	 *
	 * @param news the news entity to insert
	 */
	public void insertNews(NewsList news) {
		try {
			newsListMapper.insert(news);
		} catch (Exception e) {
			// Pass the Throwable as the last argument so SLF4J logs the full
			// stack trace (the previous e.getMessage() arg had no {} placeholder
			// and was silently dropped).
			logger.error("数据库中已存在相同新闻", e);
		}
	}

	/**
	 * Publishes a news item to the "news_gather" Kafka topic as JSON.
	 *
	 * <p>This replaced an earlier Redis list/stream push mechanism;
	 * the method name is kept for caller compatibility.
	 *
	 * @param news the news entity to serialize and publish
	 */
	public void pushNewsToRedis(NewsList news) {
		kafkaTemplate.send("news_gather", JSONObject.toJSONString(news));
	}
}
