package com.infinitus.autocatlog.task;

import java.io.IOException;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import javax.persistence.EntityManagerFactory;

import org.apache.http.HttpEntity;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.infinitus.autocatlog.domain.KafkaConfig;
import com.infinitus.autocatlog.domain.MatcherConfig;
import com.infinitus.autocatlog.service.ConfigInfoService;
import com.infinitus.autocatlog.service.KafkaLogService;
import com.infinitus.autocatlog.service.RedisService;

@Component
public class LoadDataTask {
	
	private static final Logger LOG = LoggerFactory.getLogger(LoadDataTask.class);
	
	
	@Autowired
	private RedisService redisService;
	@Autowired
	private MatcherConfig matcherConfig;
	@Autowired
	private KafkaConfig kafkaConfig;
	@Autowired
	private EntityManagerFactory entityManagerFactory;
	@Autowired
	private ConfigInfoService configInfoService;
	@Autowired
	private KafkaLogService kafkaLogService;
	
	// Redis entry lifetime in seconds (24 hours). Left public and non-final for
	// backward compatibility: external code may read or reassign it.
	public static long OVERTIME_4_REDIS = 86400;
	
	
	/**
	 * Scheduled refresh of Event_Value types taken from the page configuration.
	 * <p>
	 * NOTE(review): the original body (iterating
	 * {@code configInfoService.getTypeFromPage()} and storing each page set under
	 * {@code MatcherConfig.TYPE + key} via {@code redisService.sadd(...)}) was
	 * commented out; this job is intentionally a no-op until that refresh is
	 * re-enabled. Runs at minute 0 and 30 of every hour.
	 */
	@Scheduled(cron = "0 0,30 * * * ?") 
	public void loadQueryType() {
		// Intentionally disabled — see Javadoc above.
	}

	
	/**
	 * Scheduled refresh of the field-type cache for every configured topic
	 * (app/pc, production/test). Runs at minute 0 and 30 of every hour.
	 */
	@Scheduled(cron = "0 0,30 * * * ?") 
	public void loadFieldTypes() {
		loadFieldType(kafkaConfig.getAppProTopic());
		loadFieldType(kafkaConfig.getPcProTopic());
		loadFieldType(kafkaConfig.getAppTestTopic());
		loadFieldType(kafkaConfig.getPcTestTopic());
	}

	/**
	 * Loads the type of each field for the given topic and caches it in Redis.
	 * <p>
	 * Performs an HTTP GET against {@code kafkaConfig.getFiledTypeUrl(topic)},
	 * expecting a JSON body of the shape
	 * {@code {"result":[{"name":...,"type":...}, ...]}}, and stores each entry
	 * under the key {@code MatcherConfig.FIELD_TYPE + topic + name}.
	 * Non-200 responses, empty bodies, and missing "result" arrays are skipped;
	 * I/O failures are logged (previously swallowed via printStackTrace).
	 *
	 * @param topic the Kafka topic whose field types should be refreshed
	 */
	public void loadFieldType(String topic) {
		String url = kafkaConfig.getFiledTypeUrl(topic);
		// try-with-resources guarantees both client and response are closed,
		// replacing the original hand-rolled nested try/finally.
		try (CloseableHttpClient httpclient = HttpClients.createDefault();
				CloseableHttpResponse response = httpclient.execute(new HttpGet(url))) {
			int status = response.getStatusLine().getStatusCode();
			if (status != HttpStatus.SC_OK) {
				LOG.warn("Field-type request for topic {} returned HTTP {} (url={})", topic, status, url);
				return;
			}
			HttpEntity entity = response.getEntity();
			if (entity == null) {
				return;
			}
			String res = EntityUtils.toString(entity, "UTF-8");
			if (res == null || res.trim().isEmpty()) {
				return;
			}
			JSONObject json = JSONObject.parseObject(res);
			// Guard against a missing/absent "result" array — the original code
			// dereferenced it unconditionally and could NPE on malformed payloads.
			JSONArray types = (json == null) ? null : json.getJSONArray("result");
			if (types == null) {
				LOG.warn("Field-type response for topic {} has no 'result' array", topic);
				return;
			}
			for (int i = 0; i < types.size(); i++) {
				JSONObject field = types.getJSONObject(i);
				redisService.set(MatcherConfig.FIELD_TYPE + topic + field.getString("name"),
						field.getString("type"));
			}
		} catch (IOException e) {
			// Log with context instead of printStackTrace; the scheduler keeps running.
			LOG.error("Failed to load field types for topic {} (url={})", topic, url, e);
		}
	}
}
