package com.yeyaomai.dksns.flowis.quartz;

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

import javax.annotation.Resource;

import org.springframework.data.mongodb.core.mapreduce.MapReduceOptions;
import org.springframework.data.mongodb.core.mapreduce.MapReduceResults;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;

import com.yeyaomai.dksns.flowis.dao.DetailDao;
import com.yeyaomai.dksns.flowis.dao.KeywordDao;
import com.yeyaomai.dksns.flowis.domain.EngineKeyword;
import com.yeyaomai.dksns.flowis.domain.Engine_out;
import com.yeyaomai.dksns.flowis.domain.Keyword;
import com.yeyaomai.dksns.flowis.domain.Keyword_out;
import com.yeyaomai.dksns.flowis.util.VelocityToolDateUtils;

/**
 * Scheduled job: aggregates one day's raw flow records from MongoDB
 * (via map-reduce) into per-keyword and per-keyword-per-search-engine
 * counts, then persists the results through {@link KeywordDao} into MySQL.
 *
 * @author zhaocm
 */
public class KeywordDayExecuto {

	@Resource
	private DetailDao detailDao;

	@Resource
	private KeywordDao keywordDao;

	/**
	 * Runs the daily keyword aggregation for "today" ([getDay(0), getDay(1))).
	 * The run is idempotent: any previously stored results for the same day
	 * are deleted before re-insertion.
	 *
	 * @throws ParseException if the date strings produced by
	 *         {@link VelocityToolDateUtils#getDay(int)} do not match yyyy-MM-dd
	 */
	public void keywordDayTiming() throws ParseException {

		String st = VelocityToolDateUtils.getDay(0);
		String et = VelocityToolDateUtils.getDay(1);
		// SimpleDateFormat is not thread-safe, but this instance is
		// method-local, so each invocation gets its own.
		SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
		// Parse the start date once instead of re-parsing it on every
		// loop iteration — st never changes within this run.
		Date day = sdf.parse(st);

		// An empty Query matches all documents; used below to clear the
		// temporary map-reduce output collections after processing.
		Query query = new Query();

		// Remove any results already stored for this day (idempotent re-run).
		keywordDao.deleteKeyword(day);
		keywordDao.deleteEngineKeyword(day);

		MapReduceResults<Keyword_out> keyword_outs = groupKeyword(st, et);
		if (keyword_outs != null) {
			for (Keyword_out ko : keyword_outs) {
				// getValue() supplies the aggregated counts; only the
				// keyword (the map-reduce _id) and date need filling in.
				Keyword keyword = ko.getValue();
				keyword.setKeyword(ko.get_id());
				keyword.setDate(day);
				keywordDao.insertKeyword(keyword);

				MapReduceResults<Engine_out> engine_outs = groupEngine(st, et, ko.get_id());
				if (engine_outs != null) {
					for (Engine_out eo : engine_outs) {
						EngineKeyword ek = eo.getValue();
						ek.setEngineName(eo.get_id());
						ek.setKeyword(ko.get_id());
						ek.setDate(day);
						keywordDao.insertEngineKeyword(ek);
					}
				}
				// Clear the per-keyword engine_out output collection
				// before the next keyword's map-reduce writes into it.
				detailDao.delete(query, Engine_out.class);
			}
		}
		// Clear the keyword_out temporary output collection.
		detailDao.delete(query, Keyword_out.class);

	}

	/**
	 * Map-reduces flow records in [st, et) grouped by key_word, counting
	 * searches per keyword. Results are written to the "keyword_out"
	 * collection and also returned.
	 *
	 * @param st inclusive start date (yyyy-MM-dd)
	 * @param et exclusive end date (yyyy-MM-dd)
	 * @return per-keyword aggregation results, keyed by keyword
	 */
	public MapReduceResults<Keyword_out> groupKeyword(String st, String et) {

		String mapFunction = "function() { "
				+ "emit( this.key_word , {searchCount:1});   }";

		String reduceFunction = "function(key,values){"
				+ "var ret = { keyword : 0,searchCount: 0,uniqueVisitors: 0, ip: 0,newUniqueVisitors:0};"
				+ "for( var i=0; i<values.length; i++){"
				+ "ret.searchCount += values[i].searchCount;" + "} "
				+ "return ret;  }";
		// NOTE(review): ne("null") filters the literal string "null", not a
		// missing field — presumably the source data stores "null" as text;
		// if the intent was to skip absent keywords, this should be ne(null).
		Query query = new Query(Criteria.where("key_word").ne("null").and("access_date").gte(st).lt(et));
		return detailDao.groupFlowByTimeAndFunction(query, mapFunction,
				reduceFunction,
				new MapReduceOptions().outputCollection("keyword_out"),
				Keyword_out.class);
	}

	/**
	 * Map-reduces flow records in [st, et) for a single keyword, grouped by
	 * search_engine, counting searches per engine. Results are written to the
	 * "engine_out" collection and also returned.
	 *
	 * @param st      inclusive start date (yyyy-MM-dd)
	 * @param et      exclusive end date (yyyy-MM-dd)
	 * @param keyword the keyword whose per-engine counts are wanted
	 * @return per-engine aggregation results, keyed by engine name
	 */
	public MapReduceResults<Engine_out> groupEngine(String st, String et,
			String keyword) {

		String mapFunction = "function() { "
				+ "emit( this.search_engine , {searchCount:1});   }";

		String reduceFunction = "function(key,values){"
				+ "var ret = { engineName : 0,searchCount: 0};"
				+ "for( var i=0; i<values.length; i++){"
				+ "ret.searchCount += values[i].searchCount;" + "} "
				+ "return ret;  }";
		// NOTE(review): same string-"null" comparison caveat as in
		// groupKeyword above — confirm against the stored data.
		Query query = new Query(Criteria.where("search_engine").ne("null").and("access_date").gte(st).lt(et)
				.and("key_word").is(keyword));
		return detailDao.groupFlowByTimeAndFunction(query, mapFunction,
				reduceFunction,
				new MapReduceOptions().outputCollection("engine_out"),
				Engine_out.class);
	}

}
