package com.foreveross.proxyip.core.http.ipdoload;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.Vector;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.annotation.Resource;

import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.springframework.stereotype.Service;

import com.dayatang.domain.InstanceFactory;
import com.foreveross.proxyip.core.BaseCode;
import com.foreveross.proxyip.core.pools.PoolsManager;
import com.foreveross.proxyip.core.pools.PublicPool;
import com.foreveross.proxyip.core.redis.IpStoreMap;
import com.foreveross.proxyip.core.redis.PiRedisExecption;
import com.foreveross.proxyip.core.redis.RedisExecutor;
import com.foreveross.proxyip.core.redis.RedisManager;
import com.foreveross.proxyip.entity.IpWebsiteEntity;
import com.foreveross.proxyip.entity.ProxyIpBean;
import com.foreveross.proxyip.infra.IProxyIpOriginal;
import com.foreveross.proxyip.infra.base.ddd.repository.IBaseRepository;
import com.foreveross.proxyip.util.SerializeUtil;

/**
 * Manages the list of proxy-IP download web sites (cached in redis) and
 * extracts batches of candidate proxy IPs, splitting the work between the
 * database pool and the web sites according to the WEB_DB_SCALE ratio.
 *
 * @author lyq
 */
@Service
public class IpDoloadManager {

	Logger log = Logger.getLogger(IpDoloadManager.class);

	/** Lazily compiled by {@link #getWebSiteLinePattern()}; matches "{field}" placeholders in a row regex. */
	public Pattern webSiteLinePattern = null;

	@Resource(name = "webSiteDoload")
	private IProxyIpOriginal webSiteDoload;

	RedisManager redisManager = RedisManager.getInstance();

	PoolsManager poolsManager = null;

	/** Shared RNG for picking a random web-site index; avoids allocating a Random per loop pass. */
	private final Random random = new Random();

	/** Maximum number of full passes over the web-site list per extract() call. */
	private final static int EVERY_EXTRACT_COUNT = 1;

	private byte[] websiteKey = null;

	/**
	 * Returns the serialized redis key under which the web-site list is stored.
	 * Lazily cached; a concurrent double computation is benign because the
	 * serialized value is deterministic.
	 *
	 * @return serialized form of the literal key "ipWebsites"
	 * @throws Exception if serialization fails
	 */
	public byte[] getIpWebsitesKey() throws Exception {
		if (websiteKey == null)
			websiteKey = SerializeUtil.serialize("ipWebsites");
		return websiteKey;
	}

	/**
	 * Extracts up to {@code num} candidate proxy IPs. The quota is split
	 * between the database pool and the download web sites using the
	 * WEB_DB_SCALE configuration (format "web:db"); any shortfall on the db
	 * side is shifted onto the web side. IPs already present in the store map
	 * or public pool are removed before returning.
	 *
	 * @param num requested number of IPs (0 returns null, preserving the
	 *            original contract)
	 * @return list of extracted {@link ProxyIpBean}, possibly smaller than
	 *         {@code num}; {@code null} when {@code num} is 0
	 * @throws Exception on redis/serialization failures outside the download loop
	 */
	@SuppressWarnings({ "unchecked", "rawtypes" })
	public List extract(Long num) throws Exception {
		Long websiteCountL = redisManager.len(getIpWebsitesKey());
		if (websiteCountL == null || websiteCountL == 0L) {
			reloadIpWebsites();// load the proxy-ip web-site definitions from the database
		}

		if (num == 0L) {
			return null;
		}

		String webDbScale = BaseCode.getValue("WEB_DB_SCALE");// web:db download ratio
		String[] scale = StringUtils.split(webDbScale, ":");
		Integer web = Integer.parseInt(scale[0]);
		Integer db = Integer.parseInt(scale[1]);
		Double webScale = web * 1000D / (web + db) / 1000d;
		Double dbScale = db * 1000D / (web + db) / 1000d;
		Integer webNum = ((Double) (num * webScale)).intValue();
		Integer dbNum = ((Double) (num * dbScale)).intValue();

		List<Integer> currI = this.initCurrI(websiteCountL.intValue());
		IpWebsiteEntity website = null;
		int count = 1;// current pass number over the web-site list
		Map<String, ProxyIpBean> proxyIpMap = new HashMap<String, ProxyIpBean>();
		Long currTime = System.currentTimeMillis();

		// Take the db share first; if the db pool cannot supply its full share,
		// the shortfall is downloaded from the web instead.
		this.loadByDbPool(proxyIpMap, dbNum);
		webNum += (dbNum - proxyIpMap.size());
		log.info("db extract ip size:" + proxyIpMap.size());
		log.info("web extract start ");

		while (true) {
			try {
				if (websiteCountL == null) {
					break;
				}

				int i = random.nextInt(currI.size());
				int si = currI.get(i);
				website = (IpWebsiteEntity) redisManager.lindex(getIpWebsitesKey(), si);

				if (website == null) {
					break;// another node may be re-initializing the list; abort this extraction
				}
				currI.remove(i);
				if (checkAtractTime(si, currTime)) {// site still inside its cool-down window
					if (currI.size() == 0) {
						if (count == EVERY_EXTRACT_COUNT) {// all passes done; give up on the remainder
							break;
						} else {
							currI = this.initCurrI(websiteCountL.intValue());
							count++;
						}
					}
					continue;
				}
				// Request 1.5x the remaining shortfall to compensate for rows that
				// later turn out to be duplicates or invalid.
				webSiteDoload.extract(proxyIpMap, website, (webNum - proxyIpMap.size()) * 15 / 10);
				// BUGFIX: extract() may overshoot the quota, so '==' could never
				// fire and the loop would keep downloading; use '>='.
				if (proxyIpMap.size() >= webNum) {
					break;
				} else if (currI.size() == 0) {
					if (count == EVERY_EXTRACT_COUNT) {// all passes done; accept the shortfall
						break;
					} else {
						// BUGFIX: was initCurrI(websiteCount) where websiteCount was
						// declared but never assigned -> guaranteed NPE on unboxing.
						currI = this.initCurrI(websiteCountL.intValue());
						count++;
					}
				}
			} catch (Exception e) {
				// BUGFIX: website may still be null here (e.g. lindex failed on the
				// first pass); guard before dereferencing, and log the throwable
				// instead of printStackTrace().
				log.error((website == null ? "?" : website.getName()) + "获取源代理ip失败!", e);
				if (count == EVERY_EXTRACT_COUNT) {
					break;
				} else {
					currI = this.initCurrI(websiteCountL.intValue());
					count++;
				}
			}
		}

		/** drop IPs that already exist in the store map or the public pool **/
		this.removeExistIp(proxyIpMap);
		log.info("extract ip size:" + proxyIpMap.size());

		return new ArrayList(proxyIpMap.values());
	}

	/**
	 * Removes from {@code extractMap} every IP already present in the global
	 * store map or the public pool. Iterates over a snapshot of the key set so
	 * removal during iteration is safe.
	 *
	 * @param extractMap map of completion-IP -&gt; bean to filter in place
	 * @throws Exception propagated from pool lookups
	 */
	private void removeExistIp(Map<String, ProxyIpBean> extractMap) throws Exception {
		if (extractMap.size() == 0) {
			return;
		}

		Set<String> set = extractMap.keySet();
		List<String> tempList = new Vector<String>(set);

		for (String ip : tempList) {
			if (IpStoreMap.getIntance().containsKey(ip) || getPoolManager().getPublicPool().contains(ip)) {
				extractMap.remove(ip);
			}
		}

		tempList.clear();
	}

	/**
	 * Builds the list of not-yet-visited web-site indices [0, size).
	 *
	 * @param size number of web sites currently cached in redis
	 * @return mutable list of indices
	 */
	private List<Integer> initCurrI(int size) {
		List<Integer> currI = new ArrayList<Integer>();
		for (int i = 0; i < size; i++) {
			currI.add(i);
		}
		return currI;
	}

	/**
	 * Loads up to {@code num} IPs from the database pool into
	 * {@code extractMap}, skipping IPs already known to the store map.
	 *
	 * @param extractMap target map keyed by completion IP
	 * @param num        number of IPs to request from the db pool
	 * @throws Exception propagated from pool access
	 */
	private void loadByDbPool(Map<String, ProxyIpBean> extractMap, Integer num) throws Exception {

		poolsManager = getPoolManager();
		List<ProxyIpBean> dbIps = poolsManager.getDbPool().offerIps(num.longValue(), PublicPool.POOL_CODE);
		for (ProxyIpBean proxyIp : dbIps) {
			// BUGFIX: the map is keyed by getCompletionIP() (see put below), so the
			// duplicate check must use the same key, not getIp().
			String key = proxyIp.getCompletionIP();
			if (extractMap.get(key) == null && !IpStoreMap.getIntance().containsKey(key)) {
				extractMap.put(key, proxyIp);
			}
		}

	}

	/**
	 * Lazily resolves the {@link PoolsManager} via the DI container.
	 * NOTE(review): benign race in a singleton service — worst case is a
	 * duplicate lookup of the same container-managed instance.
	 */
	private PoolsManager getPoolManager() {
		if (poolsManager == null) {
			poolsManager = InstanceFactory.getInstance(PoolsManager.class);
		}
		return poolsManager;
	}

	/**
	 * Checks whether the web site at list index {@code i} is still inside its
	 * download cool-down window; if it is not, the last-download timestamp is
	 * refreshed in redis so cluster peers observe the cool-down too.
	 *
	 * History (fb, 2014-09-23): moved the last-download timestamp from local
	 * application state into redis so the cool-down is honoured cluster-wide.
	 *
	 * @param i        redis list index of the web site
	 * @param currTime current timestamp (millis) used for the comparison
	 * @return {@code true} when the site is still cooling down and must be skipped
	 * @throws Exception on redis/serialization failures
	 * @throws PiRedisExecption on redis execution failures
	 */
	private boolean checkAtractTime(Integer i, Long currTime) throws Exception {
		final String prefix = "checkAtractTime_";
		final IpWebsiteEntity webSite = (IpWebsiteEntity) redisManager.lindex(getIpWebsitesKey(), i.intValue());

		Long preTime = new RedisExecutor<Long>() {
			@Override
			public Long run() throws Exception {
				return (Long) SerializeUtil.unserialize(this.jedis.get((prefix + webSite.getCode()).getBytes()));
			}
		}.exec();

		if (preTime != null) {
			Long separation = currTime - preTime;
			if (separation < webSite.getSplitTime()) {// previous download has not cooled down yet
				return true;
			}
		}

		// Record "now" as this site's last-download time.
		new RedisExecutor<Boolean>() {
			@Override
			public Boolean run() throws Exception {
				this.jedis.set((prefix + webSite.getCode()).getBytes(), SerializeUtil.serialize(System.currentTimeMillis()));
				return null;
			}
		}.exec();

		return false;
	}

	/**
	 * Reloads all active (state=1) proxy-IP web-site definitions from the
	 * database into the redis list, replacing the previous contents.
	 *
	 * @throws Exception on repository or redis failures
	 */
	public void reloadIpWebsites() throws Exception {
		this.redisManager.del(this.getIpWebsitesKey());
		IBaseRepository baseRepository = InstanceFactory.getInstance(IBaseRepository.class, "entityHibernateRepository");
		List<IpWebsiteEntity> ipWebsites = baseRepository.queryResult("from IpWebsiteEntity t where t.state=1", null);

		for (IpWebsiteEntity website : ipWebsites) {// clone out of hibernate's detached state
			addIpWebsite(website);
		}
	}

	/**
	 * Lazily compiles the placeholder pattern: an optional separator character
	 * followed by a "{field}" token, e.g. ":{port}".
	 */
	private Pattern getWebSiteLinePattern() {
		if (webSiteLinePattern == null) {
			webSiteLinePattern = Pattern.compile("(.?)\\{(\\w*)\\}", Pattern.CASE_INSENSITIVE);
		}
		return webSiteLinePattern;
	}

	/**
	 * Clones {@code website}, resolves its row-position metadata and pushes the
	 * clone onto the redis web-site list.
	 *
	 * @param website entity to register (left unmodified; a clone is stored)
	 * @throws Exception on clone/serialization/redis failures
	 */
	public void addIpWebsite(IpWebsiteEntity website) throws Exception {
		IpWebsiteEntity newBean = (IpWebsiteEntity) BeanUtils.cloneBean(website);
		this.loadRowRegex(getWebSiteLinePattern(), newBean);
		this.redisManager.lPush(getIpWebsitesKey(), SerializeUtil.serialize(newBean));
	}

	/**
	 * Parses the entity's row regex and records, for each "{field}" placeholder,
	 * its separator+field-name token — i.e. where each IpStoreEntity attribute
	 * sits within a single result row of the site's response.
	 *
	 * @param p       compiled placeholder pattern (see getWebSiteLinePattern())
	 * @param newBean entity whose ipRowPositions are populated in place
	 */
	private void loadRowRegex(Pattern p, IpWebsiteEntity newBean) {
		String rowRegex = newBean.getRowRegex();
		List<String> positions = new ArrayList<String>();
		Matcher matcher = p.matcher(rowRegex);
		while (matcher.find()) {
			positions.add(matcher.group(1) + matcher.group(2));
		}

		newBean.setIpRowPositions(positions);
	}

	/** Ad-hoc manual check of the placeholder pattern; prints parsed tokens. */
	public static void main(String[] args) {
		Pattern p = Pattern.compile("(.?)\\{(\\w*)\\}", Pattern.CASE_INSENSITIVE);
		Matcher matcher = p.matcher("{ip}:{port}@{}${}#{area}");
		while (matcher.find()) {
			System.out.println(matcher.group(1) + matcher.group(2));
		}
	}

}
