package com.yihaodian.index.solr.ext;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;

import org.apache.log4j.Logger;
import org.apache.lucene.util.IOUtils;
import org.apache.solr.schema.SchemaField;

/**
 * MemDataProvider, an in-memory key/value data store similar to
 * ExternalFileField's data file.
 *
 * Data:
 * - Raw key -&gt; value entries.
 * - Identified by two identifiers: Index ID + Field Name. Different shards of
 *   the same index on one node share a single provider.
 *
 * Update flow:
 * - RPC (or message) parameters are translated into Index ID, Field Name and
 *   Field Value.
 * - The matching provider is looked up and updated.
 * - After an update the data is flushed asynchronously to the disk file.
 *   Flushing only appends; a remove is an append of the defaultValue.
 * - TODO: periodically dump the data to file, removing duplicate keys.
 *
 * Initial load:
 * - On creation the provider loads from the disk file, and a background async
 *   update thread periodically performs a full reload.
 *
 * Multi-shard support:
 * 1. Each shard starts a SolrCore, with a corresponding MemFloatSource.
 * 2. The same Index+Field shares one MemDataProvider, which holds the
 *    in-memory data and is responsible for flushing it to the snapshot file.
 * 3. MemDataProvider is linked to MemFloatSource via the dataDir.
 * 4. When a SolrCore is closed, its MemFloatSource must be closed too.
 *
 * @author zhouhang
 *
 */
public class MemDataProvider {

	private static Logger log = Logger.getLogger(MemDataProvider.class);

	/** Fallback index directory when the shard layout cannot be resolved. */
	private static final String DEFAULT_DATA_DIR = "/var/www/data/mars/product";

	/** Sub-directory (under the index dir) that holds snapshot files. */
	private static final String SNAPSHOT_DIR = "memdata";

	// In-memory key -> value map; all access is guarded by "this".
	private Map<String, Float> data;

	// Per-shard value sources keyed by their dataDir; lazily created.
	private Map<String, MemFloatSource> sources;

	// Snapshot file currently backing the in-memory data.
	private Snapshot file;

	// Used to build the snapshot file name (<indexName>_<fieldName>.<version>)
	// when saving to and loading from the disk file.
	private String indexName;
	private String fieldName;

	// Directory where the snapshot files live.
	private String dataDir;

	/**
	 * Creates a provider and loads the latest snapshot from disk.
	 *
	 * @param indexName index identifier, part of the snapshot file name
	 * @param fieldName field identifier, part of the snapshot file name
	 * @param dataDir   shard index directory used to locate the snapshot dir;
	 *                  may be null, in which case DEFAULT_DATA_DIR is used
	 */
	public MemDataProvider(String indexName, String fieldName, String dataDir) {
		this.indexName = indexName;
		this.fieldName = fieldName;

		this.dataDir = getIndexDir(dataDir) + "/" + SNAPSHOT_DIR;

		this.sources = null;

		this.file = new Snapshot();
		// Snapshot.load() never returns null: on I/O failure it logs and
		// returns whatever entries could be read (possibly empty).
		this.data = file.load();
	}

	/**
	 * Get Snapshot dir from shard index dir.
	 *
	 * Assume the indexDir is /${DATA_PATH}/&lt;index-id&gt;/&lt;shard-id&gt;/&lt;index-version&gt;/index,
	 * the return value should be: /${DATA_PATH}/&lt;index-id&gt;
	 * (the caller appends SNAPSHOT_DIR).
	 *
	 * @param indexDirPath shard index directory; may be null or non-existent
	 * @return the index-level directory, or DEFAULT_DATA_DIR when the expected
	 *         layout is not present on disk
	 */
	private String getIndexDir(String indexDirPath) {
		if (indexDirPath == null) {
			return DEFAULT_DATA_DIR;
		}

		// Walk up three levels: index -> <index-version> -> <shard-id> -> <index-id>.
		// Each intermediate level must exist; a missing parent (filesystem root
		// or layout mismatch) falls back to the default instead of NPE-ing.
		File dir = new File(indexDirPath);
		for (int level = 0; level < 3; level++) {
			if (!dir.exists()) {
				return DEFAULT_DATA_DIR;
			}
			dir = dir.getParentFile();
			if (dir == null) {
				return DEFAULT_DATA_DIR;
			}
		}

		return dir.getAbsolutePath();
	}

	/**
	 * Returns the value for the given key, or null if absent.
	 */
	public synchronized Float get(String key) {
		return data.get(key);
	}

	/**
	 * Stores a single key/value pair. If the stored value actually changed,
	 * all registered sources are refreshed and the pair is appended to the
	 * snapshot file.
	 */
	public synchronized void put(String key, Float value) {
		if (!_put(key, value)) {
			// Value unchanged: skip the source refresh and the disk append.
			return;
		}

		if (sources != null) {
			for (MemFloatSource source : sources.values()) {
				source.refresh(key, value);
			}
		}

		file.save(key, value);
	}

	/**
	 * Stores a batch of key/value pairs. If any value changed, all registered
	 * sources are refreshed with every entry of the batch and the whole batch
	 * is appended to the snapshot file.
	 */
	public synchronized void putAll(Map<String, Float> update) {
		boolean changed = false;
		for (Entry<String, Float> entry : update.entrySet()) {
			// _put already writes the entry into the in-memory map, so no
			// extra data.putAll(update) is needed afterwards.
			if (_put(entry.getKey(), entry.getValue())) {
				changed = true;
			}
		}

		if (!changed) {
			return;
		}

		if (sources != null) {
			for (MemFloatSource source : sources.values()) {
				for (Entry<String, Float> entry : update.entrySet()) {
					source.refresh(entry.getKey(), entry.getValue());
				}
			}
		}

		file.save(update);
	}

	/**
	 * Writes the pair into the in-memory map.
	 *
	 * @return true if the stored value actually changed
	 */
	private boolean _put(String key, Float value) {
		Float old = data.put(key, value);
		return (old == null ? value != null : !old.equals(value));
	}

	/**
	 * Switch data with data file in place: load the snapshot file for the
	 * given version and replace the in-memory map with its contents.
	 *
	 * @param version snapshot version suffix of the file to load
	 */
	public synchronized void switchData(String version) {
		// Load data from file; load() never returns null, so "data" stays
		// usable even if the file could not be read.
		Snapshot newFile = new Snapshot(version);
		data = newFile.load();
		file = newFile;

		// Refresh sources so they pick up the replaced map.
		if (sources != null) {
			for (MemFloatSource source : sources.values()) {
				source.loadValues();
			}
		}
	}

	/**
	 * Switch data with data in memory: adopt the given map, refresh sources,
	 * then persist the new data to a fresh snapshot file.
	 *
	 * @param newData replacement data; the provider takes ownership of the map
	 * @param version snapshot version suffix for the new snapshot file
	 */
	public synchronized void switchData(Map<String, Float> newData, String version) {
		Map<String, Float> oldData = data;
		data = newData;
		// Drop the old entries eagerly to help GC.
		oldData.clear();

		// Refresh sources so they pick up the replaced map.
		if (sources != null) {
			for (MemFloatSource source : sources.values()) {
				source.loadValues();
			}
		}

		// Save data to file.
		Snapshot newFile = new Snapshot(version);
		newFile.save(newData);
		file = newFile;
	}

	/**
	 * Get already created value source.
	 *
	 * @param dataDir shard data dir the source was registered under
	 * @return the source, or null if none is registered for that dir
	 */
	public synchronized MemFloatSource getSource(String dataDir) {
		if (sources == null) {
			sources = new HashMap<String, MemFloatSource>();
		}
		return sources.get(dataDir);
	}

	/**
	 * Close value source if it's opened.
	 *
	 * This function should and should only be used when solrCore is closed.
	 *
	 * NOTE(review): this only deregisters the source from this provider; it
	 * does not invoke any close/release method on MemFloatSource — confirm
	 * none is required.
	 *
	 * @param dataDir shard data dir the source was registered under
	 */
	public synchronized void closeSource(String dataDir) {
		if (sources == null) {
			return;
		}
		sources.remove(dataDir);
	}

	/**
	 * Get or create value source for the given shard data dir, and register
	 * this provider with the MemDataManager.
	 *
	 * @param indexName index identifier passed through to the source
	 * @param field     value field
	 * @param keyField  key field
	 * @param defVal    default value for keys missing from the data
	 * @param dataDir   shard data dir identifying the source
	 * @return the existing or newly created source
	 */
	public synchronized MemFloatSource getSource(String indexName, SchemaField field, SchemaField keyField,
			float defVal, String dataDir) {

		// getSource(dataDir) also lazily initializes the sources map.
		MemFloatSource source = getSource(dataDir);
		if (source == null) {
			source = new MemFloatSource(this, indexName, field, keyField, defVal, dataDir);
			sources.put(dataDir, source);
		}

		MemDataManager.registerDataDir(dataDir, this);

		return source;
	}

	@Override
	public synchronized String toString() {
		// StringBuilder: we are already synchronized, no need for the
		// per-call locking of StringBuffer.
		StringBuilder sb = new StringBuilder();
		sb.append("MemDataProvider")
		.append(", index: ").append(indexName)
		.append(", field: ").append(fieldName)
		.append(", data: ").append(data)
		.append(", sources: ");

		if (sources != null) {
			sb.append("[ ");
			for (MemFloatSource source : sources.values()) {
				sb.append(source.toString());
				sb.append(", ");
			}
			sb.append("] ");
		} else {
			// Same output the old code produced by appending the null map.
			sb.append("null");
		}

		return sb.toString();
	}

	/**
	 * Persistent storage for the data.
	 *
	 * The file will be used when solrCore is restarted. Snapshot files are
	 * named &lt;indexName&gt;_&lt;fieldName&gt;.&lt;version&gt; inside dataDir, one
	 * "key=value" entry per line.
	 *
	 * @author zhouhang
	 *
	 */
	private class Snapshot {
		/** Separator between key and value on each line. */
		private static final String delimiter = "=";

		// File name of this snapshot inside dataDir.
		private String fname;

		// Common file-name prefix for all snapshots of this index+field.
		private String prefix;

		/** Opens (or creates) the latest existing snapshot. */
		public Snapshot() {
			this(null);
		}

		/**
		 * Opens the snapshot for the given version, creating the file (and
		 * its parent directory) if necessary.
		 *
		 * @param version version suffix, or null to pick the latest on disk
		 */
		public Snapshot(String version) {

			prefix = indexName + "_" + fieldName;

			if (version == null) {
				fname = getLatestFile();
			} else {
				fname = prefix + "." + version;
			}

			File f = new File(dataDir, fname);
			if (!f.exists()) {
				try {
					File dir = f.getParentFile();
					if (dir != null && !dir.exists()) {
						dir.mkdirs();
					}
					f.createNewFile();

				} catch (IOException e) {
					log.error("Failed to create data file " + fname, e);
				}
			}
		}

		/**
		 * Picks the newest existing snapshot file for this prefix, or a fresh
		 * name based on the current time when none exists yet.
		 *
		 * NOTE(review): "newest" is the lexicographically largest file name;
		 * that matches numeric order only while all millisecond timestamps
		 * have the same number of digits (true for many years to come).
		 */
		private String getLatestFile() {
			String latest = prefix + "." + Long.toString(System.currentTimeMillis());
			File dir = new File(dataDir);
			if (dir.exists()) {
				String[] files = dir.list(new FilenameFilter() {

					@Override
					public boolean accept(File d, String n) {
						return n.startsWith(prefix);
					}

				});
				if (files != null && files.length != 0) {
					Arrays.sort(files);
					latest = files[files.length - 1];
				}
			}

			return latest;
		}

		/**
		 * Loads all entries from the snapshot file.
		 *
		 * Lines without a delimiter or with an unparseable value are skipped.
		 * On I/O failure the error is logged and the entries read so far are
		 * returned — this method never returns null, so callers can install
		 * the result directly as the provider's data map.
		 *
		 * @return the loaded (possibly empty) key -&gt; value map
		 */
		public synchronized Map<String, Float> load() {
			Map<String, Float> newData = new HashMap<String, Float>();

			// Load from the data file.
			BufferedReader r = null;
			try {
				InputStream is = new FileInputStream(new File(dataDir, fname));
				r = new BufferedReader(new InputStreamReader(is,
						IOUtils.CHARSET_UTF_8));

				// File format: key=value. Use the LAST delimiter so that keys
				// may themselves contain '='.
				for (String line; (line = r.readLine()) != null;) {
					int delimIndex = line.lastIndexOf(delimiter);
					if (delimIndex < 0) {
						continue;
					}

					String key = line.substring(0, delimIndex);
					String val = line.substring(delimIndex + 1);
					try {
						newData.put(key, Float.valueOf(val));
					} catch (NumberFormatException e) {
						// A single corrupt line must not abort the load.
						log.error("Skipping bad line in data file " + fname + ": " + line);
					}
				}

			} catch (IOException e) {
				log.error("Failed to load data from file " + fname, e);
			} finally {
				// Closing the reader also closes the underlying stream.
				if (r != null) {
					try {
						r.close();
					} catch (IOException e) {
						log.error("Failed to close data file " + fname, e);
					}
				}
			}
			return newData;
		}

		/**
		 * Appends all given entries to the snapshot file.
		 *
		 * @param entries entries to append; null is treated as empty
		 */
		public synchronized void save(Map<String, Float> entries) {
			BufferedWriter w = null;
			try {
				// Open file in append mode.
				OutputStream os = new FileOutputStream(new File(dataDir, fname), true);
				w = new BufferedWriter(new OutputStreamWriter(os,
						IOUtils.CHARSET_UTF_8));

				if (entries != null) {
					for (Entry<String, Float> entry : entries.entrySet()) {
						w.write(entry.getKey() + delimiter + entry.getValue() + "\n");
					}
				}
			} catch (IOException e) {
				log.error("Failed to save data to file " + fname, e);
			} finally {
				// Closing the writer flushes it and closes the stream.
				if (w != null) {
					try {
						w.close();
					} catch (IOException e) {
						log.error("Failed to close data file " + fname, e);
					}
				}
			}
		}

		/**
		 * Appends a single entry to the snapshot file.
		 */
		public synchronized void save(String key, Float value) {
			Map<String, Float> tmp = new HashMap<String, Float>();
			tmp.put(key, value);
			save(tmp);
		}

	}

	/**
	 * Returns the live internal data map.
	 *
	 * NOTE(review): this exposes the internal mutable map; callers must not
	 * modify it, and iterating it races with concurrent put/switchData calls
	 * — confirm callers treat it as read-only.
	 */
	public synchronized Map<String, Float> getAll() {
		return data;
	}

}
