package com.tdp.neo4j;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.MalformedInputException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collection;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Random;
import java.util.Scanner;
import java.util.Set;
import java.util.Stack;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.neo4j.graphdb.GraphDatabaseService;
import org.neo4j.graphdb.Transaction;
import org.neo4j.graphdb.factory.GraphDatabaseFactory;
import org.neo4j.kernel.impl.util.FileUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.neo4j.config.EnableNeo4jRepositories;
import org.springframework.data.neo4j.config.Neo4jConfiguration;
import org.springframework.data.neo4j.core.GraphDatabase;
import org.springframework.data.neo4j.support.Neo4jTemplate;

@SpringBootApplication
public class MyApplication implements CommandLineRunner {
	// Charset used when reading the input CSV files (see importData).
	final static Charset ENCODING = StandardCharsets.UTF_8;
	private Log log = LogFactory.getLog(getClass());
	// Debug flag; currently only referenced from commented-out code.
	private boolean marked = false;
	// Root folder that holds the CSV input files.
	private static final String FOLDERPATH = "E:/Datas";
	/**
	 * Used by CELF to record, per node id, the round in which that node's
	 * marginal gain was last (re)computed.
	 */
	private HashMap<Long, Integer> flags;

	/**
	 * Priority queue holding each node's (lazily updated) influence spread,
	 * ordered descending via {@link #ifCompare}; used by CELF.
	 */
	private Queue<AuthorInfluence> priorityQueue = null;
	/**
	 * In-memory cache of edge information:
	 * (source, target) pair -> activation probability.
	 */
	private Map<KeyOfRelation, Double> cacheRelations;
	@Configuration
	@EnableNeo4jRepositories(basePackages = "com.tdp.neo4j")
	static class ApplicationConfig extends Neo4jConfiguration {

		public ApplicationConfig() {
			setBasePackage("com.tdp.neo4j");
		}

		/**
		 * Embedded Neo4j database backing all repositories.
		 * NOTE(review): the database path is hard-coded to a Windows drive;
		 * consider externalizing it to configuration.
		 */
		@Bean
		GraphDatabaseService graphDatabaseService() {
			return new GraphDatabaseFactory()
					.newEmbeddedDatabase("E:/Datas/mytest.db");
			// Alternative: remote server instead of embedded mode.
			// return new
			// SpringRestGraphDatabase("http://localhost:7474/db/data/");
		}
	}

	@Autowired
	AuthorsRepository authorsRepository;
	@Autowired
	Neo4jTemplate template;

	@Autowired
	GraphDatabase graphDatabase;
	@Autowired
	CooperationsRepository cooperationsRepository;

	/**
	 * Spring Boot entry hook: runs the whole experiment inside a single
	 * Neo4j transaction.
	 *
	 * @param args command line arguments (unused)
	 * @throws Exception propagated from the algorithm / IO layers
	 */
	public void run(String... args) throws Exception {
		Transaction tx = graphDatabase.beginTx();
		try {
			// One-off data import; enable manually when (re)loading the CSVs.
			// importData();
			// Build the in-memory edge cache before running any simulation.
			this.cacheRelationship();
			// CELF influence maximization: 20 seeds, 1 simulation per estimate.
			GreedyStep(20, 1);
			// GreedyAlgoithm(5, 1000); // plain greedy hill-climbing alternative
			tx.success();
		} finally {
			tx.close();
		}
	}

	/**
	 * @throws IOException
	 * @throws MalformedInputException
	 *             导入数据并开始处理
	 */
	/**
	 * Reads one of the data CSV files and dispatches every line to the
	 * matching loader (nodes, relations, or probabilities). Which file is
	 * processed is chosen interactively on stdin.
	 *
	 * @throws IOException if the CSV file cannot be opened or read
	 * @throws MalformedInputException if the file is not valid UTF-8
	 */
	private void importData() throws IOException, MalformedInputException {
		Scanner scanner = new Scanner(System.in);
		System.out.println("选择执行任务:\n" + "1、建立结点-2、建立关系-3、设置概率值\n");
		int choose = scanner.nextInt();
		String fileName;
		if (choose == 1)
			fileName = "testOutAuthors";
		else if (choose == 2)
			fileName = "testOutRelations";
		else if (choose == 3)
			fileName = "testOutprob";
		else {
			// Previously an invalid choice silently produced the bogus path
			// "null.csv"; report it and bail out instead.
			System.err.println("invalid choice: " + choose);
			scanner.close();
			return;
		}
		Path path = Paths.get(FOLDERPATH, fileName + ".csv");
		String line;
		// Reuse the shared ENCODING constant instead of re-resolving "UTF-8".
		try (BufferedReader reader = Files.newBufferedReader(path, ENCODING)) {
			System.out.println("Start");
			while ((line = reader.readLine()) != null) {
				if (choose == 1)
					buildNodes(line);
				else if (choose == 2)
					buildRelations(line);
				else
					setProbility(line);
			}
		} catch (IOException e) {
			System.err.println(e);
		}
		scanner.close();
		System.out.println("OVER");
	}

	/**
	 * @param line
	 *            通过文件构建结点
	 */
	/**
	 * Creates and persists one author node from a CSV line of the form
	 * {@code id,name}; surrounding quotes in the name are stripped.
	 *
	 * @param line one CSV record, may be null or empty (then ignored)
	 */
	private void buildNodes(String line) {
		if (line == null || line.equals("")) {
			return;
		}
		String[] fields = line.split(",");
		int authorId = Integer.parseInt(fields[0]);
		String authorName = fields[1].replaceAll("\"", "");
		authorsRepository.save(new Authors(authorId, authorName));
	}

	/**
	 * @param line
	 *            通过文件构建节点之间的关系
	 */
	/**
	 * Creates a bidirectional cooperation relationship between the two
	 * authors named by a CSV line of the form {@code id1,id2}.
	 *
	 * @param line one CSV record, may be null or empty (then ignored)
	 */
	private void buildRelations(String line) {
		if (line == null || line.equals("")) {
			return;
		}
		String[] fields = line.split(",");
		int firstId = Integer.parseInt(fields[0]);
		int secondId = Integer.parseInt(fields[1]);
		log.info("start query node pair");
		Authors first = authorsRepository.findByAuthorID(firstId);
		Authors second = authorsRepository.findByAuthorID(secondId);
		log.info("end query node pair");
		// Persist the relationship in both directions.
		first.cooperateWith(second);
		authorsRepository.save(first);
		second.cooperateWith(first);
		authorsRepository.save(second);
		log.info("-----------------");
	}

	/**
	 * @param line
	 *            设置两个节点在特定话题上的相互影响概率值
	 */
	/**
	 * Sets the mutual influence probability on an existing cooperation
	 * relationship, from a CSV line of the form {@code id1,id2,prob}.
	 * Lines referring to a non-existent relationship are skipped with a
	 * warning (previously this caused a NullPointerException).
	 *
	 * @param line one CSV record, may be null or empty (then ignored)
	 */
	private void setProbility(String line) {
		if (line == null || line.equals("")) {
			return;
		}
		String[] fields = line.split(",");
		int id1 = Integer.parseInt(fields[0]);
		int id2 = Integer.parseInt(fields[1]);
		double prob = Double.parseDouble(fields[2]);
		Cooperations coop = authorsRepository
				.findRelationsBewteenTwoAuthorsThroughAuthorID(id1, id2);
		if (coop == null) {
			// No relationship stored for this pair: skip instead of NPE-ing.
			log.warn("no cooperation found for " + id1 + "," + id2);
			return;
		}
		coop.setProb(prob);
		cooperationsRepository.save(coop);
	}

	/*---------- the begin of GreedyAlgorithm --------------*/

	/**
	 * Greedy hill-climbing entry point (name typo preserved — callers use it).
	 * Finds {@code k} seed nodes via {@link #findKNodes(int, int)}; note the
	 * returned seed list is currently discarded (findKNodes prints it).
	 *
	 * @param k     number of seed nodes to select
	 * @param times number of Monte-Carlo simulations per influence estimate
	 */
	private void GreedyAlgoithm(int k, int times) {
		List<Long> seedList = findKNodes(k, times);
	}

	/**
	 * Greedy hill-climbing: selects {@code k} seed nodes, each round adding
	 * the node whose inclusion maximizes the simulated influence spread.
	 *
	 * Fixes a bug in the original implementation where the candidate list
	 * ({@code backSeeds}) was never cleared between rounds, so candidates
	 * accumulated duplicates and an already-selected seed could be chosen
	 * again in a later round.
	 *
	 * @param k     number of seeds to select
	 * @param times number of Monte-Carlo simulations per estimate
	 * @return the selected seed node ids, in selection order
	 */
	public List<Long> findKNodes(int k, int times) {
		// All candidate nodes.
		Set<Long> allAuthors = authorsRepository.findAllAuthors();
		// Seeds selected so far.
		List<Long> seedsGreedy = new LinkedList<Long>();
		// Scratch list: current seeds plus the candidate under evaluation.
		List<Long> seedsWithNode = new LinkedList<Long>();
		for (int i = 0; i < k; i++) {
			double bestSpread = 0;
			Long selectedAuthor = null;
			seedsWithNode.clear();
			seedsWithNode.addAll(seedsGreedy);
			// Evaluate every node not yet selected as a seed.
			for (Long candidate : allAuthors) {
				if (seedsGreedy.contains(candidate)) {
					continue;
				}
				seedsWithNode.add(candidate);
				double spread = RTimesSimulations(times, seedsWithNode);
				seedsWithNode.remove(seedsWithNode.size() - 1);
				if (spread > bestSpread) {
					bestSpread = spread;
					selectedAuthor = candidate;
				}
			}
			seedsGreedy.add(selectedAuthor);

			log.info("find the " + i + " node");
		}
		for (Long seednode : seedsGreedy) {
			System.out.println(authorsRepository.findByid(seednode)
					.getAuthorName());
		}
		return seedsGreedy;
	}

	/**
	 * Runs {@code r} Monte-Carlo diffusion simulations for the given seed
	 * set and returns the average total number of activated nodes.
	 *
	 * Rewritten to accumulate the running sum directly instead of keeping
	 * an unchecked-generic array of all r result maps in memory.
	 *
	 * @param r      number of simulations (must be &gt; 0)
	 * @param mSeeds seed node ids
	 * @return average activated-node count over r runs
	 */
	public double RTimesSimulations(int r, List<Long> mSeeds) {
		double total = 0;
		log.info("R time");
		for (int j = 0; j < r; j++) {
			// One simulation: map of seed index (1-based) -> activated set.
			Map<Integer, Set<Long>> result = singleDiffusion(mSeeds);
			for (int i = 1; i <= mSeeds.size(); i++) {
				total += result.get(i).size();
			}
		}
		log.info("end time");
		return total / r;
	}

	/**
	 * One Monte-Carlo diffusion run (independent-cascade style): for each
	 * seed, propagates activation along out-edges with the cached edge
	 * probabilities and records the set of nodes it activated.
	 *
	 * Fixes a latent NullPointerException: the original unboxed
	 * {@code cacheRelations.get(...)} into a {@code double}, which NPEs when
	 * an edge is missing from the cache; missing edges are now treated as
	 * probability 0.
	 *
	 * @param seeds seed node ids
	 * @return map of seed index (1-based, in iteration order) -> nodes
	 *         activated starting from that seed (includes the seed itself)
	 */
	public Map<Integer, Set<Long>> singleDiffusion(List<Long> seeds) {
		Random random = new Random();
		Set<Long> active = new HashSet<Long>(); // nodes activated so far
		Stack<Long> target = new Stack<Long>(); // nodes awaiting processing
		Map<Integer, Set<Long>> result = new HashMap<>();
		for (Long author : seeds) {
			// Each seed's cascade is simulated independently.
			active.clear();
			target.push(author);
			while (!target.isEmpty()) {
				Long nodeid = target.pop();
				active.add(nodeid);
				// Try to activate each out-neighbor with the cached probability.
				Set<Long> outNeighbors = authorsRepository
						.findOutNeighbors(nodeid);
				for (Long neighbor : outNeighbors) {
					Double prob = cacheRelations.get(new KeyOfRelation(nodeid,
							neighbor));
					if (prob == null || prob == 0) {
						continue; // unknown or zero-probability edge
					}
					if (random.nextFloat() <= prob && !active.contains(neighbor)) {
						target.push(neighbor);
					}
				}
			}
			result.put(result.size() + 1, new HashSet<Long>(active));
		}
		return result;
	}

	/*------------------  end of greedyalgorithm  ---------------------*/
	/**
	 * Populates the in-memory edge cache: for every stored cooperation,
	 * maps its (author1, author2) key to the activation probability.
	 */
	public void cacheRelationship() {
		cacheRelations = new HashMap<KeyOfRelation, Double>();
		for (Cooperations cooperation : cooperationsRepository.getCooperations()) {
			KeyOfRelation key = new KeyOfRelation(
					cooperation.getAuthor1().id, cooperation.getAuthor2().id);
			cacheRelations.put(key, cooperation.getProb());
		}
	}

	/*--------------------CELF Algorithm-----------------------*/

	/**
	 * CELF bootstrap: estimates every node's individual influence spread,
	 * loads the results into the priority queue (descending), picks the best
	 * node as the first seed, then hands over to {@link #Optimization}.
	 *
	 * @param k     total number of seeds to select
	 * @param times number of Monte-Carlo simulations per estimate
	 * @throws IOException propagated from the result-file writer
	 */
	public void GreedyStep(int k, int times) throws IOException {
		log.info("start Greedy");
		Set<Long> allAuthors = authorsRepository.findAllAuthors();
		flags = new HashMap<>();
		// Typed queue (was raw); Math.max guards the IllegalArgumentException
		// PriorityQueue throws for an initial capacity of 0.
		priorityQueue = new PriorityQueue<AuthorInfluence>(
				Math.max(1, allAuthors.size()), ifCompare);
		List<Long> singleSeed = new LinkedList<Long>();
		for (Long author : allAuthors) {
			// Estimate the spread of {author} alone.
			singleSeed.add(author);
			double influence = RTimesSimulations(times, singleSeed);
			singleSeed.remove(0);
			flags.put(author, 1); // evaluated in round 1
			priorityQueue.add(new AuthorInfluence(author, influence));
		}
		AuthorInfluence best = priorityQueue.poll();
		log.info(best);
		Long firstSeed = best.getIdOfAuthor();
		double current_influence = best.getInfluence();
		log.info("get into CELF");
		this.Optimization(firstSeed, current_influence, k, times);
		log.info("End CELF");
	}

	/**
	 * CELF lazy-evaluation loop. In each round i (2..k), nodes are popped
	 * from the priority queue; a node whose marginal gain was already
	 * recomputed this round (flag == i) is guaranteed optimal by
	 * submodularity and becomes the next seed. Otherwise its marginal gain
	 * is recomputed, it is re-inserted with the updated value, and its flag
	 * is set to the current round.
	 *
	 * NOTE(review): if k exceeds the number of queued nodes, the queue can
	 * drain and {@code poll()} returns null — unchanged from the original.
	 *
	 * @param firstSeed         seed chosen in the bootstrap round
	 * @param current_influence that seed's estimated spread
	 * @param k                 total number of seeds to select
	 * @param times             Monte-Carlo simulations per estimate
	 * @throws IOException propagated from the result-file writer
	 */
	private void Optimization(Long firstSeed, double current_influence, int k,
			int times) throws IOException {
		List<Long> seeds = new LinkedList<Long>();
		double bigInfluence = current_influence;
		seeds.add(firstSeed);
		for (int i = 2; i <= k; i++) {
			while (true) {
				AuthorInfluence tempAuthorInfluence = priorityQueue.poll();
				Long idOfSeed = tempAuthorInfluence.getIdOfAuthor();
				int flagOfNode = flags.get(idOfSeed);
				if (flagOfNode == i) {
					// Marginal gain is current for this round: accept it.
					log.info("get a node");
					seeds.add(idOfSeed);
					bigInfluence += tempAuthorInfluence.getInfluence();
					log.info("sum bigInfluence" + bigInfluence);
					break;
				} else {
					// Stale entry: recompute marginal gain w.r.t. current seeds.
					seeds.add(idOfSeed);
					double temp = RTimesSimulations(times, seeds);
					temp = temp - bigInfluence;
					if (temp < 0) {
						temp = 0; // clamp simulation noise
					}
					log.info("Marginal :" + temp);
					priorityQueue.add(new AuthorInfluence(idOfSeed, temp));
					flags.put(idOfSeed, i);
					seeds.remove(seeds.size() - 1);
				}
			}
			System.out.println(bigInfluence);
			if (i % 5 == 0) {
				// Every 5 rounds, dump 3 independent diffusion samples for
				// the current seed set (writeSeeds reruns singleDiffusion).
				for (int m = 0; m < 3; m++) {
					writeSeeds(seeds, i);
				}
			}
		}
		System.out.println();
	}

	/**
	 * Writes the current seed names to "CELF_With_&lt;k&gt;_Nodes.txt", then
	 * runs one diffusion simulation and appends the names of all activated
	 * nodes to the same file.
	 *
	 * Fixes: raw {@code LinkedList} types; removed the unchecked
	 * {@code Set<Long>[]} scratch array; the activated-node union now
	 * iterates the simulation result's own entries instead of assuming it
	 * has exactly k entries (which NPE'd whenever k != seeds.size()).
	 *
	 * @param seeds current seed node ids
	 * @param k     current CELF round, used only in the output file name
	 * @throws IOException if the output file cannot be written
	 */
	public void writeSeeds(List<Long> seeds, int k) throws IOException {
		String fileName = "CELF_With_" + k + "_Nodes";
		List<String> seedName = new LinkedList<String>();
		seedName.add("-------------------\n");
		for (Long seednode : seeds) {
			seedName.add(authorsRepository.findByid(seednode).getAuthorName()
					+ " " + authorsRepository.findByid(seednode).getAuthorID());
		}
		seedName.add("-------------------\n");
		writeFile(fileName, seedName);

		// Union of all nodes activated by any seed in one simulation run.
		Map<Integer, Set<Long>> activeMap = singleDiffusion(seeds);
		Set<Long> activedNodes = new HashSet<Long>();
		for (Set<Long> perSeed : activeMap.values()) {
			activedNodes.addAll(perSeed);
		}
		List<String> activeNode = new LinkedList<String>();
		activeNode.add("*******************\n");
		for (Long activedId : activedNodes) {
			activeNode.add(authorsRepository.findByid(activedId)
					.getAuthorName()
					+ "\t"
					+ authorsRepository.findByid(activedId).getAuthorID());
		}
		activeNode.add("**************");
		writeFile(fileName, activeNode);
	}

	/**
	 * Orders {@link AuthorInfluence} entries by influence, descending, for
	 * the CELF priority queue.
	 *
	 * Fixes a comparator-contract bug: the original cast the double
	 * difference to int, so any influence gap with |gap| &lt; 1 collapsed to
	 * 0 (treated as equal) and large gaps could overflow.
	 * {@link Double#compare} handles both correctly.
	 */
	public static Comparator<AuthorInfluence> ifCompare = new Comparator<AuthorInfluence>() {

		@Override
		public int compare(AuthorInfluence o1, AuthorInfluence o2) {
			// o2 first => biggest influence polls first (max-heap behavior).
			return Double.compare(o2.getInfluence(), o1.getInfluence());
		}

	};

	/*------------------------End CELF-----------------------*/

	/*-------------------------Write File---------------------*/

	/**
	 * Appends the given lines (each followed by a newline, plus one trailing
	 * blank line) to "&lt;fileName&gt;.txt" in the working directory.
	 *
	 * Fixes: no longer mutates the caller's {@code lines} list; the writer
	 * is closed via try-with-resources even on exception; the appending
	 * FileWriter now targets the File itself rather than
	 * {@code file.getName()}, which silently dropped any directory
	 * component; the redundant {@code createNewFile} call is gone
	 * (FileWriter creates the file).
	 *
	 * NOTE(review): FileWriter uses the platform default charset — consider
	 * an explicit UTF-8 writer.
	 *
	 * @param fileName output file name without the ".txt" extension
	 * @param lines    lines to append (not modified)
	 * @throws IOException if the file cannot be written
	 */
	private static void writeFile(String fileName, List<String> lines)
			throws IOException {
		File file = new File(fileName + ".txt");
		try (BufferedWriter writer = new BufferedWriter(
				new FileWriter(file, true))) {
			for (String line : lines) {
				writer.write(line);
				writer.newLine();
			}
			// Trailing separator, matching the original's appended "\n" line.
			writer.write("\n");
			writer.newLine();
		}
	}

	// Deletes the leftover embedded database directory, then boots Spring,
	// which invokes run(...) via the CommandLineRunner contract.
	public static void main(String[] args) throws Exception {
		FileUtils.deleteRecursively(new File("accessingdataneo4j.db"));
		SpringApplication.run(MyApplication.class, args);

	}
}
