package com.pachiraframework.springcloud.rabbitmq.controller;




import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.List;

import io.searchbox.client.JestClient;
import io.searchbox.core.Index;
import io.searchbox.core.Search;
import io.searchbox.core.SearchResult;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

import com.pachiraframework.springcloud.rabbitmq.domain.Article;
import com.pachiraframework.springcloud.rabbitmq.domain.Book;
import com.pachiraframework.springcloud.rabbitmq.repository.BookRepository;



@RestController
public class Controller {

	// Low-level Jest client for issuing raw index/search requests against ES.
	@Autowired
	private JestClient jestClient;

	// Spring-Data-style repository abstraction over the same ES instance.
	@Autowired
	private BookRepository bookRepository;

	/**
	 * Indexes (saves) one hard-coded {@link Article} document into index
	 * {@code atguigu}, type {@code news}.
	 * <p>
	 * localhost:20002/saveone
	 * http://192.168.99.100:9200/atguigu/news/1
	 *
	 * @return {@code "ok"} once the document has been indexed
	 * @throws UncheckedIOException if the Elasticsearch call fails
	 */
	@GetMapping("/saveone")
	public String saveone(){
		// 1. Build the sample document to index into ES.
		Article article = new Article();
		article.setId(1);
		article.setTitles("asa");
		article.setAuthor("asa");
		article.setContent("asa");

		// Build the index action: the article bean becomes the document body,
		// routed to index "atguigu", type "news".
		Index index = new Index.Builder(article).index("atguigu").type("news").build();

		try {
			jestClient.execute(index);
		} catch (IOException e) {
			// Never swallow the failure and still report "ok" — surface it so the
			// HTTP caller sees an error response instead of a false success.
			throw new UncheckedIOException("Failed to index article into atguigu/news", e);
		}

		return "ok";
	}

	/**
	 * Runs a match query for {@code content:"asa"} against index
	 * {@code atguigu}, type {@code news}, and prints the raw JSON response
	 * to stdout.
	 * <p>
	 * localhost:20002/search
	 * http://192.168.99.100:9200/atguigu/news/1
	 *
	 * @return {@code "ok"} once the search has executed
	 * @throws UncheckedIOException if the Elasticsearch call fails
	 */
	@GetMapping("/search")
	public String search(){
		String json = "{\n"
				+ "	\"query\": {\n"
				+ "		\"match\" : {\n"
				+ "			\"content\" : \"asa\"\n"
				+ "}\n"
				+ "}\n"
				+ "}\n";
		Search search = new Search.Builder(json).addIndex("atguigu").addType("news").build();
		try {
			SearchResult result = jestClient.execute(search);
			// NOTE(review): demo-style output; consider returning the JSON body instead.
			System.out.println(result.getJsonString());
		} catch (IOException e) {
			// Propagate with cause preserved rather than printStackTrace + fake "ok".
			throw new UncheckedIOException("Search against atguigu/news failed", e);
		}

		return "ok";
	}

	/**
	 * Indexes one hard-coded {@link Book} via the repository abstraction.
	 * <p>
	 * localhost:20002/saveonedata
	 * http://192.168.99.100:9200/atguigu/news/1
	 *
	 * @return {@code "ok"} once the book has been indexed
	 */
	@GetMapping("/saveonedata")
	public String saveonedata(){
		Book book = new Book();
		book.setBookName("西游记");
		book.setAuthor("吴承恩");
		book.setId(1);
		bookRepository.index(book);
		return "ok";
	}

	/**
	 * Runs two like-queries on {@code bookName} and prints the matching book
	 * names to stdout.
	 * <p>
	 * localhost:20002/findbybookName
	 *
	 * @return {@code "ok"} once both queries have executed
	 */
	@GetMapping("/findbybookName")
	public String findbybookName(){
		printBooksMatching("西游");
		printBooksMatching("西记");
		return "ok";
	}

	/** Prints the name of every book whose bookName matches the given keyword. */
	private void printBooksMatching(String keyword) {
		List<Book> matches = bookRepository.findByBookNameLike(keyword);
		for (Book book : matches) {
			System.out.println(book.getBookName());
		}
	}

	/**
	 * ES has a popular visualization tool, Kibana; I could not get it installed
	 * on Windows (it complained about insufficient permissions...).
	 *
	 * By default ES has no Chinese word segmentation — it simply splits every
	 * Chinese character apart (good enough when indexing requirements are low).
	 * Proper Chinese segmentation requires installing a Chinese analysis plugin
	 * into the ES plugins directory.
	 *
	 * https://github.com/medcl/elasticsearch-analysis-ik — download this package
	 * and build it with Maven.
	 *
	 * Unzip \target\releases\elasticsearch-analysis-ik-6.5.0.zip from the build
	 * output into a newly created "ik" folder under the ES plugins directory.
	 */
}
