package org.sninwo.lnbook.book.dao;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.commons.lang.xwork.StringUtils;
import org.hibernate.Query;
import org.hibernate.criterion.Criterion;
import org.hibernate.criterion.Example;
import org.sninwo.lnbook.base.Page;
import org.sninwo.lnbook.base.hibernate.HibernateDao;
import org.sninwo.lnbook.book.entity.Book;
import org.springframework.stereotype.Component;

@Component
public class BookDao extends HibernateDao<Book, Long> {
	/**
	 * Cache of book name -> id, updated on save/delete and consulted by
	 * {@link #findBookIdByName(String)}. This DAO is a Spring singleton shared
	 * between request threads, so the cache must be thread-safe; a plain
	 * HashMap here would be unsafe under concurrent access. Note that
	 * ConcurrentHashMap rejects null keys, so callers below guard against a
	 * null book name. A cached value of 0 means "no book with this name".
	 */
	private final Map<String, Long> bookNameIdMap = new ConcurrentHashMap<String, Long>();

	/**
	 * Saves a book and keeps the name->id cache consistent.
	 * <p>
	 * When updating an existing book whose title changed, the stale cache
	 * entry for the old title is removed before the new one is written.
	 *
	 * @param book the book to insert or update; its id is populated on insert
	 */
	public void save(Book book){
		// Updating an existing record: check whether the title changed and,
		// if so, drop the cache entry keyed by the old title.
		if (book.getId() != null){
			Book oldBook = get(book.getId());
			if (oldBook != null){
				if (oldBook.getName() != null && !oldBook.getName().equals(book.getName())){
					bookNameIdMap.remove(oldBook.getName());
				}
				// Evict the freshly loaded copy from the session, otherwise
				// Hibernate fails when saving the detached 'book' with the
				// same identifier. Only evict when a copy was actually loaded.
				getSession().evict(oldBook);
			}
		}
		// Persist to the database (assigns the id on insert).
		super.save(book);
		// Refresh the cache entry; skip null names (ConcurrentHashMap
		// disallows null keys).
		if (book.getName() != null){
			bookNameIdMap.put(book.getName(), book.getId());
		}
	}

	/**
	 * Deletes a book and removes its entry from the name->id cache.
	 *
	 * @param book the book to delete
	 */
	public void delete(Book book){
		// ConcurrentHashMap.remove(null) throws NPE, so guard the name.
		if (book.getName() != null){
			bookNameIdMap.remove(book.getName());
		}
		super.delete(book);
	}

	/**
	 * Finds a page of books matching the given example entity.
	 * Non-zero/non-null properties of {@code book} are matched with LIKE.
	 *
	 * @param page paging parameters; also receives the result
	 * @param book example entity whose populated fields form the filter
	 * @return the same page object, filled with matching books
	 */
	public Page<Book> findPage(Page<Book> page, Book book){
		// Query-by-example: LIKE matching, ignoring zero-valued properties.
		Criterion c = Example.create(book).enableLike().excludeZeroes();
		return this.findPage(page, c);
	}

	/**
	 * Saves a book, enforcing uniqueness on (typeId, name, author): if a
	 * record with the same type, title and author already exists it is
	 * updated, otherwise a new record is inserted.
	 *
	 * @param book the book to save
	 */
	public void saveUnique(Book book){
		Book existing = findImportUnique(book.getTypeId(), book.getName(), book.getAuthor());
		if (existing != null){
			// Reuse the existing id so save() performs an update.
			book.setId(existing.getId());
		}
		save(book);
	}

	/**
	 * Looks up the single book identified by type, title and author.
	 *
	 * @param typeId the book type id
	 * @param name   the book title
	 * @param author the author name
	 * @return the matching book, or null if none exists
	 */
	public Book findImportUnique(Long typeId, String name, String author){
		String hql = "from Book where typeId=? and name=? and author=?";
		return findUnique(hql, typeId, name, author);
	}

	/**
	 * Lists all books belonging to the given type.
	 *
	 * @param typeId the book type id
	 * @return all books with that typeId (possibly empty)
	 */
	public List<Book> findAllBooksByTypeId(Long typeId){
		return this.findBy("typeId", typeId);
	}

	/**
	 * Resolves a book title to its id, using the cache when possible.
	 * A miss is cached as 0 so repeated lookups of unknown titles do not
	 * hit the database again.
	 *
	 * @param name the book title; null is treated as "not found"
	 * @return the book id, or 0 if no book with that name exists
	 */
	public Long findBookIdByName(String name){
		if (name == null){
			// ConcurrentHashMap cannot hold a null key; a null name can never
			// match a record, so report "not found" directly.
			return Long.valueOf(0);
		}
		Long id = bookNameIdMap.get(name);
		if (id == null){
			String hql = "select id from Book where name=?";
			List<Long> list = this.find(hql, name);
			id = list.isEmpty() ? Long.valueOf(0) : list.get(0);
			bookNameIdMap.put(name, id);
		}
		// Return the locally computed value rather than re-reading the map,
		// so a concurrent remove() cannot make this method return null.
		return id;
	}

	/**
	 * Full-text search for books by keywords against Original.content.
	 * With multiple keywords, an AND combination is tried first; if it yields
	 * no rows, the search falls back to OR. If neither matches, the page is
	 * returned empty.
	 *
	 * @param page     paging parameters; also receives the result
	 * @param keyWords the search keywords; null/empty returns the page as-is
	 * @return the page filled with maps of {"book": Book, "type": BookType}
	 */
	@SuppressWarnings("unchecked")
	public Page<Map<String, Object>> searchBook(Page<Map<String, Object>> page, String[] keyWords){
		if (keyWords == null || keyWords.length == 0)
			return page;

		String[] words = addLikeToWords(keyWords); // wrap each keyword in %...%
		// First attempt: all keywords must match (AND).
		String likes = getLikeWhere(words.length, "and");
		String countHql = "select count(*) from Original o where ";
		Long count = findUnique(countHql + likes, (Object[])words);
		if (count == 0 && words.length > 1){ // no AND hits: retry with OR
			likes = getLikeWhere(words.length, "or");
			count = findUnique(countHql + likes, (Object[])words);
			if (count == 0){ // still nothing: return the empty page
				return page;
			}
		}
		// Matches exist: fetch the requested slice of book records.
		page.setTotalItems(count);
		String queryHql = "select new map(b as book,bt as type) " +
				"from Book b,BookType bt,Original o " +
				"where b.originalId=o.id and b.typeId=bt.id and ";
		Query q = this.createQuery(queryHql + likes, (Object[])words);
		q.setFirstResult(page.getOffset());
		q.setMaxResults(page.getPageSize());
		page.setResult(q.list());

		return page;
	}

	/**
	 * Builds a WHERE fragment of {@code wordCount} "o.content like ?" clauses
	 * joined by the given operator ("and" / "or").
	 */
	private String getLikeWhere(int wordCount, String joinType){
		List<String> likes = new ArrayList<String>(wordCount);
		for (int i = 0; i < wordCount; i ++){
			likes.add(" o.content like ?");
		}
		return StringUtils.join(likes, " " + joinType + " ");
	}

	/**
	 * Wraps each keyword in '%' wildcards for LIKE matching.
	 */
	private String[] addLikeToWords(String[] keyWords){
		String[] words = new String[keyWords.length];
		for (int i = 0; i < words.length; i ++){
			words[i] = "%" + keyWords[i] + "%";
		}
		return words;
	}
}
