package com.geek.novel.services

import android.text.TextUtils
import com.geek.novel.bo.AppContext
import com.geek.novel.common.Constant
import com.geek.novel.entity.*
import com.geek.novel.event.EventHandler
import com.geek.novel.services.parser.impl.NovelSectionParserImpl
import com.geek.novel.utils.*
import com.geek.novel.view.loading.LoadingDialogUtil
import com.geek.novel.vo.CrawlerSectionVo
import com.vicpin.krealmextensions.queryFirst
import java.util.*
import kotlin.collections.ArrayList


/**小说爬取接口*/
open class CrawlerRepository private constructor(){

	/** Singleton holder — thread-safe lazy creation via double-checked locking. */
	companion object{
		@Volatile
		private var instance: CrawlerRepository? = null

		/** Returns the process-wide instance, creating it on first use. */
		fun getInstance(): CrawlerRepository =
			instance ?: synchronized(CrawlerRepository::class.java) {
				// Re-check inside the lock: another thread may have won the race.
				instance ?: CrawlerRepository().also { instance = it }
			}
	}

	/** Repository for book records. */
	private var bookRepository = BookRepository.getInstance()

	/** Repository for book-to-crawler-source bindings (one per book/source pair). */
	private var bookCrawlerRepository = BookCrawlerRepository()

	/** Repository for book chapter indexes (table of contents entries). */
	private var bookIndexRepository = BookIndexRepository()

	/** Repository for chapter contents. */
	private var bookSectionRepository = BookSectionRepository()

	/** Parser that extracts chapter lists and chapter text from crawled HTML. */
	private var novelSectionParser = NovelSectionParserImpl()

	/** Looks up a crawler source by its string id; null for a blank id or no match. */
	fun getById(id: String?): CrawlerSource?{
		if(TextUtils.isEmpty(id)){
			return null
		}
		// id is non-empty here; the table key is numeric.
		return CrawlerSource().queryFirst { equalTo("id", id!!.toInt()) }
	}

	/** Resolves a crawler source by id, falling back to the active source when not found. */
	open fun findById(id: String?): CrawlerSource? = this.getById(id) ?: this.findByActive()

	/**
	 * Returns the crawler source currently flagged as active, or null when none is.
	 */
	open fun findByActive(): CrawlerSource? =
		CrawlerSource().queryFirst { equalTo("active", true) }

	/**
	 * Resolves the crawler source bound to [book].
	 * @throws RuntimeException when neither the bound nor the active source exists
	 */
	open fun findCrawlerSource(book: BookRead): CrawlerSource{
		val sourceId = this.getBookCrawler(book).crawlSourceId
		return this.findById(sourceId) ?: throw RuntimeException("获取爬虫源失败！")
	}


	/**
	 * Returns the crawler binding for [book]: the one the book references
	 * directly, otherwise the binding that matches the active crawler source,
	 * otherwise the first binding known for this book.
	 * @throws RuntimeException when no binding (or no active source) can be found
	 */
	fun getBookCrawler(book: BookRead): BookCrawler{
		val bookCrawlerId = book.bookCrawlerId
		if(FormatUtil.isNotEmpty(bookCrawlerId)){
			// The book points at a specific binding — it must exist.
			return bookCrawlerRepository.findById(bookCrawlerId!!)
				?: throw RuntimeException("获取书籍爬虫源失败！bookId：${book.bookId}, bookCrawlerId：${bookCrawlerId}")
		}
		// No direct reference: choose among all bindings known for this book.
		val candidates = bookCrawlerRepository.findByBookId(book.bookId!!)
		if(candidates.isEmpty()){
			throw RuntimeException("获取书籍爬虫源列表失败！bookId：${book.bookId}")
		}
		val crawlerSource = this.findByActive()
			?: throw RuntimeException("获取爬虫源失败！bookId：${book.bookId}")
		// Prefer the binding that belongs to the active source.
		return candidates.firstOrNull { it.crawlSourceId == "${crawlerSource.id}" } ?: candidates[0]
	}

	/**
	 * Entry point for loading a book's chapter index.
	 * Local books are read straight from the DB; remote books may be re-crawled
	 * unless a recent crawl or finished-book state makes that unnecessary.
	 * Results are delivered through [handler]; the return value is the loaded
	 * index list (empty for local books and on error paths).
	 */
	fun startCrawlerBook(book: BookRead, handler: EventHandler, toast: Boolean=false): List<BookIndex> {
		// Local (imported) books: their index only ever lives in the local DB.
		if(book.local){
			val list = bookIndexRepository.findByBookId(book.bookId!!)
			if(list.isNotEmpty()){
				// Hand the index back to the UI.
				handler.sendHandlerMessage(Constant.handler_event_load_book_index, list)
			}else{
				this.toast("获取章节列表为空！请检查本地书籍格式是否正确！")
			}
			// NOTE(review): local path returns an empty list even on success;
			// callers receive the real data only via the handler message above.
			return ArrayList()
		}

		// Resolve which crawler source this book is bound to.
		val bookCrawler = this.getBookCrawler(book)

		// Throttle: skip re-crawling when we fetched recently.
		val lastCrawlerTime = bookCrawler.lastCrawlerTime
		val bookEndTime = book.endTime?:Date(0)
		// Finished book (bookStatus == 1) whose last crawl is newer than its
		// completion time: the index cannot have changed since then.
		val timeJudge = book.bookStatus == 1 && lastCrawlerTime != null && lastCrawlerTime.after(bookEndTime)
		// Finished, or last crawl within 24h: serve the cached index if present.
		if(timeJudge || DateUtil.isTimeInMinutes(lastCrawlerTime, (24*60).toLong())){
			LogCatUtil.e("???", "书籍处于完本状态或者上次请求时间在限定时间内，不需要反复发起请求")
			val bookCrawlerId = "${bookCrawler.id}"
			val list = bookIndexRepository.findByBookIdAndBookCrawlerId(book.bookId!!, bookCrawlerId)
			if(list.isNotEmpty()){
				// Cached index found: deliver it and stop here.
				handler.sendHandlerMessage(Constant.handler_event_load_book_index, list)
				return list
			}
		}

		// No usable cache: pick the crawler source and crawl the index afresh.
		val crawlerSource = this.findCrawlerSource(book)

		return this.crawlerBookIndex(crawlerSource, book, bookCrawler, handler, toast)
	}

	/**
	 * Crawls the chapter index (table of contents) for [book] from [crawlerSource]
	 * and delegates response handling to [handleCrawlerBookIndexResult].
	 */
	open fun crawlerBookIndex(crawlerSource: CrawlerSource, book: BookRead, bookCrawler: BookCrawler, handler: EventHandler, toast: Boolean=false): List<BookIndex> {
		// Compose the index page URL: base -> crawl book id -> index page path.
		var url = HttpUtils.parseUrl(crawlerSource.baseUrl, bookCrawler.crawlBookId)
		url = HttpUtils.parseUrl(url, crawlerSource.indexPageUrl)

		LogCatUtil.i("???", url)

		if(toast){
			this.showLoading("正在获取目录索引")
		}

		val response = HttpUtils.doGet(url)
		return this.handleCrawlerBookIndexResult(response, crawlerSource, book, bookCrawler, handler)
	}

	/**
	 * Processes the HTTP response of an index crawl: parses the chapter list,
	 * persists it in batches, records crawl bookkeeping on [bookCrawler], and
	 * notifies [handler]. Returns the resulting index list (empty on failure).
	 */
	private fun handleCrawlerBookIndexResult(result: HttpClientResult, crawlerSource: CrawlerSource, book: BookRead, bookCrawler: BookCrawler, handler: EventHandler): List<BookIndex>{
		if(result.isSuccess){
			val html = result.content
			// Parse the chapter list out of the raw HTML.
			val bookIndexes = novelSectionParser.parseBookIndex(crawlerSource, book, bookCrawler, html)

			val crawlBookId = bookCrawler.crawlBookId
			LogCatUtil.i("???", "解析到书籍列表(${book.bookName})(${crawlBookId}), 目标爬取章数: ${bookIndexes.size}")

			// Record the crawl time; it is cleared when the shelf detects updates.
			bookCrawler.lastCrawlerTime = Date()
			bookCrawlerRepository.save(bookCrawler)

			if(bookIndexes.isEmpty()){
				// Parser found nothing: fall back to whatever is already cached.
				val bookCrawlerId = "${bookCrawler.id}"
				val list = bookIndexRepository.findByBookIdAndBookCrawlerId(book.bookId!!, bookCrawlerId)
				if(list.isEmpty()){
					// No cache either — undo the crawl timestamp so the next
					// attempt is not suppressed by the 24h throttle.
					bookCrawler.lastCrawlerTime = null
					bookCrawlerRepository.save(bookCrawler)
				}
				// Deliver the (possibly empty) cached list to the UI.
				handler.sendHandlerMessage(Constant.handler_event_load_book_index, list)
				return list
			}

			// Remember the newest chapter on the book record.
			val bookIndex = bookIndexes[bookIndexes.size - 1]
			book.lastIndexName = bookIndex.indexName
			book.lastIndexUpdateTime = Date()
			// Persist in batches so the table is not locked for too long.
			saveBookIndies(book, bookIndexes)
			bookRepository.save(book)

			// Mirror the newest chapter onto this crawler-source binding.
			bookCrawler.lastIndexId = bookIndex.id
			bookCrawler.lastIndexName = bookIndex.indexName
			bookCrawlerRepository.save(bookCrawler)

			// Deliver the freshly crawled index to the UI.
			handler.sendHandlerMessage(Constant.handler_event_load_book_index, bookIndexes)
			return bookIndexes
		}else{
			this.hideLoading()
//			this.toast("缓存章节列表信息异常：${result.message}")
			// Request failed — tell the UI the index load errored.
			handler.sendHandlerMessage(Constant.handler_event_load_book_index_error)
		}
		return ArrayList()
	}


	/**
	 * Persists chapter indexes in batches of 100 so the table is never locked
	 * for too long, reporting progress to the loading dialog between batches.
	 *
	 * Fixes over the previous version: no crash on an empty [bookIndexes]
	 * (it unconditionally read `bookIndexes[0]`); batches are exactly 100
	 * items (the first batch used to contain 101); and the final log line
	 * reported wrong chapter numbers (it used the loop counter past the end
	 * and the batch size instead of the total) plus a stray `}` in the name.
	 */
	private fun saveBookIndies(book: BookRead, bookIndexes: List<BookIndex>) {
		val batchSize = 100
		val total = bookIndexes.size
		var saved = 0
		for (batch in bookIndexes.chunked(batchSize)) {
			// Update the progress dialog (hops to the main thread internally).
			this.showLoading("更新进度：${saved}/$total")
			bookIndexRepository.saveAll(batch)
			val firstChapter = saved + 1
			saved += batch.size
			LogCatUtil.i("???", "保存书本（${book.bookName}）章节索引第${firstChapter}章(${batch.first().indexName})到第${saved}章(${batch.last().indexName})")
		}
	}

	/** Crawls one chapter's content on a background thread; results arrive via [handler]. */
	fun startCrawlerBookSectionAsync(book: BookRead, bookIndex: BookIndex, handler: EventHandler, toast: Boolean=false) {
		// Network and DB work must stay off the caller's (UI) thread.
		Thread {
			startCrawlerBookSection(book, bookIndex, handler, toast)
		}.start()
	}

	/**
	 * Loads the content of a single chapter [bookIndex].
	 * Order of preference: local DB (local books), cached crawl result, then
	 * a fresh crawl. The chapter is delivered through [handler].
	 */
	open fun startCrawlerBookSection(book: BookRead, bookIndex: BookIndex, handler: EventHandler, toast: Boolean=false) {
		if(toast){
			this.showLoading("正在加载章节信息")
		}

		val code = Constant.handler_event_load_book_section

		// Local (imported) books: content only ever lives in the local DB.
		if(book.local){
			val section = bookSectionRepository.findSectionById(bookIndex.sectionId)
			if(section != null){
				// Hand the chapter back to the UI.
				val crawlerSectionVo = CrawlerSectionVo.of(bookIndex, section, Constant.RESPONSE_CODE_SUCCESS)
				handler.sendHandlerMessage(code, crawlerSectionVo)
			}else{
				this.toast("获取章节信息为空！请检查本地书籍格式是否正确！")
			}
			return
		}

		// Resolve which crawler source this book is bound to.
		val bookCrawler = this.getBookCrawler(book)

		// If the chapter is flagged as crawled, try to serve the cached copy.
		val crawler = FormatUtil.isNullToBoolean(bookIndex.crawler, false)
		if(crawler){
			LogCatUtil.i("???", "章节(${bookIndex.indexName})标记为已缓存，开始判断是否需要爬取")
			val section = bookSectionRepository.findSectionByIdAndBookCrawlerId(bookIndex.sectionId, "${bookCrawler.id}")
			if(section != null && !TextUtils.isEmpty(section.content)){
				LogCatUtil.i("???", "本地已经存在章节(${section.title})，不需要爬取")
				val crawlerSectionVo = CrawlerSectionVo.of(bookIndex, section, Constant.RESPONSE_CODE_SUCCESS)
				handler.sendHandlerMessage(code, crawlerSectionVo)
				return
			}else{
				LogCatUtil.i("???", "章节(${bookIndex.indexName})内容为空，需要重新爬取数据")
			}
		}
		// Cache miss (or cached content empty): crawl the chapter afresh.
		val crawlerSource = this.findCrawlerSource(book)
		this.crawlerBookSection(crawlerSource, book, bookCrawler, bookIndex, handler, code, toast)
	}

	/**
	 * Crawls one chapter's content from [crawlerSource].
	 * Any previously stored copy is deleted, an empty placeholder row is saved
	 * first, then the (possibly multi-page) content is fetched and the row is
	 * updated. [bookIndex].crawler is only set true once the crawl completes.
	 * @throws RuntimeException when the chapter has no target URL
	 */
	open fun crawlerBookSection(crawlerSource: CrawlerSource, book: BookRead, bookCrawler: BookCrawler, bookIndex: BookIndex, handler: EventHandler, code: Int, toast: Boolean) {
		val sectionUrl = bookIndex.sectionUrl
		if (FormatUtil.isEmpty(sectionUrl)) {
			throw RuntimeException("章节目标地址不能为空！")
		}

		val bookId = book.bookId!!
		val indexId = bookIndex.id!!

		LogCatUtil.i("???", "爬取章节：" + bookIndex.indexName)

		// If this chapter was crawled before, drop the stale copy and re-crawl.
		bookSectionRepository.deleteByBookIdAndIndexId(bookId, indexId)

		// Insert a placeholder row first; it is filled in once crawling finishes.
		val section = BookSection()
		section.initEntity()
		section.bookId = bookId
		section.indexId = indexId
		section.content = ""
		section.title = bookIndex.indexName
		section.bookCrawlerId = "${bookCrawler.id}"
		bookSectionRepository.save(section)

		// Link the index to the new row; crawler stays false until the crawl succeeds.
		bookIndex.sectionId = section.id
		bookIndex.crawler = false
		bookIndexRepository.save(bookIndex)

		// Fetch the content, following "next page" links recursively.
		val result = StringBuffer()
		this.crawlerSectionByUrl(0, crawlerSource, bookCrawler, sectionUrl, result, section, bookIndex, handler, code, toast)

		LogCatUtil.i("???", "保存章节(" + section.id + ")：" + bookIndex.indexName)

	}

	/**
	 * Crawls one page of a chapter and recurses into the next page, if any.
	 * Page contents accumulate in [result], joined by the page separator.
	 * On the last page the section is saved and [handler] is notified with
	 * [code]; on HTTP failure an error message is sent instead.
	 * @param pageIndex     zero-based page number, used for progress reporting
	 * @param crawlerSource crawler source supplying base URL and parse config
	 * @param sectionUrl    page URL (resolved against the source's base URL)
	 * @param result        accumulator for the chapter text across pages
	 */
	private fun crawlerSectionByUrl(pageIndex: Int,
									crawlerSource: CrawlerSource, bookCrawler: BookCrawler, sectionUrl: String?,
									result: StringBuffer, section: BookSection, bookIndex: BookIndex,
									handler: EventHandler, code: Int, toast: Boolean) {
		val baseUrl = crawlerSource.baseUrl
		val config = crawlerSource.getCrawlerConfig()

		// Some sources embed the book id in section URLs; honour that config flag.
		var url = baseUrl
		if(config.isSectionUrlBookNeed){
			url = HttpUtils.parseUrl(baseUrl, bookCrawler.crawlBookId)
		}
		url = HttpUtils.parseUrl(url, sectionUrl)

		LogCatUtil.i("???", "爬取章节内容：$url")

		// Report per-page progress to the UI before the request goes out.
		val crawlerSectionVo = CrawlerSectionVo.of(bookIndex, section, Constant.RESPONSE_CODE_SUCCESS)
		crawlerSectionVo.pageIndex = pageIndex
		handler.sendHandlerMessage(Constant.handler_event_load_book_section_page, crawlerSectionVo)
		if(toast){
			this.showLoading("正在加载章节信息：${pageIndex+1}")
		}


		val t = HttpUtils.doGet(url)
		if(t.isSuccess){
			val html: String = t.content
			// Extract this page's body text.
			val content = novelSectionParser.getSectionContent(config, html)
			result.append(content)
			// Follow the "next page" link, if the parser finds one.
			val nextPageUrl = novelSectionParser.getSectionNextPageUrl(crawlerSource, html)
			if (FormatUtil.isNotEmpty(nextPageUrl)) {
				// Insert a page separator between page contents.
				result.append(Constant.BUS_SECTION_PAGE_SEPARATE)

				// Recurse into the next page.
				this.crawlerSectionByUrl(pageIndex+1, crawlerSource, bookCrawler, nextPageUrl, result, section, bookIndex, handler, code, toast)
			} else {
				// Last page reached: persist the full chapter text.
				section.content = result.toString()
				bookSectionRepository.save(section)

				val crawlerSectionVo1 = CrawlerSectionVo.of(bookIndex, section, Constant.RESPONSE_CODE_SUCCESS)
				handler.sendHandlerMessage(code, crawlerSectionVo1)
				LogCatUtil.i("???", "爬取结束啦！")

				// Only now mark the index as successfully crawled.
				bookIndex.crawler = true
				bookIndexRepository.save(bookIndex)
			}
		}else{
			// Hide the loading dialog and report the error to the UI.
			this.hideLoading()
//			this.toast("缓存章节信息(${bookIndex.indexName})异常：${t.message}")

			val crawlerSectionVo2 = CrawlerSectionVo.of(bookIndex, section, Constant.RESPONSE_CODE_ERROR)
			handler.sendHandlerMessage(Constant.handler_event_load_book_section_error, crawlerSectionVo2)
		}

	}

	/**
	 * Normalizes crawled chapter HTML into plain readable text.
	 * Local (imported) books are returned untouched; on any formatting error
	 * the partially-processed text is returned and a toast reports the error.
	 */
	fun formatBookSectionContent(book: BookRead, content: String): String {
		var result = content
		// Local books were never crawled, so there is no HTML to strip.
		if(book.local){
			return result
		}
		try {
			val crawlerSource = findCrawlerSource(book)
			// Source-specific replacement rules; default to none when unset.
			val sectionRules = crawlerSource.getCrawlerConfig().sectionRules ?: ArrayList()

			// Strip leading <br> tags.
			result = BookUtil.replaceContent(result, "^(<br.*?/?>)+:")

			// Strip HTML comments.
			result = BookUtil.replaceContent(result, "<!--.+?-->:")

			// Drop page separators (with or without a trailing <br>).
			val pageSeparate = Constant.BUS_SECTION_PAGE_SEPARATE
			result = BookUtil.replaceContent(result, "$pageSeparate<br.*?/?>:")
			result = BookUtil.replaceContent(result, "$pageSeparate:")

			// Turn remaining <br> into newlines, then apply the per-source rules.
			sectionRules.add("<br.*?/?>:\n")
			for (rule in sectionRules){
				result = BookUtil.replaceContent(result, rule)
			}

			// Finally strip any leftover HTML tags and surrounding whitespace.
			result = BookUtil.removeAllHtmlTag(result).trim()
		} catch (e: Exception) {
			this.toast("格式化内容出错：${e.message}")
			LogCatUtil.e("???", e.message, e)
		}
		return result
	}


	/** Shows or updates the global loading dialog with [text], hopping to the main thread. */
	private fun showLoading(text: String){
		val appContext = AppContext.getContext()
		appContext.doOnMainThread {
			LoadingDialogUtil.getInstance().showLoading(text)
		}
	}

	/** Dismisses the global loading dialog, hopping to the main thread. */
	private fun hideLoading(){
		val appContext = AppContext.getContext()
		appContext.doOnMainThread {
			LoadingDialogUtil.getInstance().dismiss()
		}
	}

	/** Shows a toast with [msg] on the main thread. */
	private fun toast(msg: String){
		val appContext = AppContext.getContext()
		appContext.doOnMainThread {
			ToastUtils.toast(msg)
		}
	}

}