package com.terren.spider.run;

import java.util.ArrayList;
import java.util.List;

import com.terren.spider.core.html.processor.BasicHtmlProcessor;
import com.terren.spider.core.html.processor.HtmlArticleSearchUrlProcessor;
import com.terren.spider.entity.biz.ArticleSearch;
import com.terren.spider.entity.common.SpiderLog;
import com.terren.spider.entity.core.Entry;
import com.terren.spider.run.basic.BasicSpiderArticleSearchRunner;
import com.terren.spider.util.common.LogUtil;
import com.terren.spider.util.common.StringUtil;

/**
 * Deprecated: use {@code SpiderArticleSearchRunner} instead.
 *
 * @deprecated replaced by {@code SpiderArticleSearchRunner}
 */
@Deprecated
public class SpiderArticleUrlSearchRunner extends BasicSpiderArticleSearchRunner {

	// URLs produced by the most recent handler.findUrl() call; consumed in postRun().
	protected String[] urls;

	/**
	 * Fetches one pending spider task, marks it as running (status 9), and prepares
	 * every entry of the task's source: copies the task's keyword, limit and time
	 * window onto each entry and formats its search URL.
	 *
	 * @return the prepared entries; empty when no task is pending or when an error
	 *         occurred (the task is then marked failed via {@code taskFaild()})
	 */
	@Override
	public List<Entry> getEntrys() {
		List<Entry> entries = new ArrayList<>();
		try {
			tasks = spiderTaskService.getOneSpiderTask(); // fetch one pending spider task
			if (null != tasks) {
				tasks.setStatus(9); // status 9 = running
				spiderTaskService.updateStatus(tasks);
				// all entries sharing this task's source
				entries = entryService.getEntryBySourceId(tasks.getSourceId());
				for (Entry entry : entries) {
					entry.setKeyword(tasks.getKeyword());
					entry.setTaskId(tasks.getTaskId());
					entry.setRecordLimit(tasks.getLimit());
					entry.setBeginTime(tasks.getBeginTime());
					entry.setEndTime(tasks.getEndTime());
					String entryUrl = entry.getEntryUrl();
					tasks.setAct(entry.getAct());
					String url;
					// Several entries may share one sourceId: only the first formatting
					// pass may encode the keyword, otherwise it gets URL-encoded twice.
					if (!tasks.getFlag()) {
						url = StringUtil.formatSearchUrl(getFormatTask(tasks), entryUrl);
						tasks.setFlag(true);
					} else {
						url = StringUtil.formatSearchUrl(tasks, entryUrl);
					}
					entry.setEntryUrl(url);
				}
			}
		} catch (Exception e) {
			LogUtil.logger.error("SpiderArticleUrlSearchRunner error", e);
			taskFaild();
		}

		return entries;
	}

	/**
	 * Persists the crawled link/title records (capped at the entry's record limit)
	 * and updates the task status: 2 on success, -1 on failure. The status update
	 * itself is attempted even when saving failed.
	 *
	 * @param entry the entry whose crawl results should be stored
	 */
	@Override
	public void postRun(Entry entry) {
		if (tasks == null) {
			return;
		}
		try {
			if (urls != null && urls.length > 0) {
				// Write the crawled link / title / source records to the database.
				BasicHtmlProcessor urlProcessor = handler.getHtmlFetchUtil().getUrlProcessor();
				if (urlProcessor instanceof HtmlArticleSearchUrlProcessor) {
					List<ArticleSearch> articleSearchs = ((HtmlArticleSearchUrlProcessor) urlProcessor)
							.getArticleList();
					articleSearchService.saveTitleInfo(limitArticles(articleSearchs, entry.getRecordLimit()));
				}
			}
			tasks.setStatus(2); // status 2 = link/title crawl finished
		} catch (Exception e) {
			tasks.setStatus(-1); // status -1 = failed
			log.error("runTask error", e);
		}
		try {
			spiderTaskService.updateStatus(tasks);
		} catch (Exception e) {
			log.error("runTask update status error", e);
		}
	}

	/**
	 * Keeps at most {@code limit} articles, selected and ordered by their index
	 * field (index 0 first, then 1, ...). A {@code null} limit means "no cap"
	 * (the original code unboxed the limit and would have thrown an NPE here).
	 *
	 * @param articleSearchs all crawled articles
	 * @param limit maximum number of distinct index values to keep, or null for all
	 * @return the capped article list
	 */
	private List<ArticleSearch> limitArticles(List<ArticleSearch> articleSearchs, Integer limit) {
		List<ArticleSearch> lastArticles = new ArrayList<ArticleSearch>();
		if (limit == null || limit > articleSearchs.size()) {
			lastArticles.addAll(articleSearchs);
		} else {
			// Preserve the original selection order: by index value, then list order.
			for (int i = 0; i < limit; i++) {
				for (ArticleSearch as : articleSearchs) {
					if (as.getIndex() != null && as.getIndex() == i) {
						lastArticles.add(as);
					}
				}
			}
		}
		return lastArticles;
	}

	/**
	 * Search-engine link-crawling entry point. For every prepared entry this
	 * crawls the URL list, then (in {@code finally}) persists results and updates
	 * the crawl log — even when {@code findUrl} threw, in which case the exception
	 * is rethrown to the caller after logging.
	 *
	 * @throws Exception propagated from the crawl of any entry
	 */
	public void run() throws Exception {
		long startTime = System.currentTimeMillis();
		log.info("【爬虫开始】请耐心等待...");
		List<Entry> entries = getEntrys();
		for (Entry entry : entries) {
			// Crawl-log message; rewritten with failure details in the catch block.
			String message = "抓取链接" + "[taskId:" + entry.getTaskId() + "]" + "[搜索引擎:" + entry.getEntryDesc() + "]"
					+ "[关键词:" + entry.getKeyword() + "]"  + "[url:" + entry.getEntryUrl() + "]" + (tasks.getSite() == null ? ""
							: "[site:" + tasks.getSite() + "]");
			SpiderLog slog = new SpiderLog();
			Integer size = null; // number of crawled URLs; null when the crawl failed
			Integer status = 1;  // 1 = success, 0 = failure
			try {
				slog = saveLog(entry);
				postInitEntry(entry);
				handler = spiderHandler(entry);
				log.info("搜索关键词标题及链接任务开始.." + "[taskId:" + entry.getTaskId() + "]" + "[搜索引擎:" + entry.getEntryDesc()
						+ "]" + "[关键词:" + entry.getKeyword() + "]"+ "[url:" + entry.getEntryUrl() + "]"
						+ (tasks.getSite() == null ? "" : "[site:" + tasks.getSite() + "]"));
				urls = handler.findUrl();
				size = urls.length;
			} catch (Exception e) {
				message = "[taskId:" + entry.getTaskId() + "]" + "[搜索引擎:" + entry.getEntryDesc() + "]" + "[关键词:"
						+ entry.getKeyword() + "]"+ "[url:" + entry.getEntryUrl() + "]" + (tasks.getSite() == null ? "" : "[site:" + tasks.getSite() + "]")
						 + "抓取链接失败:" + e.getMessage();
				status = 0;
				throw e;
			} finally {
				// Persist results and update the crawl log even on failure.
				postRun(entry);
				updateLog(slog, entry, message, size, status);
			}
		}
		long endTime = System.currentTimeMillis();
		log.info("【爬虫结束】,共耗时约" + ((endTime - startTime) / 1000 / 60) + "分。");
	}

}
