package com.soft.crawler.spider;

import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.apache.log4j.Logger;

import com.soft.crawler.fetcher.FetchResult;
import com.soft.crawler.fetcher.Page;
import com.soft.crawler.mybatis.ZhijianCopy;
import com.soft.crawler.plugin.DigPoint;
import com.soft.crawler.plugin.DupRemovalPoint;
import com.soft.crawler.plugin.FetchPoint;
import com.soft.crawler.plugin.ParsePoint;
import com.soft.crawler.plugin.TargetPoint;
import com.soft.crawler.plugin.TaskPushPoint;
import com.soft.crawler.plugin.impl.BeginPointImpl;
import com.soft.crawler.plugin.impl.DigPointImpl;
import com.soft.crawler.plugin.impl.DupRemovalPointImpl;
import com.soft.crawler.plugin.impl.FetchPointImpl;
import com.soft.crawler.plugin.impl.ParsePointImpl;
import com.soft.crawler.plugin.impl.TargetPointImpl;
import com.soft.crawler.plugin.impl.TaskPushPointImpl;
import com.soft.crawler.plugin.util.CommonUtil;
import com.soft.crawler.task.Task;
import com.soft.crawler.url.SourceUrlChecker;
import com.soft.crawler.xml.Field;
import com.soft.crawler.xml.Rule;
import com.soft.crawler.xml.Rules;
import com.soft.crawler.xml.Target;

public class Spider {
	private static final Logger logger = Logger.getLogger(Spider.class);

	/** Lazily-built, shared MyBatis session factory; building one is expensive. */
	private static volatile SqlSessionFactory sessionFactory;

	/** The task currently being crawled; assigned in {@link #init(Task)}. */
	public Task task;

	/**
	 * Initializes the spider with the task to crawl.
	 *
	 * @param task the crawl task (URL, site configuration, dig results, ...)
	 */
	public void init(Task task) {
		this.task = task;
	}

	/**
	 * Runs the crawl pipeline for the current task:
	 * begin -&gt; fetch -&gt; dig new URLs -&gt; target match -&gt; source-rule check
	 * -&gt; parse -&gt; persist -&gt; schedule follow-up URLs.
	 *
	 * Each stage may veto the task by returning null, which aborts the run.
	 *
	 * @throws Exception if any pipeline stage fails
	 */
	public void run() throws Exception {
		// Extension point: begin — let plugins confirm or veto the task.
		task = new BeginPointImpl().confirmTask(task);
		if (task == null) {
			return;
		}

		// Extension point: fetch — retrieve the HTTP content.
		FetchPoint fetchPoint = new FetchPointImpl();
		FetchResult result = fetchPoint.fetch(task, null);
		if (result == null) {
			return;
		}

		// Extension point: dig — discover new URLs on the fetched page.
		// digNewUrls may return null; handleNewUrls tolerates that.
		DigPoint digPoint = new DigPointImpl();
		Collection<String> newUrls = digPoint.digNewUrls(result, task, null);
		logger.info(newUrls);
		handleNewUrls(newUrls);

		Page page = result.getPage();
		if (page == null) {
			return;
		}

		// Extension point: target — is a target configuration matching this URL?
		TargetPoint targetPoint = new TargetPointImpl();
		Target target = targetPoint.confirmTarget(task, null);
		if (target == null) {
			return;
		}
		task.target = target;

		// Verify the source URL against the configured source rules.
		Rules rules = task.site.getTargets().getSourceRules();
		Rule sourceRule = SourceUrlChecker.checkSourceUrl(rules, task.sourceUrl);
		if (sourceRule == null) {
			return;
		}

		// Extension point: parse — turn the confirmed page into model maps.
		ParsePoint parsePoint = new ParsePointImpl();
		List<Map<String, Object>> models = parsePoint.parse(task, target, page, null);
		if (models == null || models.isEmpty()) {
			return;
		}

		for (Map<String, Object> model : models) {
			// Drop fields declared as parameters — they are inputs, not extracted data.
			for (Field f : target.getModel().getField()) {
				if ("1".equals(f.getIsParam()) || "true".equals(f.getIsParam())) {
					model.remove(f.getName());
				}
			}
			model.put("sourceUrl", task.sourceUrl);
			model.put("taskUrl", task.url);
		}

		// Convert the first usable model into an entity and persist it.
		Object obj = onParse(task, models);
		if (obj != null) {
			saveOne(obj);
		}

		// Schedule any URLs that later pipeline stages attached to the task.
		if (task.digNewUrls != null && !task.digNewUrls.isEmpty()) {
			Set<String> urls = new HashSet<String>(task.digNewUrls.size());
			for (String s : task.digNewUrls) {
				if (s != null && s.trim().length() > 0) {
					urls.add(s);
				}
			}
			if (!urls.isEmpty()) {
				handleNewUrls(urls);
				task.digNewUrls.clear();
				task.digNewUrls = null;
			}
		}
	}

	/**
	 * Deduplicates the discovered URLs, converts the survivors into tasks and
	 * pushes them onto the scheduling queue.
	 *
	 * @param newUrls candidate URLs; may be null or empty
	 * @throws Exception if deduplication or task pushing fails
	 */
	private void handleNewUrls(Collection<String> newUrls) throws Exception {
		if (newUrls == null) {
			// The dedup plugin expects a non-null collection.
			newUrls = new ArrayList<String>();
		}

		// Extension point: dup_removal — drop duplicate URLs, wrap the rest as tasks.
		DupRemovalPoint dupRemovalPoint = new DupRemovalPointImpl();
		Collection<Task> validTasks = dupRemovalPoint.removeDuplicateTask(task, newUrls, null);
		if (validTasks == null || validTasks.isEmpty()) {
			return;
		}

		// Extension point: task_push — enqueue the tasks.
		pushTask(validTasks);
	}

	/**
	 * Pushes the given tasks onto the queue via the task-push extension point.
	 *
	 * @param validTasks deduplicated tasks to enqueue
	 * @return the same collection, for chaining
	 * @throws Exception if the push plugin fails
	 */
	public Collection<Task> pushTask(Collection<Task> validTasks) throws Exception {
		new TaskPushPointImpl().pushTask(validTasks);
		return validTasks;
	}

	/**
	 * Converts the parsed models to JSON and then into a {@link ZhijianCopy}
	 * entity via Jackson. The first model that converts successfully is
	 * returned; models after it are not examined.
	 *
	 * @param task   the current task (kept for signature compatibility; unused)
	 * @param models parsed page models, each a field-name-to-value map
	 * @return the first successfully converted entity, or null if none converts
	 */
	public Object onParse(Task task, List<Map<String, Object>> models) {
		try {
			for (Map<String, Object> model : models) {
				String content = CommonUtil.toJson(model);
				logger.info(content);
				ZhijianCopy entity = (ZhijianCopy) CommonUtil.JsonToObject(content, ZhijianCopy.class);
				if (entity != null) {
					return entity;
				}
			}
		} catch (Exception e) {
			// Log instead of printStackTrace so failures land in the crawler log.
			logger.error("Failed to convert parsed model to entity", e);
		}
		return null;
	}

	/**
	 * Returns the shared {@link SqlSessionFactory}, creating it on first use
	 * from mybatisConfig.xml on the classpath (double-checked locking so the
	 * expensive build happens once, not per insert).
	 */
	private static SqlSessionFactory getSessionFactory() {
		if (sessionFactory == null) {
			synchronized (Spider.class) {
				if (sessionFactory == null) {
					InputStream is = Spider.class.getClassLoader().getResourceAsStream("mybatisConfig.xml");
					try {
						sessionFactory = new SqlSessionFactoryBuilder().build(is);
					} finally {
						if (is != null) {
							try {
								is.close();
							} catch (Exception ignored) {
								// best-effort close; the builder has already consumed the stream
							}
						}
					}
				}
			}
		}
		return sessionFactory;
	}

	/**
	 * Persists one crawled entity via the "insertSelective" mapped statement.
	 *
	 * @param obj the entity to insert; ignored when null
	 */
	public void saveOne(Object obj) {
		if (obj == null) {
			return;
		}
		SqlSession session = getSessionFactory().openSession();
		try {
			session.insert("insertSelective", obj);
			session.commit();
		} finally {
			// Always release the session, even when the insert throws.
			session.close();
		}
	}
}
