package com.leaderment.timatt.webmagic.util;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;

import org.apache.log4j.Logger;

import com.leaderment.timatt.SpringContextUtil;
import com.leaderment.timatt.mybatis.bo.CrawlerDomainProxyRedisMappingBO;
import com.leaderment.timatt.mybatis.entity.CrawlerHtml;
import com.leaderment.timatt.mybatis.mapper.CrawlerDomainProxyRedisMappingMapper;
import com.leaderment.timatt.mybatis.mapper.CrawlerHtmlMapper;
/**
 * Database helper methods for the crawler (数据库操作相关方法).
 *
 * @author Kerain
 * @version 2019年1月14日 下午4:08:31
 */
public class DBUtil {
	// NOTE(review): SimpleDateFormat is NOT thread-safe. This shared public
	// instance must not be used from multiple threads concurrently; new code
	// should prefer a static final java.time.DateTimeFormatter instead.
	// Kept with the original type/visibility because external callers may use it.
	public static SimpleDateFormat sdf= new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
	private static final Logger logger = Logger.getLogger(DBUtil.class);

	/** Utility class — static methods only, not meant to be instantiated. */
	private DBUtil() {
	}

	/**
	 * Persists a record describing one crawled HTML file (insert-or-update
	 * via the {@code crawlerHtmlMapper} bean). The record is stored with
	 * {@code isParsed = false} so a downstream parser can pick it up later.
	 *
	 * @param urlId       primary key of the crawled URL
	 * @param filePath    path of the saved HTML content file
	 * @param crawlDate   time the page was crawled
	 * @param currentPage page number within a paginated crawl
	 */
	public static void saveFile(Integer urlId, String filePath, Date crawlDate, Integer currentPage) {
		CrawlerHtmlMapper crawlerHtmlMapper =
				(CrawlerHtmlMapper) SpringContextUtil.getBean("crawlerHtmlMapper");
		CrawlerHtml crawlerHtml = new CrawlerHtml();
		crawlerHtml.setUrlId(urlId);
		crawlerHtml.setFilePath(filePath);
		crawlerHtml.setHtmlCrawlTime(crawlDate);
		crawlerHtml.setHtmlPage(currentPage);
		// Mark unparsed so the parsing stage knows this file still needs processing.
		crawlerHtml.setIsParsed(false);
		crawlerHtmlMapper.insertOrUpdate(crawlerHtml);
	}

	/**
	 * Ad-hoc smoke test: fetches the domain/proxy Redis mapping list through
	 * the {@code crawlerDomainProxyRedisMappingMapper} bean and logs the first
	 * row's crawler domain. Guards against an empty result instead of throwing
	 * {@link IndexOutOfBoundsException}.
	 */
	public static void test() {
		logger.info(888);
		CrawlerDomainProxyRedisMappingMapper crawlerDomainProxyRedisMappingMapper =
				(CrawlerDomainProxyRedisMappingMapper) SpringContextUtil.getBean("crawlerDomainProxyRedisMappingMapper");
		List<CrawlerDomainProxyRedisMappingBO> list =
				crawlerDomainProxyRedisMappingMapper.selectDomainAndListName();
		if (list == null || list.isEmpty()) {
			logger.info("selectDomainAndListName returned no rows");
			return;
		}
		// Was logged as "listSize:" but the value is the first row's domain, not a size.
		logger.info("firstCrawlerDomain:" + list.get(0).getCrawlerDomain());
	}
}
