package cn.q3c.smzdm.web;

import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.SQLException;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import org.apache.commons.validator.UrlValidator;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.cache.Cache;
import org.springframework.cache.ehcache.EhCacheCacheManager;

import cn.keyvalue.ccf.common.util.ImageUtils;
import cn.q3c.smzdm.bean.Crawler;
import cn.q3c.smzdm.bean.Images;
import cn.q3c.smzdm.dao.CrawlerDAO;

/**
 * 页面内容采集接口 同一链接，5分钟内不得重复提交（时间根据缓存时间确定）
 * 
 * 
 * @author wangxinning
 * 
 */
@Controller
public class CrawlerWebService {

	private static final Logger logger = Logger.getLogger(CrawlerWebService.class);

	@Autowired
	@Qualifier("crawlerDAO")
	private CrawlerDAO crawlerDAO;

	@Autowired
	@Qualifier("cacheManager")
	private EhCacheCacheManager cacheManager;

	// Root directory where downloaded images are stored locally.
	@Value("${imagelocalpath}")
	private String imagelocalpath;

	// Public HTTP prefix that maps onto imagelocalpath.
	@Value("${imagehttp}")
	private String imagehttp;

	// Maximum number of files per storage sub-directory.
	@Value("${imagelimit}")
	private int imagelimit;

	/**
	 * Downloads the image at the given URL into local storage and records it
	 * in the database (POST).
	 *
	 * Result codes in the returned map under "result":
	 * 1 success (map also carries "nid", "localpath", "url"),
	 * -1 duplicate (the image URL was already recorded),
	 * -2 system error (see the log),
	 * -3 unrecognized image format (temp file is deleted).
	 *
	 * NOTE(review): the duplicate check and the insert are not atomic, and the
	 * 5-character random file name may collide with an earlier download in the
	 * same directory — confirm this is acceptable for the crawler's volume.
	 *
	 * @param imageurl remote image URL to fetch
	 * @param source   identifier of the originating site
	 * @return result map as described above
	 */
	@RequestMapping(value = "/images.json", method = RequestMethod.POST)
	@ResponseBody
	public Map<String, Object> receiveImages(
			@RequestParam("imageurl") String imageurl,
			@RequestParam("source") int source) {

		logger.debug("receiveImages:imageurl=" + imageurl + ";source=" + source);

		Map<String, Object> retMap = new HashMap<String, Object>();

		try {
			// 1. Reject URLs we have already downloaded.
			if (crawlerDAO.getImages(imageurl) != null) {
				retMap.put("result", -1);
				return retMap;
			}

			// 2. Pick (and create if needed) the target sub-directory.
			File savePath = makeSavePath(imagelocalpath, imagelimit);

			// 3. Download to a random temporary name (no extension yet);
			//    30s connect / 30s read timeouts.
			String fname = RandomStringUtils.randomAlphanumeric(5);
			File localFile = new File(savePath, fname);
			FileUtils.copyURLToFile(new URL(imageurl), localFile, 30000, 30000);

			// 4. Detect the real image type from the file content.
			String imageType = ImageUtils.getImageType(localFile);
			if (StringUtils.isBlank(imageType)) {
				// Not a recognizable image: clean up and report.
				FileUtils.deleteQuietly(localFile);
				retMap.put("result", -3);
				return retMap;
			}

			// 5. Rename with the detected extension and persist the record.
			File destFile = new File(localFile.toString() + "." + imageType);
			FileUtils.moveFile(localFile, destFile);

			Images image = new Images();
			image.setImageurl(imageurl);
			image.setLocalpath(destFile.toString());
			image.setSource(source);
			image.setCdate(new Date());
			image.setUdate(new Date());

			long nid = crawlerDAO.addImages(image);

			// Database id of the stored image.
			retMap.put("nid", nid);
			// Absolute local path on disk.
			retMap.put("localpath", image.getLocalpath());
			// Public URL = http prefix + path relative to the storage root.
			retMap.put("url", imagehttp
					+ StringUtils.remove(image.getLocalpath(), imagelocalpath));
			retMap.put("result", 1);
		} catch (Exception e) {
			retMap.put("result", -2);
			logger.error("download images error!", e);
		}

		return retMap;
	}

	/**
	 * Image proxy (GET): stores a local copy of the remote image and redirects
	 * the client to the local URL. Falls back to an already-stored copy when
	 * the download reports a non-success code, and finally to the original
	 * remote URL.
	 *
	 * @param imageurl remote image URL
	 * @param source   identifier of the originating site
	 * @return a Spring "redirect:" view name
	 * @throws SQLException when the stored-copy lookup fails
	 */
	@RequestMapping(value = "/imgproxy.do", method = RequestMethod.GET)
	public String imgproxy(
			@RequestParam("imageurl") String imageurl,
			@RequestParam("source") int source) throws SQLException {

		logger.debug("imgproxy:imageurl=" + imageurl + ";source=" + source);

		Map<String, Object> retMap = this.receiveImages(imageurl, source);
		if (retMap != null && (Integer) retMap.get("result") == 1) {
			String url = (String) retMap.get("url");
			if (StringUtils.isNotBlank(url)) {
				return "redirect:" + StringUtils.trimToEmpty(url);
			}
		} else {
			// Download failed or was a duplicate: serve the copy we already
			// have on disk, if any.
			Images image = crawlerDAO.getImages(imageurl);
			if (image != null) {
				return "redirect:" + imagehttp
						+ StringUtils.remove(image.getLocalpath(), imagelocalpath);
			}
		}

		// Last resort: send the client straight to the remote image.
		return "redirect:" + StringUtils.trimToEmpty(imageurl);
	}

	/**
	 * Builds (and creates, if necessary) the storage sub-directory for the
	 * next download. Sub-directories under the root are numbered 1, 2, 3, …
	 * and a new one is opened once the current one holds {@code imagelimit}
	 * files.
	 *
	 * Fix over the previous version: when the current directory is full, roll
	 * over to directory index + 1. The old code reused the FILE count of the
	 * full directory as the new directory name (e.g. a full dir "3" with 1000
	 * files produced dir "1001", which then desynchronized the
	 * listFiles()-count-based index on subsequent calls). The rollover
	 * directory is now also created eagerly.
	 *
	 * @param saveRootPath storage root directory
	 * @param imagelimit   maximum number of files per sub-directory
	 * @return the sub-directory to save the next file into (exists on return)
	 * @throws IOException if a directory cannot be created
	 */
	private File makeSavePath(String saveRootPath, int imagelimit) throws IOException {
		File root = new File(saveRootPath);
		if (!root.exists()) {
			FileUtils.forceMkdir(root);
		}

		// Current sub-directory index = number of entries under the root
		// (assumes the root contains only the numbered sub-directories);
		// never less than 1.
		int dirIndex = Math.max(1, ArrayUtils.getLength(root.listFiles()));

		File saveDir = new File(root, String.valueOf(dirIndex));
		if (saveDir.exists()
				&& ArrayUtils.getLength(saveDir.listFiles()) >= imagelimit) {
			// Current directory is full: open the next numbered directory.
			saveDir = new File(root, String.valueOf(dirIndex + 1));
		}

		if (!saveDir.exists()) {
			FileUtils.forceMkdir(saveDir);
		}

		return saveDir;
	}

	/**
	 * Checks whether a URL may be submitted right now (POST). A URL present in
	 * the crawler cache (i.e. submitted within the cache's TTL window) is
	 * rejected.
	 *
	 * @param url page URL to check
	 * @return map with "result": 1 may submit, -4 submitted too recently
	 */
	@RequestMapping(value = "/isCrawler.json", method = RequestMethod.POST)
	@ResponseBody
	public Map<String, Object> isCrawler(
			@RequestParam("url") String url) throws Exception {

		logger.debug("isCrawler=" + url);

		Map<String, Object> retMap = new HashMap<String, Object>();
		retMap.put("result", validateCrawlerCache(url));
		return retMap;
	}

	/**
	 * Receives one crawled page (POST), validates it and stores it.
	 *
	 * Result codes under "result": 1 success, -1 database error,
	 * -2 invalid URL, -3 blank content, -4 same URL resubmitted within the
	 * cache window.
	 *
	 * @param content     page body (GBK bytes decoded as ISO-8859-1)
	 * @param description meta description (same encoding quirk)
	 * @param keywords    meta keywords (same encoding quirk)
	 * @param title       page title (same encoding quirk)
	 * @param udate       update time, epoch milliseconds as a string
	 * @param url         page URL
	 * @param cdate       creation time, epoch milliseconds as a string
	 * @param source      identifier of the originating site
	 * @return result map as described above
	 */
	@RequestMapping(value = "/crawler.json", method = RequestMethod.POST)
	@ResponseBody
	public Map<String, Object> receiveCrawler(
			@RequestParam("content") String content,
			@RequestParam("description") String description,
			@RequestParam("keywords") String keywords,
			@RequestParam("title") String title,
			@RequestParam("udate") String udate,
			@RequestParam("url") String url,
			@RequestParam("cdate") String cdate,
			@RequestParam("source") int source) throws Exception {

		logger.debug("receiveCrawler:url=" + url + ";title=" + title);

		Map<String, Object> retMap = new HashMap<String, Object>();

		Crawler crawler = new Crawler();
		crawler.setUrl(StringUtils.trimToEmpty(url));
		// Text parameters arrive decoded as ISO-8859-1 but are really GBK.
		crawler.setTitle(decodeGbk(title));
		crawler.setContent(decodeGbk(content));
		crawler.setDescription(decodeGbk(description));
		crawler.setKeywords(decodeGbk(keywords));
		crawler.setSource(source);
		// Timestamps are optional; only set when the string is numeric.
		if (NumberUtils.isNumber(cdate)) {
			crawler.setCdate(new Date(NumberUtils.toLong(cdate)));
		}
		if (NumberUtils.isNumber(udate)) {
			crawler.setUdate(new Date(NumberUtils.toLong(udate)));
		}

		int retCode = validateCrawler(crawler);
		if (retCode == 1) {
			// Persist; the DAO signals failure with -1.
			if (crawlerDAO.addCrawler(crawler) == -1) {
				retCode = -1;
			}
		}

		retMap.put("result", retCode);
		return retMap;
	}

	/**
	 * Re-decodes a request parameter whose GBK bytes the servlet container
	 * decoded as ISO-8859-1, then trims it (null-safe).
	 *
	 * @param raw parameter value as received from the container
	 * @return trimmed GBK-decoded string
	 * @throws IOException if either charset is unsupported (never on a
	 *                     standard JVM)
	 */
	private static String decodeGbk(String raw) throws IOException {
		return StringUtils.trimToEmpty(new String(raw.getBytes("iso-8859-1"), "gbk"));
	}

	/**
	 * Validates a crawler record and, on success, marks its URL as recently
	 * submitted in the cache.
	 *
	 * Return codes: 1 ok, -2 invalid URL, -3 blank content,
	 * -4 resubmitted within the cache window.
	 * (-1 database error is produced by the caller.)
	 *
	 * @param crawler record to validate
	 * @return validation code as described above
	 */
	private int validateCrawler(Crawler crawler) {
		UrlValidator urlValidator = new UrlValidator();
		if (!urlValidator.isValid(crawler.getUrl())) {
			return -2;
		}

		if (StringUtils.isBlank(crawler.getContent())) {
			return -3;
		}

		// Single cache lookup (the previous version queried the cache twice).
		int cacheCode = validateCrawlerCache(crawler.getUrl());
		if (cacheCode != 1) {
			return cacheCode;
		}

		// Remember the URL so resubmissions within the cache TTL get -4.
		cacheManager.getCache("crawlerCache").put(crawler.getUrl(), "1");
		return 1;
	}

	/**
	 * Checks the crawler cache for a recent submission of the given URL.
	 *
	 * @param url page URL
	 * @return 1 when the URL is not cached, -4 when it was submitted recently
	 */
	private int validateCrawlerCache(String url) {
		Cache cache = cacheManager.getCache("crawlerCache");
		if (cache.get(url) != null) {
			return -4;
		}

		return 1;
	}
}