package com.goldgov.information.web;

import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

import javax.servlet.http.HttpServletRequest;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.goldgov.information.service.ArticleRules;
import com.goldgov.information.service.ArticleRulesQuery;
import com.goldgov.information.service.ArticleRulesService;
import com.goldgov.information.service.ArticleService;
import com.goldgov.kcloud.core.json.JsonErrorObject;
import com.goldgov.kcloud.core.json.JsonObject;
import com.goldgov.kcloud.core.json.JsonQueryObject;
import com.goldgov.kcloud.core.json.JsonSuccessObject;
import com.goldgov.utils.CmsUtils;
import com.goldgov.utils.CrawlerUtils;
import com.goldgov.utils.CrawlerUtilsTest;
import com.goldgov.utils.HttpUtil;
import com.goldgov.utils.PropertyUtil;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiImplicitParam;
import io.swagger.annotations.ApiImplicitParams;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import springfox.documentation.annotations.ApiIgnore;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Spider;

/**
 * @ClassName:InformationController
 * @Description: Information collection: endpoints that publish crawled
 *               articles to the CMS, manage crawl rules and run the
 *               WebMagic spider against configured rules.
 * @author MaN
 * @date 2018-07-11
 */

@RestController
@RequestMapping("/portal/information")
@Api("信息发布接口")
public class InformationController {

	/**
	 * Timestamp pattern used for rule bookkeeping. {@link SimpleDateFormat} is
	 * NOT thread-safe, so each call site builds its own short-lived instance
	 * instead of sharing one on this singleton controller bean.
	 */
	private static final String TS_PATTERN = "yyyy-MM-dd HH:mm:ss";

	@Autowired
	private ArticleService articleService;

	@Autowired
	private ArticleRulesService articleRulesService;

	protected final Log logger = LogFactory.getLog(this.getClass());

	/**
	 * Publishes the given articles and kicks off a CMS sync.
	 *
	 * @param ids     IDs of the articles to publish
	 * @param request unused, kept for signature compatibility
	 * @return the shared success marker (no per-request state is written to it)
	 * @throws IOException propagated from the publish step
	 */
	@PostMapping("/publishArticles")
	@ApiOperation(value = "发布文章到cms", notes ="")
	public JsonObject<Object> publishArticles(@ApiParam(value = "articleIDs", required = true) @RequestParam("ids") String[] ids,HttpServletRequest request) throws IOException{
		articleService.publishArticles(ids);
		new CmsUtils().start();
		return JsonSuccessObject.SUCCESS;
	}

	/**
	 * Starts or stops collection by stamping the selected rules with the
	 * current time and the requested running state.
	 *
	 * @param obj     carries the target rule IDs and the new running state
	 * @param request unused, kept for signature compatibility
	 * @return the shared success marker (no per-request state is written to it)
	 * @author RongSL
	 * @Date 2018/08/17
	 * @version 1.0.0
	 */
	@GetMapping("/startReptilian")
	@ApiOperation(value = "启动信息采集", notes ="")
	@ApiImplicitParams({ 
		@ApiImplicitParam(name = "rulesID", value = "规则ID", paramType = "query"),
		@ApiImplicitParam(name = "runningState", value = "启动or停止", paramType = "query")})
	public JsonObject<Object> startReptilian(@ApiIgnore ArticleRules obj,HttpServletRequest request){
		// Stamp once; the same ArticleRules instance is reused for every rule ID,
		// only its rulesID field changes per iteration.
		obj.setLastOperationDate(new SimpleDateFormat(TS_PATTERN).format(new Date()));
		if (PropertyUtil.objectNotEmpty(obj.getRulesIDs())) {
			for (String rulesID : obj.getRulesIDs()) {
				obj.setRulesID(rulesID);
				articleRulesService.updateArticleRules(obj);
			}
		}
		return JsonSuccessObject.SUCCESS;
	}

	/**
	 * Runs the crawler for every rule matching the query and, if anything was
	 * fetched, triggers a CMS sync.
	 *
	 * @param query   rule filter (rulesIDs); paging is disabled to fetch all
	 * @param request unused, kept for signature compatibility
	 * @return a per-request success payload reporting the number of articles
	 * @author RongSL
	 * @Date 2018/08/17
	 * @version 1.0.0
	 */
	@PostMapping("/runReptilian")
	@ApiOperation(value = "开始抓取信息", notes ="")
	@ApiImplicitParams({@ApiImplicitParam(name = "rulesIDs", value = "规则IDs", paramType = "query")})
	public JsonObject<Object> runReptilian(@ApiIgnore ArticleRulesQuery query,HttpServletRequest request){
		// Fetch every matching rule in one page.
		query.setPageSize(-1);
		List<ArticleRules> rules = articleRulesService.getArticleRulesList(query);
		// Truncate "now" to whole seconds — this is all the old
		// format-then-parse round-trip achieved, minus the swallowed
		// ParseException. The same instant is shared by all rules of this run.
		Date now = new Date(System.currentTimeMillis() / 1000 * 1000);
		int count = 0;
		for (ArticleRules ar : rules) {
			count += crawlRule(ar, now);
		}
		if (count > 0) {
			new CmsUtils().start();
		}
		return buildSuccess(count);
	}

	/**
	 * Dry-runs a crawl expression against a test URL and returns the matched
	 * results without persisting anything.
	 *
	 * @param query   carries testUrl, testRule and testArticleUrl
	 * @param request unused, kept for signature compatibility
	 * @return the query echoed back with its resultList filled in
	 * @author RongSL
	 * @Date 2018/08/17
	 * @version 1.0.0
	 */
	@SuppressWarnings("unchecked")
	@RequestMapping("/testCrawlerExpression")
	@ApiOperation(value = "测试爬虫表达式", notes ="测试测试")
	@ApiImplicitParams({ 
		@ApiImplicitParam(name = "testUrl", value = "测试网址", paramType = "query"),
		@ApiImplicitParam(name = "testRule", value = "爬虫表达式", paramType = "query"),
		@ApiImplicitParam(name = "testArticleUrl", value = "文章地址规则", paramType = "query"),
	})
	public JsonQueryObject<ArticleRules> testCrawlerExpression(@ApiIgnore ArticleRulesQuery query,HttpServletRequest request){
		// Pack the ad-hoc rule into the same "_rules" extra the real crawler uses.
		JSONObject json = new JSONObject();
		json.put("_level", 1);
		json.put("_url", query.getTestUrl());
		json.put("_rule", query.getTestRule());
		json.put("_articleUrlRule", query.getTestArticleUrl());
		Request seed = new Request();
		seed.putExtra("_rules", json);
		seed.setUrl(json.get("_url").toString());
		Spider spider = Spider.create(new CrawlerUtilsTest()).addRequest(seed);
		spider.run();
		query.setResultList(spider.getTestResult());
		return new JsonQueryObject<>(query);
	}

	/**
	 * Runs the crawler for every rule configured under the given CMS
	 * categories (comma-separated IDs) and syncs the CMS if anything was fetched.
	 *
	 * @param query   carries searchCmsCategoryID ("id1,id2,...")
	 * @param request unused, kept for signature compatibility
	 * @return error payload (code 100) when no rules exist, otherwise a
	 *         success payload reporting the number of articles
	 * @author RongSL
	 * @Date 2018/08/17
	 * @version 1.0.0
	 */
	@PostMapping("/runReptilianByCategoryId")
	@ApiOperation(value = "根据栏目批量抓取文章", notes ="抓取文章")
	@ApiImplicitParams({ 
		@ApiImplicitParam(name = "searchCmsCategoryID", value = "栏目ID", paramType = "query")
	})
	public JsonObject<Object> runReptilianByCategoryId(@ApiIgnore ArticleRulesQuery query,HttpServletRequest request){
		// Split the comma-separated category list into the array the query expects.
		if (PropertyUtil.objectNotEmpty(query.getSearchCmsCategoryID())) {
			query.setSearchCmsCategoryIDs(query.getSearchCmsCategoryID().split(","));
			query.setSearchCmsCategoryID(null);
		}
		List<ArticleRules> rules = articleRulesService.getArticleRulesList(query);
		if (rules.isEmpty()) {
			// Build a per-request error object; mutating the shared
			// JsonErrorObject.ERROR singleton raced between requests.
			JsonObject<Object> error = new JsonErrorObject();
			error.setCode("100");
			error.setMessage("该栏目下尚未配置任何抓取规则");
			return error;
		}
		int count = 0;
		for (ArticleRules ar : rules) {
			// Unlike runReptilian, each rule here is stamped with its own "now".
			count += crawlRule(ar, new Date());
		}
		if (count > 0) {
			new CmsUtils().start();
		}
		return buildSuccess(count);
	}

	/**
	 * Crawls a single rule: stamps its last-operation time, primes the
	 * CrawlerUtils context and runs a WebMagic spider over the rule's URL.
	 *
	 * @param ar         the rule to crawl (read-only here)
	 * @param createDate timestamp recorded both on the rule and on the
	 *                   articles created by this crawl
	 * @return the number of articles fetched for this rule, 0 on failure
	 */
	private int crawlRule(ArticleRules ar, Date createDate) {
		// Fresh instance with only the changed fields keeps the UPDATE narrow
		// (performance note inherited from the original code).
		ArticleRules stamp = new ArticleRules();
		stamp.setRulesID(ar.getRulesID());
		stamp.setLastOperationDate(new SimpleDateFormat(TS_PATTERN).format(createDate));
		articleRulesService.updateArticleRules(stamp);
		try {
			Request seed = new Request();
			seed.putExtra("_level", 1);
			seed.putExtra("_rules", JSON.toJSONString(ar));
			seed.setUrl(ar.getUrlAddress());
			// NOTE(review): CrawlerUtils keeps the crawl context in static
			// fields, so concurrent crawl requests would trample each other —
			// confirm before allowing parallel invocations of these endpoints.
			CrawlerUtils.setCount(0);
			CrawlerUtils.setRulesID(ar.getRulesID());
			CrawlerUtils.setCreateDate(createDate);
			CrawlerUtils.setIds(new ArrayList<String>());
			// Publish automatically only when the rule is NOT marked for audit.
			Spider spider = Spider.create(new CrawlerUtils()).addRequest(seed)
					.thread(10).isTest(false)
					.isPublish(ArticleRules.AUDIT_STATE_YES.intValue() != ar.getIsAudit().intValue());
			spider.run();
			return CrawlerUtils.getCount();
		} catch (Exception e) {
			logger.error("========根据栏目批量抓取文章异常", e);
			return 0;
		}
	}

	/**
	 * Builds a per-request success payload. The previous code mutated the
	 * shared {@code JsonSuccessObject.SUCCESS} singleton, letting concurrent
	 * requests overwrite each other's code/message.
	 *
	 * @param count number of articles fetched in this run
	 * @return a fresh success object with code 200 and a summary message
	 */
	private JsonObject<Object> buildSuccess(int count) {
		JsonObject<Object> result = new JsonSuccessObject();
		result.setCode("200");
		result.setMessage("本次成功抓取" + count + "条文章");
		return result;
	}

	/**
	 * Network diagnostics: probes a known URL through several transports and
	 * logs timing/status output via the class logger (was System.out).
	 */
	@GetMapping("/testUrl")
	@ApiOperation(value = "测试网络", notes ="")
	public void testUrl() {
		String url = "http://www.baidu.com";
		logger.info("=====================jsoup");
		HttpUtil.jsoup(url);

		logger.info("======================curl");
		String[] cmds = {"curl", "-i", "-w", "状态%{http_code}；DNS时间%{time_namelookup}；"
				+ "等待时间%{time_pretransfer}TCP 连接%{time_connect}；发出请求%{time_starttransfer}；"
				+ "总时间%{time_total}", url};
		HttpUtil.curl(cmds);

		logger.info("========================url");
		HttpUtil.url(url);

		logger.info("=======================scoket");
		HttpUtil.socket("www.baidu.com");
	}
}