package com.gxljc.bear.crawler.itaogao;

import java.io.IOException;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.FlatMapFunction;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;

import com.gxljc.commons.mongo.cli.BaseMongoRepository;
import com.gxljc.commons.util.Pair;
import com.gxljc.commons.util.StringUtil;
import com.gxljc.bear.crawler.base.BaseMongoTools;
import com.gxljc.bear.crawler.base.DolphinCrawlerConf;
import com.gxljc.bear.crawler.base.DolphinCrawlerConsts;
import com.gxljc.bear.crawler.base.DolphinFetchData;
import com.gxljc.bear.crawler.itaogao.util.ImageUtil;
import com.gxljc.bear.crawler.util.DateUtil;
import com.gxljc.bear.crawler.util.HtmlUtil;
import com.gxljc.bear.crawler.util.MailUtil;
import com.gxljc.bear.crawler.util.SparkUtil;

public abstract class NewsBaseCrawler implements Serializable {

	// serialization id: Spark ships this crawler instance to executors, so it must be Serializable
	private static final long serialVersionUID = 2573844488051480999L;
	// minimum article body length; shorter pages are treated as fetch failures and not saved
	private static final int CONTENT_MIN_SIZE = 80;
	// NOTE(review): mixed separators ("yyyy-MM/dd") look intentional for the target sites' url scheme — confirm
	protected static final String DEFAULT_DATE_FORMAT = "yyyy-MM/dd";
	protected static final Logger LOG = Logger.getLogger(NewsBaseCrawler.class);
	String collectionName; // MongoDB collection the crawled articles are upserted into
	String dbName; // MongoDB database holding that collection
	protected Boolean proxy = false; // whether HTTP fetches go through a proxy; initially false

	/** Enables or disables proxy usage for subsequent fetches. */
	public void setProxy(boolean proxy) {
		this.proxy = proxy;
	}

	/**
	 * @param collectionName MongoDB collection crawled articles are stored in
	 * @param dbName         MongoDB database holding that collection
	 */
	public NewsBaseCrawler(String collectionName, String dbName) {
		super();
		this.collectionName = collectionName;
		this.dbName = dbName;
	}

	/** Convenience constructor defaulting to the ITAOGAO database. */
	public NewsBaseCrawler(String collectionName) {
		this(collectionName, DolphinCrawlerConsts.MongoDBName.ITAOGAO.getName());
	}

	/**
	 * Crawls every seed returned by {@link #genSeeds()} sequentially, counting
	 * successful saves and logging progress every 500 successes. A failure on
	 * one seed is logged (with its cause) and does not abort the run.
	 */
	public void startAll() {
		int cnt = 0;
		for (String seed : genSeeds()) {
			try {
				cnt += crawl(seed) ? 1 : 0;
			} catch (Exception e) {
				// attach the cause to the log entry instead of dumping it to stderr
				LOG.error(String.format("***crawler fail for id=%s", seed), e);
			}
			// cnt > 0 guard: the old cnt % 500 == 0 check logged "cnt=0" on every
			// iteration until the first success
			if (cnt > 0 && cnt % 500 == 0) {
				LOG.info("cnt=" + cnt);
			}
		}
	}

	/**
	 * Crawls a single seed: builds the article table via {@link #genTable(String)}
	 * and persists it.
	 *
	 * @return true when the article was saved, false when it was rejected
	 * @throws Exception on fetch/persistence failure
	 */
	public boolean crawl(String seed) throws Exception {
		return save(genTable(seed));
	}

	/** Crawls all seeds distributed on a Spark cluster; see {@link #crawl(List)}. */
	public void crawl() {
		crawl(genSeeds());
	}

	/**
	 * Incremental update pass: crawls only the seeds from {@link #genNewSeeds()}.
	 * If seed generation yields nothing, waits 5s, switches the proxy on and
	 * retries once, then mails a summary of successes and elapsed time.
	 */
	public void update() {
		long start = System.currentTimeMillis();
		int cnt = 0;
		List<String> seeds = genNewSeeds();
		if (!validateObject(seeds)) {
			LOG.info("------- gen new seeds fail. Sleeping for retry ------- ");
			try {
				Thread.sleep(5 * 1000);
			} catch (InterruptedException e) {
				// restore the interrupt flag so callers can observe the interruption
				Thread.currentThread().interrupt();
			}
			setProxy(true);
			seeds = genNewSeeds();
		}
		if (seeds == null) {
			// still nothing after the retry: report an empty run instead of throwing NPE
			seeds = new ArrayList<>();
		}
		for (String id : seeds) {
			try {
				cnt += save(genTable(id)) ? 1 : 0;
			} catch (Exception e) {
				// log with cause; one bad seed must not stop the pass
				LOG.error(String.format("***crawler fail for id=%s", id), e);
			}
			// guard avoids the old "cnt=0" log spam while every seed fails
			if (cnt > 0 && cnt % 100 == 0) {
				LOG.info("cnt=" + cnt);
			}
		}
		long timeUsed = System.currentTimeMillis() - start;
		mailReport(cnt, timeUsed);
	}

	/**
	 * Distributed crawl: parallelizes the seeds as a Spark RDD and runs
	 * save(genTable(seed)) on each partition. The terminal count() only forces
	 * evaluation; per-partition success counts are logged on the executors.
	 *
	 * @param seeds seed urls/ids to crawl
	 */
	public void crawl(List<String> seeds) {
		int coreMax = 10; // executor core cap handed to the Spark context factory
		JavaSparkContext jsc = SparkUtil.createCommonsSparkContext("bear-" + this.collectionName + "-" + seeds.size(),
				coreMax, coreMax * 2, this.getClass());
		// defensive copy: the incoming list may not be serializable as-is
		JavaRDD<String> seedsRDD = jsc.parallelize(new ArrayList<String>(seeds));
		long count = seedsRDD.mapPartitions(new FlatMapFunction<Iterator<String>, Integer>() {
			private static final long serialVersionUID = 1L;

			@Override
			public Iterable<Integer> call(Iterator<String> seeds) throws Exception {
				int successCnt = 0;
				while (seeds.hasNext()) {
					try {
						int ret = save(genTable(seeds.next())) ? 1 : 0;
						if (ret == 1)
							successCnt++;
						if (successCnt % 100 == 0) {
							LOG.info("successCnt=" + successCnt);
						}
					} catch (Exception er) {
						// a failing seed must not kill the whole partition
						er.printStackTrace();
					}
				}
				// one element per partition: its success count
				return Arrays.asList(successCnt);
			}
		}).count();
		// count == number of partitions, not number of crawled pages
		LOG.info("all count=" + count);
		jsc.stop();
	}

	/**
	 * Fetches a url and runs it through the news template matcher.
	 *
	 * @param url   page to fetch
	 * @param proxy whether to fetch through a proxy
	 * @return pair of (template match result as json, raw page bytes); never
	 *         null — an empty pair is returned on any failure
	 */
	public Pair<String, byte[]> fetResult(String url, boolean proxy) {
		try {
			DolphinFetchData fetchData = DolphinFetchData
					.getInstance(DolphinCrawlerConsts.CrawlerChannelType.bear.getName());
			Pair<String, byte[]> pair = fetchData.getResult2Pair(url, proxy, ItaogaoConst.CRAWL_TIMEOUT);
			if (pair != null) {
				return pair;
			}
			LOG.error("page result is empty url = " + url);
		} catch (Exception e) {
			// log with cause (was dropped before) and fall through to the empty pair:
			// one bad url must not stop a crawl
			LOG.error(String.format("url=%s fetResult error!", url), e);
		}
		return new Pair<>("", "".getBytes());
	}

	/**
	 * Plain HTTP fetch of a page's raw bytes.
	 *
	 * @param url   page to download
	 * @param proxy whether to route the request through a proxy
	 * @return raw page source bytes
	 * @throws IllegalStateException if the fetcher cannot be initialized;
	 *         previously this surfaced as a NullPointerException that hid the cause
	 */
	public byte[] fetch(String url, boolean proxy) {
		DolphinFetchData fetchData;
		try {
			fetchData = DolphinFetchData.getInstance(DolphinCrawlerConsts.CrawlerChannelType.bear.getName());
		} catch (Exception e) {
			// fail fast with the real cause instead of an NPE on the next line
			throw new IllegalStateException("cannot obtain DolphinFetchData instance", e);
		}
		return fetchData.getHtml(url, proxy);
	}

	/**
	 * Joins paragraph strings with the crawler line-break marker ('####').
	 *
	 * @param paras paragraph texts; each is whitespace-trimmed before joining
	 * @return joined content; empty string for null/empty input
	 */
	public String genContent(List<String> paras) {
		if (paras == null) {
			return "";
		}
		// StringBuilder: single-threaded use, no need for StringBuffer's locking
		StringBuilder sb = new StringBuilder();
		for (String content : paras) {
			if (sb.length() > 0) {
				sb.append(DolphinCrawlerConsts.DEFAULT_LINE_BREAK_WORD);
			}
			// trim() already removes leading ASCII spaces, so the old
			// while (content.startsWith(" ")) loop was dead code
			sb.append(content.trim());
		}
		return sb.toString();
	}

	/**
	 * Joins paragraphs and appends the article images, each wrapped in the
	 * crawler's text-image placeholder format.
	 *
	 * @param paras  paragraph texts
	 * @param images platform image urls; may be null/empty
	 * @return text content, then a line break, then the image placeholders
	 */
	public String genContent(List<String> paras, List<String> images) {
		String content = genContent(paras);
		if (!validateObject(images)) {
			return content;
		}
		// images are known non-empty past the guard, so the old second
		// validateObject(images) check was always true — separator is unconditional
		StringBuilder sb = new StringBuilder(content);
		sb.append(DolphinCrawlerConsts.DEFAULT_LINE_BREAK_WORD);
		for (String image : images) {
			sb.append(String.format(DolphinCrawlerConsts.DEFAULT_TEXT_IMAGE_FORMAT, image));
		}
		return sb.toString();
	}

	/**
	 * Parses a date string with the given pattern.
	 *
	 * @param date   textual date
	 * @param format pattern, e.g. "yyyy-MM-dd HH:mm:ss" or "yyyy-MM-dd"
	 * @return parsed date; parse-failure behavior is delegated to DateUtil
	 */
	public Date genDate(String date, String format) {
		return DateUtil.parseFormat(date, format);
	}

	/**
	 * Pulls the publication date out of a url shaped like
	 * ".../html/&lt;date&gt;/content_...". When several such segments occur,
	 * the last one wins.
	 *
	 * @return parsed date, or null when no date segment is present
	 */
	protected Date extractDate(String url) {
		Matcher matcher = Pattern.compile("html/(.*?)/content_").matcher(url);
		String lastMatch = "";
		while (matcher.find()) {
			lastMatch = matcher.group(1);
		}
		if (StringUtil.isNullOrEmpty(lastMatch)) {
			return null;
		}
		// source sites use either yyyy-MM/dd or yyyy-MM-dd in the date segment
		String pattern = lastMatch.contains("/") ? "yyyy-MM/dd" : "yyyy-MM-dd";
		return DateUtil.parseFormat(lastMatch, pattern);
	}

	/** @return the full list of seed urls/ids for a complete crawl */
	protected abstract List<String> genSeeds();

	/** @return only seeds that appeared since the last run (incremental update) */
	protected abstract List<String> genNewSeeds();

	/** @return pair of (parsed news table, raw page bytes) for the given url */
	protected abstract Pair<NewsBaseTable, byte[]> genTable(String url);

	/**
	 * Persists a crawled article: upserts all non-empty fields of the table into
	 * MongoDB (keyed by table id) and archives the raw page into HBase.
	 *
	 * @param pair (parsed table, raw html bytes)
	 * @return false when the content is missing or shorter than CONTENT_MIN_SIZE
	 * @throws Exception on reflection or persistence failure
	 */
	@SuppressWarnings("rawtypes")
	public boolean save(Pair<NewsBaseTable, byte[]> pair) throws Exception {
		NewsBaseTable table = pair.first;
		LOG.info("data from url=" + table.getCrawlUrl() + "|title=" + table.getTitle());
		byte[] htmlByte = pair.second;
		String content = table.getContent();
		// reject pages whose body is too short to be a real article
		if (content == null || content.length() < CONTENT_MIN_SIZE) {
			return false;
		}
		BaseMongoRepository repo = BaseMongoTools.getInstance(dbName);
		Query query = new Query();
		query.addCriteria(Criteria.where(DolphinCrawlerConsts.MONGODB_DEFAULT_ID).is(table.getId()));
		Update update = new Update();
		for (Field field : table.getClass().getDeclaredFields()) {
			// getDeclaredFields() also returns statics such as serialVersionUID;
			// those must never be written into the document
			if (Modifier.isStatic(field.getModifiers())) {
				continue;
			}
			// private table fields are not readable from here without this
			field.setAccessible(true);
			Object value = field.get(table);
			if (value == null) {
				continue;
			}
			// skip empty collections/strings so the upsert never wipes existing data
			if (value instanceof List && CollectionUtils.isEmpty((List) value)) {
				continue;
			}
			if (value instanceof String && StringUtils.isEmpty((String) value)) {
				continue;
			}
			update.set(field.getName(), value);
		}
		repo.upsert(query, update, collectionName);
		save2hbase(table, htmlByte);
		return true;
	}

	/**
	 * Archives the raw page bytes and the article title into the HBase page
	 * table, keyed by the table id.
	 *
	 * @param table    parsed article (supplies row key and title)
	 * @param htmlByte raw page bytes; may be null (then only the title is stored)
	 * @return true when the put succeeded
	 */
	protected boolean save2hbase(NewsBaseTable table, byte[] htmlByte) {
		HTable htable = null;
		try {
			Configuration conf = DolphinCrawlerConf.getInstance();
			htable = new HTable(conf, ItaogaoConst.HBASE_TABLE_PAGE);
			String rowKey = table.getId();
			Put put = new Put(org.apache.hadoop.hbase.util.Bytes.toBytes(rowKey));
			if (htmlByte != null) {
				put.add(org.apache.hadoop.hbase.util.Bytes.toBytes(ItaogaoConst.HBASE_FAMILY_DATA),
						org.apache.hadoop.hbase.util.Bytes.toBytes(ItaogaoConst.SCHEMA_COLUMN_NAME.HTML.getValue()),
						htmlByte);
			}
			if (StringUtils.isNotEmpty(table.getTitle())) {
				put.add(org.apache.hadoop.hbase.util.Bytes.toBytes(ItaogaoConst.HBASE_FAMILY_DATA),
						org.apache.hadoop.hbase.util.Bytes.toBytes(ItaogaoConst.SCHEMA_COLUMN_NAME.TITLE.getValue()),
						org.apache.hadoop.hbase.util.Bytes.toBytes(table.getTitle()));
			}
			htable.put(put);
			return true;
		} catch (Exception e) {
			// log with cause; both the old inner and outer catch returned false
			LOG.error("save2hbase failed for id=" + table.getId(), e);
			return false;
		} finally {
			// the original leaked the HTable connection on every call
			if (htable != null) {
				try {
					htable.close();
				} catch (IOException ignored) {
					// best-effort close; nothing useful to do here
				}
			}
		}
	}

	/**
	 * Builds every date string from 2016-01-01 up to and including today,
	 * rendered with the supplied pattern (e.g. "yyyy-MM/dd" or "yy-MM-dd").
	 *
	 * @param format output pattern for each day
	 * @return ordered list of formatted day strings
	 */
	public List<String> genDatesTillNow(String format) {
		List<String> dates = new LinkedList<>();
		String today = DateUtil.dateFormat(new Date(), format);
		// walk forward one calendar day at a time, re-parsing the formatted value
		String day = DateUtil.dateFormat(DateUtil.parseFormat("2016-01-01"), format);
		dates.add(day);
		while (!today.equals(day)) {
			Date next = DateUtil.addCalendar(DateUtil.parseFormat(day, format), 1);
			day = DateUtil.dateFormat(next, format);
			dates.add(day);
		}
		return dates;
	}

	/**
	 * Null/emptiness check for the common payload types.
	 *
	 * @param o candidate object
	 * @return true for a non-empty Map/List/String, or any other non-null object
	 */
	@SuppressWarnings("rawtypes")
	public boolean validateObject(Object o) {
		// instanceof already yields false for null, so the old extra
		// "o != null &&" in each branch was redundant
		if (o instanceof Map) {
			return !((Map) o).isEmpty();
		}
		if (o instanceof List) {
			return !((List) o).isEmpty();
		}
		if (o instanceof String) {
			return !((String) o).isEmpty();
		}
		return o != null;
	}

	/**
	 * Validates every supplied object with {@link #validateObject(Object)}.
	 *
	 * @param list objects to check
	 * @return true only when all objects pass; stops at the first failure
	 */
	public boolean validateObjects(Object... list) {
		boolean allValid = true;
		for (int i = 0; i < list.length && allValid; i++) {
			allValid = validateObject(list[i]);
		}
		return allValid;
	}

	/**
	 * Strips a leading "./" from a relative path. NOTE(review): despite the
	 * name this removes a prefix, not a suffix; renaming would break callers.
	 */
	public String removeSuffix(String str) {
		if (str.startsWith("./")) {
			return str.substring(2);
		}
		return str;
	}

	/**
	 * Converts content-page image urls to platform-hosted urls via ImageUtil.
	 *
	 * @param images content-page image url list
	 * @return converted urls, or null when input is empty or conversion fails
	 */
	public List<String> imageConvertplatform(List<String> images) {
		if (validateObject(images)) {
			try {
				return ImageUtil.imageConvertplatform(images);
			} catch (Exception e) {
				LOG.error("***covert error!");
				e.printStackTrace();
			}
		}
		return null;
	}

	/**
	 * Converts a single thumbnail url to its platform-hosted form.
	 *
	 * @param image news thumbnail url
	 * @return converted url, or null on failure
	 */
	public String imageConvertplatform(String image) {
		List<String> converted = imageConvertplatform(Arrays.asList(image));
		if (validateObject(converted)) {
			return converted.get(0);
		}
		return null;
	}

	/**
	 * Resolves a possibly-relative picture url against the page's domain url.
	 *
	 * @param picUrl    e.g. "http://baike.baidu.com/pic/5135.jpg" or "../../pic/3124.jpg"
	 * @param domainUrl page url supplying the base, e.g. "http://baike.baidu.com/people/star/3214"
	 * @return assembled picture url, or null when either argument is empty
	 */
	public String pictureUrlPref(String picUrl, String domainUrl) {
		if (validateObjects(picUrl, domainUrl)) {
			return HtmlUtil.pictureUrlPref(picUrl, domainUrl);
		}
		return null;
	}

	/**
	 * Expands relative article page paths into full urls using the given format.
	 * Paths starting with "content" get both the date and the path substituted;
	 * paths starting with "/Content" (e.g. "/Content/2016-9/18/N211807625790")
	 * get only the path substituted. Anything else is dropped.
	 */
	protected List<String> completeUrl(String format, String date, List<String> pageUrl) {
		List<String> completed = new LinkedList<>();
		for (String path : pageUrl) {
			if (path.startsWith("content")) {
				completed.add(String.format(format, date, path));
			} else if (path.startsWith("/Content")) {
				completed.add(String.format(format, path));
			}
		}
		return completed;
	}

	/**
	 * Resolves, converts and attaches the article images: each image url is made
	 * absolute against the page url, converted to a platform url, and the first
	 * one also becomes the article logo.
	 *
	 * @param url    article page url used to absolutize relative image paths
	 * @param table  table to enrich
	 * @param images content-page image urls
	 * @return the same table, enriched only when all inputs are non-empty
	 */
	protected NewsBaseTable processImages(String url, NewsBaseTable table, List<String> images) {
		if (!validateObjects(url, table, images)) {
			return table;
		}
		List<String> absoluteUrls = new LinkedList<>();
		for (String image : images) {
			absoluteUrls.add(pictureUrlPref(image, url));
		}
		table.setImages(imageConvertplatform(absoluteUrls));
		table.setLogoUrl(imageConvertplatform(absoluteUrls.get(0)));
		return table;
	}

	/**
	 * Extracts the run of characters (ASCII letters, digits, CJK) immediately
	 * following the given prefix; when several occurrences exist the last wins.
	 * NOTE(review): the prefix is interpolated unescaped into the regex, so
	 * regex metacharacters in it would change matching — confirm callers pass literals.
	 *
	 * @param prefix literal text preceding the wanted value
	 * @param string text to scan
	 * @return the matched value, or null when nothing matches
	 */
	protected String extractByPrefix(String prefix, String string) {
		Matcher matcher = Pattern.compile(prefix + "([a-zA-Z0-9\u4E00-\u9FA5]+)").matcher(string);
		String extracted = "";
		while (matcher.find()) {
			extracted = matcher.group(1);
		}
		if (StringUtil.isNullOrEmpty(extracted)) {
			return null;
		}
		return extracted;
	}

	/**
	 * Mails a crawl summary report; skipped on the DEV and A platforms.
	 *
	 * @param count    number of successfully crawled urls
	 * @param timeUsed total elapsed time in milliseconds
	 */
	private void mailReport(int count, long timeUsed) {
		LOG.info("sending E-mail");
		String platformName = DolphinCrawlerConf.getInstance().get(DolphinCrawlerConsts.PARAM_PLATFORM_NAME);
		if ("DEV".equals(platformName) || "A".equals(platformName)) {
			// development platforms never send report mails
			return;
		}
		String subject = "Hasky News crawler";
		String template = "Result:<br/>platform=%s fetching <strong>size=%s</strong> item(s) from %s with proxy=%s used total time=%s second(s)! ";
		String body = String.format(template, platformName, count, this.collectionName, String.valueOf(proxy),
				String.valueOf(timeUsed / 1000));
		MailUtil.sendMail(subject, body);
	}

}
