package com.muki.spider.core;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

import org.apache.log4j.Logger;

/**
 * 分析获取url队列
 * 并将原文发送到数据仓库
 * @author ljh_2017
 *
 */
/**
 * Buffers crawled page content for delivery to the data warehouse.
 *
 * <p>Producers call {@link #parse(String, String, String, int)} to package a
 * crawled page (URL, raw content, topic, crawl depth) into a property map and
 * enqueue it on a bounded blocking queue; consumers drain it via {@link #get()}.
 * Blocking on a full/empty queue provides natural back-pressure between the
 * crawler and the warehouse sender.
 *
 * @author ljh_2017
 */
public class ContentMessageParser {

	/** Bounded buffer between the crawler (producer) and the warehouse sender (consumer). */
	private static final BlockingQueue<Map<String,String>> message = new ArrayBlockingQueue<>(1000);
	private static final Logger log = Logger.getLogger(ContentMessageParser.class);

	/** Utility class — all members are static; not instantiable. */
	private ContentMessageParser() {
	}

	/**
	 * Packages one crawled page into a property map ({@code url}, {@code content},
	 * {@code topic}, {@code depth}) and enqueues it, blocking if the queue is full.
	 *
	 * <p>If interrupted while waiting for queue space, the page is dropped, the
	 * interrupt status is restored, and a warning is logged.
	 *
	 * @param uri     the page URL
	 * @param context the raw page content
	 * @param topic   the crawl topic this page belongs to
	 * @param depth   the crawl depth at which the page was fetched
	 */
	public static void parse(String uri, String context, String topic, int depth) {
		Map<String,String> property = new HashMap<>();
		property.put("url", uri);
		property.put("content", context);
		property.put("topic", topic);
		property.put("depth", String.valueOf(depth));
		try {
			// property is freshly constructed above, so no null check is needed.
			message.put(property);
			log.info("send corpus of {"+uri+"} to the queue of topic {"+topic+"}");
		} catch (InterruptedException e) {
			// Restore the interrupt status so callers can observe the interruption,
			// instead of swallowing it with printStackTrace().
			Thread.currentThread().interrupt();
			log.warn("interrupted while enqueueing corpus of {"+uri+"}", e);
		}

	}

	/**
	 * Takes the next buffered page map, blocking until one is available.
	 *
	 * @return the next page property map (keys: url, content, topic, depth)
	 * @throws InterruptedException if interrupted while waiting
	 */
	public static Map<String,String> get() throws InterruptedException {
		return message.take();
	}

}
