package com.rsstuan.web.controller;


import java.io.IOException;

import javax.servlet.ServletInputStream;
import javax.servlet.http.*;

import org.mortbay.log.Log;

import com.rsstuan.crawl.ICrawler;
import com.rsstuan.crawl.data.CrawledData;
import com.rsstuan.crawl.impl.SimpleCrawler;
import com.rsstuan.subscribe.Subscriber;
import com.rsstuan.utils.IdUtils;
import com.rsstuan.utils.StringUtils;

@SuppressWarnings("serial")
public class PubSubServlet extends HttpServlet{

	/** Persists pushed feed payloads. */
	ICrawler crawler;

	/** Issues subscription requests to the hub. */
	Subscriber subscriber;

	public PubSubServlet() {
		crawler = new SimpleCrawler();
		subscriber = new Subscriber();
	}

	/**
	 * Handles the hub's subscription verification handshake: echoes the
	 * {@code hub.challenge} parameter back with 200 OK, then (re)subscribes
	 * to the feed named by the {@code fu} request parameter.
	 *
	 * Requests without a {@code hub.challenge} parameter are ignored.
	 *
	 * @throws IOException if writing the response fails
	 */
	public void doGet(HttpServletRequest request, HttpServletResponse response)
			throws IOException {
		if (request.getParameter("hub.challenge") == null) {
			return;
		}

		String feedUrl = request.getParameter("fu");
		// BUG FIX: the original condition was inverted — it generated a feed
		// id for a BLANK url and printed "feed url is empty" for a valid one.
		if (StringUtils.isBlank(feedUrl)) {
			response.getWriter().println("feed url is empty");
			return;
		}
		try {
			// Result intentionally discarded: the call is used only to
			// validate that the feed url can be parsed into an id.
			IdUtils.generateFeedId(feedUrl);
		}
		catch (Exception e) {
			Log.warn("failed to parse feed url = " + feedUrl);
			return;
		}

		String challenge = request.getParameter("hub.challenge");
		response.setStatus(HttpServletResponse.SC_OK);
		response.getWriter().println(challenge);

		Log.info("doGet with feedUrl = " + feedUrl + " and hub.challenge = " + challenge);

		subscriber.subscribe(feedUrl);
	}

	/**
	 * Receives a pushed feed document from the hub and stores the raw bytes
	 * via the crawler. The feed is identified by the {@code fu} request
	 * parameter; requests with a blank feed url are ignored.
	 */
	public void doPost(HttpServletRequest request, HttpServletResponse response) {
		try {
			String feedUrl = request.getParameter("fu");
			// BUG FIX: inverted blank check (same defect as doGet).
			if (StringUtils.isBlank(feedUrl)) {
				return;
			}
			Long feedId;
			try {
				feedId = IdUtils.generateFeedId(feedUrl);
			}
			catch (Exception e) {
				Log.warn("failed to parse feed url = " + feedUrl);
				return;
			}

			// BUG FIX: guard against getContentLength() == -1 (unknown
			// length), which would have thrown NegativeArraySizeException.
			int length = Math.max(request.getContentLength(), 0);
			byte[] data = new byte[length];
			ServletInputStream stream = request.getInputStream();
			// BUG FIX: a single read() may return fewer bytes than requested;
			// loop until the declared content length is fully consumed.
			int offset = 0;
			while (offset < length) {
				int n = stream.read(data, offset, length - offset);
				if (n < 0) {
					break; // stream ended early; keep what was read
				}
				offset += n;
			}

			CrawledData crawledData = new CrawledData();
			crawledData.setData(data);
			crawledData.setCrawleDataTime(System.currentTimeMillis());
			crawledData.setFeedId(feedId);
			crawledData.setFeedUrl(feedUrl);
			crawler.doSave(crawledData);

			Log.info("doPost save data finished");

		} catch (IOException e) {
			// BUG FIX: the original swallowed this exception silently.
			Log.warn("doPost failed to read request body", e);
		}

	}
}
