package com.doculibre.constellio.connector.twitter;

import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

import twitter4j.Status;
import twitter4j.TwitterException;

import com.doculibre.constellio.connector.documents.RemoveDocument;
import com.doculibre.constellio.connector.documents.SmartDocument;
import com.doculibre.constellio.connector.persistence.config.PersistenceContext;
import com.doculibre.constellio.connector.persistence.model.TweetDoc;
import com.doculibre.constellio.connector.persistence.services.PersistenceUtils;
import com.doculibre.constellio.connector.persistence.services.TweetServices;
import com.doculibre.constellio.connector.twitter.api.TwitterAPIUtils;
import com.google.enterprise.connector.spi.Document;
import com.google.enterprise.connector.spi.DocumentList;
import com.google.enterprise.connector.spi.RepositoryException;
import com.google.enterprise.connector.spi.SpiConstants;
import com.google.enterprise.connector.spi.TraversalManager;

/**
 * 
 * Twitter Traversal Manager
 * 
 * @author France Labs
 * 
 */
public class TwitterTraversalManager implements TraversalManager {

	private static final Logger LOG = Logger.getLogger(TwitterTraversalManager.class
			.getName());

	// Default number of documents per batch when no (or a zero) hint is given.
	private final static int RECOMMENDED_HINT = 100;
	// Hard upper bound on the batch size, whatever the caller requests.
	private final static int MAX_HINT = 100000;

	// Persistence service tracking which tweet IDs have already been indexed.
	private final TweetServices tweetServices;

	// The recommended number of documents to return in each batch.
	private int batchHint = RECOMMENDED_HINT;

	// Twitter account (screen name) whose timeline is traversed.
	private final String twitterAccount;
	private final PersistenceContext persistenceContext;

	/**
	 * Builds a traversal manager for one Twitter account.
	 *
	 * @param persistenceContext persistence context used to record indexed tweet IDs
	 * @param twitterAccount screen name of the account to traverse
	 */
	public TwitterTraversalManager(PersistenceContext persistenceContext,
			String twitterAccount) {
		this.persistenceContext = persistenceContext;
		this.twitterAccount = twitterAccount;
		this.tweetServices = new TweetServices(TweetDoc.class,
				persistenceContext);
	}

	/**
	 * Sets the recommended number of documents to return in the next batch.
	 * A zero hint restores the default ({@value #RECOMMENDED_HINT}); hints
	 * above {@value #MAX_HINT} are clamped.
	 *
	 * @param hint requested batch size, must not be negative
	 * @throws RepositoryException if {@code hint} is negative
	 */
	public void setBatchHint(int hint) throws RepositoryException {
		if (hint < 0)
			throw new RepositoryException(
					"BatchHint must be a positive integer");
		else if (hint == 0)
			batchHint = RECOMMENDED_HINT;
		else if (hint > MAX_HINT)
			batchHint = MAX_HINT;
		else
			batchHint = hint;
	}

	// Start the traversal from the logical beginning of the repository
	// (checkpoint "0" means "no lower bound on tweet IDs").
	public DocumentList startTraversal() throws RepositoryException {
		return traverse("0");
	}

	// Resume traversal from the saved checkpoint position in the repository,
	// i.e. at the next document after the last one returned.
	public DocumentList resumeTraversal(String checkpoint) {
		return traverse(checkpoint);
	}

	/**
	 * Retrieves from the Twitter API up to "batchHint" tweets relative to the
	 * ID "checkpoint" and builds the corresponding list of documents.
	 * <p>
	 * Tweets already present in the local database are skipped; database
	 * entries in the retrieved ID range that the API no longer returns are
	 * treated as deleted tweets and emitted as {@link RemoveDocument}s.
	 * Each new tweet becomes a {@link SmartDocument} whose properties are
	 * used by Constellio for indexing.
	 *
	 * @param checkpoint tweet-ID boundary of the batch; "0" (or any
	 *            unparseable value) restarts from the beginning
	 * @return the list of documents for this batch, with the next checkpoint
	 */
	private DocumentList traverse(String checkpoint) {

		long checkpointL;
		String nextCheckpoint = null;
		List<Status> tweets = null;
		List<Document> docList = new ArrayList<Document>();

		try {
			checkpointL = Long.parseLong(checkpoint);
		} catch (NumberFormatException e) {
			// An unreadable checkpoint restarts the traversal from scratch.
			checkpointL = 0L;
		}

		if (checkpointL == 0L) {
			LOG.info("Traversal started on Twitter account " + twitterAccount);
		} else {
			LOG.info("Traversal started on Twitter account " + twitterAccount
					+ " of tweets older than ID " + checkpointL);
		}

		try {
			// Get "batchHint" tweets of the twitterAccount relative to the ID "checkpointL"
			tweets = TwitterAPIUtils.getTweets(twitterAccount, checkpointL,
					batchHint);

			LOG.info(tweets.size()
					+ " Tweets has been retreived for Twitter account "
					+ twitterAccount);

			// If we retrieved some tweets from the API
			if (!tweets.isEmpty()) {

				// Range of IDs of the retrieved tweets; the API returns them
				// newest-first, so the last element carries the smallest ID.
				long minID = tweets.get(tweets.size() - 1).getId();
				long maxID = tweets.get(0).getId();

				// On a fresh traversal the upper bound is left open
				// (presumably "0" means "no upper limit" to the query below).
				if (checkpointL == 0L)
					maxID = 0L;

				// Tweets in this ID range that have already been indexed.
				List<Long> alreadyIndexedTweetIDs = tweetServices
						.getListIDTweets(minID, maxID);

				PersistenceUtils.beginTransaction(persistenceContext);

				// For each tweet retrieved from the API
				for (Status tweet : tweets) {
					Long currentID = tweet.getId();

					if (alreadyIndexedTweetIDs.contains(currentID)) {
						// Already indexed: skip it, and mark it as still alive
						// by removing it from the "already indexed" list.
						LOG.info("Skip Document " + currentID
								+ " - already indexed");
						alreadyIndexedTweetIDs.remove(currentID);
					} else {
						// Record the tweet as indexed and emit its document.
						persistTweet(tweet);
						docList.add(createDocumentFromTweet(tweet));
					}
				}

				// IDs left over were indexed before but are no longer returned
				// by the API: the tweets were deleted, so remove them.
				for (Long toBeRemovedTweetID : alreadyIndexedTweetIDs) {
					LOG.info("Remove Document " + toBeRemovedTweetID);
					TweetDoc toBeRemoved = tweetServices.get(toBeRemovedTweetID);
					tweetServices.makeTransient(toBeRemoved);
					docList.add(new RemoveDocument(Long.toString(toBeRemovedTweetID)));
				}

				// Checkpoint for the next traversal: the oldest tweet indexed.
				nextCheckpoint = Long.toString(minID);
				PersistenceUtils.commitTransaction(persistenceContext);

			}

			// Fewer tweets than batchHint means the timeline is exhausted:
			// reset the checkpoint so the next traversal starts over.
			if (tweets.size() < batchHint)
				nextCheckpoint = "0";

		} catch (TwitterException e) {
			LOG.log(Level.SEVERE, "Failed to retrieve Tweets for Twitter account "
					+ twitterAccount, e);
			// If the retrieval fails, keep the same checkpoint for next traversal.
			nextCheckpoint = checkpoint;
		}

		return new TwitterDocumentList(docList, nextCheckpoint);

	}

	/**
	 * Creates a tweet entity (that only contains the tweet's ID) and persists
	 * it in the database of already-indexed tweets.
	 *
	 * @param tweet the tweet to record as indexed
	 */
	private void persistTweet(Status tweet) {
		TweetDoc tweetDoc = new TweetDoc();
		tweetDoc.setId(tweet.getId());
		tweetServices.makePersistent(tweetDoc);
	}

	/**
	 * Creates a document from a tweet. The document carries the properties
	 * (ID, last-modified date, content, title, display URL) that Constellio
	 * uses for indexing.
	 *
	 * @param tweet the tweet to convert
	 * @return the indexable document built from the tweet
	 */
	private SmartDocument createDocumentFromTweet(Status tweet) {

		Map<String, List<Object>> properties;
		properties = new HashMap<String, List<Object>>();
		properties.put(SpiConstants.PROPNAME_DOCID,
				Collections.singletonList((Object) tweet.getId()));
		GregorianCalendar now = new GregorianCalendar();
		now.setTime(tweet.getCreatedAt());
		properties.put(SpiConstants.PROPNAME_LASTMODIFIED,
				Collections.singletonList((Object) now));
		properties.put(SpiConstants.PROPNAME_CONTENT,
				Collections.singletonList((Object) tweet.getText()));
		properties.put(SpiConstants.PROPNAME_TITLE,
				Collections.singletonList((Object) twitterAccount));

		try {
			properties.put(SpiConstants.PROPNAME_DISPLAYURL, Collections
					.singletonList((Object) TwitterAPIUtils.getTwitterURL(
							twitterAccount).toString()));
		} catch (MalformedURLException e) {
			// The document stays usable without a display URL; just log it.
			LOG.log(Level.WARNING, "Could not build Twitter URL for account "
					+ twitterAccount, e);
		}

		return new SmartDocument(properties);

	}

}
