package ca.cbc.gisdigester;

import ca.cbc.giscommon.dao.*;
import ca.cbc.giscommon.entity.*;
import ca.cbc.giscommon.util.HibernateUtil;
import ca.cbc.gisdigester.digester.EptStoryDigester;
import ca.cbc.gisdigester.digester.EventDigester;
import ca.cbc.gisdigester.digester.FeedContentDigester;
import ca.cbc.gisdigester.digester.TweetDigester;
import ca.cbc.gisdigester.publisher.Publisher;

import org.hibernate.HibernateException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;

public class Main {
    // SLF4J logger for this class; made final per convention — it is assigned exactly once.
    private static final Logger logger = LoggerFactory.getLogger(Main.class);

    /**
     * Digests each of the given EPT story files and saves (insert-or-update)
     * the resulting stories, all within a single transaction.
     *
     * @param fileNames paths of the EPT story files to digest and persist
     */
    public static void digestAndSaveEptStory(final String[] fileNames) {
    	transactionTemplate(new HibernateCallback() {
    		public Serializable doHibernate() {
    			logger.info("Starting to digest and save ept stories.");
		        StoryDAO storyDAO = new StoryDAO();
		        for (String fileName : fileNames) {
		            storyDAO.saveOrUpdate(EptStoryDigester.instance.digest(fileName));
		        }
		        // FIX: log message typo "etp" corrected to "ept".
		        logger.info("Done digest and save ept stories, {} row affected.", fileNames.length);
		        return null;
		    }
    	});
    }
    
    /**
     * Digests all event and 3rd-party content feed sources and replaces the
     * previously stored rows for each source. The source list is loaded in one
     * transaction; each source is then digested and saved in its own
     * transaction, so a failure in one feed does not roll back the others.
     */
    public static void digestAndSaveFeedContent() {
    	List<Source> sources = (List<Source>) transactionTemplate(new HibernateCallback() {
    		public Serializable doHibernate() {
    			return (Serializable) new SourceDAO().listBySourceTypes(
    					new String[]{SourceType.event.toString(), SourceType.content.toString()});
    		}
    	});

    	// transactionTemplate() returns null when the lookup transaction rolled back.
    	if (sources == null) {
    		return;
    	}

    	// FIX: was a stray System.out.println debug leftover; route through the logger.
    	logger.info("{} feed sources to digest.", sources.size());

    	final ContentDAO contentDAO = new ContentDAO();
    	final EventDAO eventDAO = new EventDAO();
    	for (final Source source : sources) {
    		if (source.getSourceType().equalsIgnoreCase(SourceType.event.toString())) {
    	    	transactionTemplate(new HibernateCallback() {
    	    		public Serializable doHibernate() {
    	    			final List<Event> events = EventDigester.instance.digest(source);
    	    			// Clear all rows related to this source before insert.
    	    			eventDAO.delete(source);
    	    			int i = 0;
    	    			if (source.getDigestFlag()) {
    	    				// Insert the freshly digested rows.
    	    				for (Event e : events) {
    	    					eventDAO.save(e);
    	    					i++;
    	    					if (i % 20 == 0) { // flush periodically for hibernate batch insert
    	    						HibernateUtil.flushAndClear();
    	    					}
    	    				}
    	    			}
    	    			logger.info("Event feed saved for source {}, {} row affected.", source, i);
    	    			return null;
    	    		}
    	    	});
    		} else if (source.getSourceType().equalsIgnoreCase(SourceType.content.toString())) {
    	    	transactionTemplate(new HibernateCallback() {
    	    		public Serializable doHibernate() {
    	    			final List<Content> contents = FeedContentDigester.instance.digest(source);
    	    			// Clear all rows related to this source before insert.
    	    			contentDAO.delete(source);
    	    			int i = 0;
    	    			if (source.getDigestFlag()) {
    	    				// Insert the freshly digested rows.
    	    				for (Content content : contents) {
    	    					contentDAO.save(content);
    	    					i++;
    	    					if (i % 20 == 0) { // flush periodically for hibernate batch insert
    	    						HibernateUtil.flushAndClear();
    	    					}
    	    				}
    	    			}
    	    			logger.info("3rd party feed saved for source {}, {} row affected.", source, i);
    	    			return null;
    	    		}
    	    	});
    		}
    	}
    }
    
    /**
     * Digests all Twitter feed sources and re-saves the collected tweets.
     * Sources and known users are each loaded in their own transaction; each
     * source's old rows are deleted per-source, then tweets are saved in
     * batches of 10, each batch in its own transaction.
     */
    public static void digestAndSaveTweets() {
    	List<Source> sources = (List<Source>) transactionTemplate(new HibernateCallback() {
    		public Serializable doHibernate() {
    			return (Serializable) new SourceDAO().listBySourceType(SourceType.twitter.toString());
    		}
    	});

    	List<TwitterUser> users = (List<TwitterUser>) transactionTemplate(new HibernateCallback() {
    		public Serializable doHibernate() {
    			return (Serializable) new TwitterUserDAO().list();
    		}
    	});

    	// transactionTemplate() returns null when a lookup transaction rolled back.
    	if (users == null || sources == null) {
    		return;
    	}

    	// Index users by normalized screen name for lookup during digestion.
    	// FIX: build the final map directly — the temporary map plus copy was redundant.
    	final HashMap<String, TwitterUser> userMap = new HashMap<String, TwitterUser>();
    	for (TwitterUser user : users) {
    		userMap.put(user.getScreenName().trim().toLowerCase(), user);
    	}

    	final TweetDAO tweetDAO = new TweetDAO();

    	// FIX: start from an empty map instead of null, so an empty source list
    	// no longer causes a NullPointerException at the save loop below.
    	HashMap<String, Tweet> tweets = new HashMap<String, Tweet>();
    	for (final Source source : sources) {
    		tweets.putAll(TweetDigester.instance.digest(source, userMap));
    		// Clear all rows related to this source before insert.
    		transactionTemplate(new HibernateCallback() {
    			public Serializable doHibernate() {
    				tweetDAO.delete(source);
    				logger.info("Twitter feed digested and deleted for source {}.", source);
    				return null;
    			}
    		});
    	}

    	// Save tweets in batches of 10; the last (possibly short) batch is
    	// flushed when the final tweet is reached.
    	List<Tweet> subTweets = new ArrayList<Tweet>();
    	int i = 0;
    	for (Tweet t : tweets.values()) {
    		i++;
    		subTweets.add(t);
    		if (subTweets.size() == 10 || i == tweets.size()) {
    			final List<Tweet> finalSubTweets = new ArrayList<Tweet>(subTweets);
    			transactionTemplate(new HibernateCallback() {
    				public Serializable doHibernate() {
    					for (Tweet tweet : finalSubTweets) {
    						tweetDAO.saveOrUpdate(tweet);
    					}
    					return null;
    				}
    			});
    			subTweets.clear();
    		}
    	}
    	// FIX: log the total once, after the loop — previously this line ran on
    	// every iteration and spammed the log with the same total count.
    	logger.info("{} Twitters digested.", tweets.size());
    }

    /**
     * Loads all stories ordered by "lastUpdatedDate" (descending) and publishes
     * them grouped by author, all within one transaction. Publishing errors are
     * logged and swallowed so the transaction still commits.
     */
    public static void fetchAndPublishEptStoriesByAuthor() {
    	transactionTemplate(new HibernateCallback() {
    		public Serializable doHibernate() {
    			// FIX: removed a dead new ArrayList<Story>() that was immediately overwritten.
    			List<Story> stories = new StoryDAO().listByOrder("lastUpdatedDate", false);
    			try {
    				Publisher.instance.publishEptStoryByAuthor(stories);
    			} catch (Exception e) {
    				logger.error("Error when publishing templates.", e);
    			}
    			return null;
    		}
    	});
    }

    /**
     * Loads all events ordered by "start" (ascending) and publishes them
     * grouped by category, all within one transaction. Publishing errors are
     * logged and swallowed so the transaction still commits.
     */
    public static void fetchAndPublishEvent() {
    	transactionTemplate(new HibernateCallback() {
    		public Serializable doHibernate() {
    			// FIX: removed a dead new ArrayList<Event>() that was immediately overwritten.
    			List<Event> events = new EventDAO().listByOrder("start", true);
    			try {
    				Publisher.instance.publishEventByCategory(events);
    			} catch (Exception e) {
    				logger.error("Error when publishing templates.", e);
    			}
    			return null;
    		}
    	});
    }

    /**
     * Command-line entry point. Dispatches on a feed type ("ept", "feed",
     * "event", "twitter") and an action ("publish", "digest", "delete"); the
     * "ept" digest/delete actions take one or more file names as trailing
     * arguments. The Hibernate session factory is created before dispatch and
     * shut down afterwards.
     *
     * @param args [type, action, fileName...]
     */
    public static void main(String[] args) {
        String usage = "Usage: java -jar localsrvextdigester.jar [ept|feed|event|twitter] [publish|digest|delete [filename]]";
        if (args.length < 2) {
            System.out.println(usage);
            return;
        }

        String type = args[0].trim();
        String publishOrDigest = args[1].trim();

        HibernateUtil.createSessionFactory();
        if (type.equalsIgnoreCase("ept")) {
            if (publishOrDigest.equalsIgnoreCase("publish")) {
                fetchAndPublishEptStoriesByAuthor();
            } else if (publishOrDigest.equalsIgnoreCase("digest") && args.length > 2) {
                // FIX: replaced a duplicated manual copy loop with Arrays.copyOfRange.
                digestAndSaveEptStory(Arrays.copyOfRange(args, 2, args.length));
            } else if (publishOrDigest.equalsIgnoreCase("delete") && args.length > 2) {
                removeEptStory(Arrays.copyOfRange(args, 2, args.length));
            } else {
                System.out.println(usage);
            }
        } else if (type.equalsIgnoreCase("feed")) {
            if (publishOrDigest.equalsIgnoreCase("publish")) {
                //fetchAndPublishFeedContent();
            } else if (publishOrDigest.equalsIgnoreCase("digest")) {
                digestAndSaveFeedContent();
            } else {
                System.out.println(usage);
            }
        } else if (type.equalsIgnoreCase("event")) {
            if (publishOrDigest.equalsIgnoreCase("publish")) {
                fetchAndPublishEvent();
            } else if (publishOrDigest.equalsIgnoreCase("digest")) {
                // NOTE(review): "event digest" reuses digestAndSaveFeedContent(), which
                // processes both event and content source types — looks intentional,
                // but confirm there is no missing event-only digester.
                digestAndSaveFeedContent();
            } else {
                System.out.println(usage);
            }
        } else if (type.equalsIgnoreCase("twitter")) {
            if (publishOrDigest.equalsIgnoreCase("publish")) {
                //fetchAndPublishTweets();
            } else if (publishOrDigest.equalsIgnoreCase("digest")) {
                digestAndSaveTweets();
            } else {
                System.out.println(usage);
            }
        }
        HibernateUtil.shutdown();
    }

    /**
     * Deletes all stories whose last-updated date is more than two months ago,
     * within a single transaction.
     */
    public static void purgeEptStories() {
        logger.info("Starting to purge old ept stories.");
    	transactionTemplate(new HibernateCallback() {
    		public Serializable doHibernate() {
    			// Cut-off: anything last updated before two months ago is purged.
    			Calendar twoMonthAgo = Calendar.getInstance();
    			twoMonthAgo.add(Calendar.MONTH, -2);
    			StoryDAO storyDAO = new StoryDAO();
    			List<Story> stories = storyDAO.list();
    			int deleteCounter = 0;
    			for (Story story : stories) {
    				if (story.getLastUpdatedDate().before(twoMonthAgo.getTime())) {
    					storyDAO.delete(story);
    					deleteCounter++;
    				}
    			}
    			// FIX: log message typo "etp" corrected to "ept".
    			logger.info("Done purging old ept stories, {} row deleted.", deleteCounter);
    			return null;
    		}
    	});
    }

    /**
     * Digests each of the given EPT story files and deletes the corresponding
     * stories, all within a single transaction.
     *
     * @param fileNames paths of the EPT story files whose stories are removed
     */
    public static void removeEptStory(final String[] fileNames) {
    	transactionTemplate(new HibernateCallback() {
    		public Serializable doHibernate() {
    			StoryDAO storyDAO = new StoryDAO();
    			for (String fileName : fileNames) {
    				storyDAO.delete(EptStoryDigester.instance.digest(fileName));
    			}
    			// Consistency: every sibling digest/purge method logs its outcome; this one was silent.
    			logger.info("Done removing ept stories, {} row affected.", fileNames.length);
    			return null;
    		}
    	});
    }

    /**
     * Runs the given callback inside a Hibernate transaction: begins, invokes
     * the callback, and commits. On HibernateException or any other
     * RuntimeException the failure is logged and the transaction is rolled
     * back. (HibernateException is itself a RuntimeException, so the narrower
     * catch must come first.)
     *
     * @param callback the unit of work to execute transactionally
     * @return the callback's result, or null when the transaction rolled back
     */
    private static Serializable transactionTemplate(HibernateCallback callback) {
    	Serializable result = null;
    	try {
    		HibernateUtil.beginTransaction();
    		result = callback.doHibernate();
    		HibernateUtil.commitTransaction();
    	} catch (HibernateException he) {
    		logger.error("HibernateException, transaction rollback!", he);
    		HibernateUtil.rollbackTransaction();
    	} catch (RuntimeException runtimeEx) {
    		logger.error("RunTimeException in transactionTemplate, transaction rollback!", runtimeEx);
    		HibernateUtil.rollbackTransaction();
    	}
    	return result;
    }
}
