package org.imixs.manik.webstat.ejb;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FilenameFilter;
import java.io.IOException;
import java.text.Format;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

import javax.ejb.Stateless;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionAttributeType;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;

import org.imixs.manik.webstat.jpa.AccessEntity;
import org.imixs.manik.webstat.jpa.AccessLog;
import org.imixs.manik.webstat.jpa.Analyzer;
import org.imixs.manik.webstat.jpa.LogFile;

/**
 * EJB importing access log files.
 * 
 * The scan method reads the content of access log files defined by the
 * AccessLog object. Each log entry of the files will be imported into the
 * database as an AccessEntity.
 * 
 * @see method scan() for details about the implementation.
 * 
 * 
 *      thanks to: Sujit Pal
 *      http://sujitpal.blogspot.com/2009/06/some-access-log-parsers.html
 * 
 * @author rsoika
 * 
 */
@Stateless
public class AccessLogService {

	@PersistenceContext(unitName = "org.imixs.manik.webstat.jpa")
	EntityManager em;

	private static Logger logger = Logger.getLogger("org.imixs.manik.webstat");

	/**
	 * Persists the given AccessLog definition. If an entity with the same id
	 * already exists it is merged, otherwise a new entity is persisted.
	 * 
	 * @param aLog
	 *            the AccessLog to store
	 * @throws Exception
	 *             if the persistence operation fails
	 */
	public void save(AccessLog aLog) throws Exception {
		AccessLog oldAccessLog = em.find(AccessLog.class, aLog.getId());
		if (oldAccessLog == null)
			em.persist(aLog);
		else
			em.merge(aLog);
	}

	/**
	 * Removes an AccessLog definition together with all AccessEntities and
	 * LogFile entries imported for it.
	 * 
	 * @param aLog
	 *            the AccessLog to remove
	 * @throws Exception
	 *             if the persistence operation fails
	 */
	public void remove(AccessLog aLog) throws Exception {
		logger.info("remove AccessLog ID=" + aLog.getId());
		AccessLog oldLog = em.find(AccessLog.class, aLog.getId());
		if (oldLog != null) {
			// drop all dependent entries before removing the definition itself
			clear(oldLog);
			em.remove(oldLog);
		} else
			logger.info("AccessLog object not found");
	}

	/**
	 * Returns all AccessLog definitions stored in the database.
	 * 
	 * @return list of AccessLog entities - empty (never null) if the query
	 *         fails
	 */
	public List<AccessLog> getAccessLogs() {
		List<AccessLog> logs = new ArrayList<AccessLog>();
		try {
			Query q = em.createQuery("SELECT log FROM AccessLog log");
			logs.addAll(q.getResultList());
		} catch (Exception ee) {
			logger.log(Level.WARNING, "getAccessLogs failed", ee);
		}
		return logs;
	}

	/**
	 * Returns a list of LogFiles associated with the given AccessLog.
	 * 
	 * @param aLog
	 *            the AccessLog definition
	 * @return list of LogFile entities - empty (never null) if the query fails
	 */
	public List<LogFile> getLogFiles(final AccessLog aLog) {
		List<LogFile> logs = new ArrayList<LogFile>();
		try {
			// use a named parameter instead of string concatenation to avoid
			// injection and quoting issues
			Query q = em
					.createQuery("SELECT logfile FROM LogFile logfile WHERE logfile.accessLog.id=:id");
			q.setParameter("id", aLog.getId());
			logs.addAll(q.getResultList());
		} catch (Exception ee) {
			logger.log(Level.WARNING, "getLogFiles failed", ee);
		}
		return logs;
	}

	/**
	 * Drops all existing AccessEntities and LogFile entries for a given
	 * AccessLog definition and resets its scan state (totalcount, lastscan).
	 * 
	 * Runs in its own transaction so the potentially large delete does not
	 * interfere with the caller's transaction.
	 * 
	 * @param aLog
	 *            the AccessLog definition
	 * @throws Exception
	 */
	@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
	public void clear(final AccessLog aLog) throws Exception {
		try {
			logger.info(" clear AccessEntries for AccessLog ID=" + aLog.getId());
			Query q = em
					.createQuery("DELETE FROM AccessEntity ae WHERE ae.accessLog.id=:id");
			q.setParameter("id", aLog.getId());
			q.executeUpdate();

			logger.info(" clear LogFiles for AccessLog ID=" + aLog.getId());
			q = em.createQuery("DELETE FROM LogFile lf WHERE lf.accessLog.id=:id");
			q.setParameter("id", aLog.getId());
			q.executeUpdate();

			// reset the scan state on the AccessLog itself
			aLog.setTotalcount(0);
			aLog.setLastscan(null);
			em.merge(aLog);
		} catch (Exception ee) {
			logger.log(Level.WARNING, "clear failed", ee);
		}
	}

	/**
	 * Imports the server access log files defined by the AccessLog. The
	 * directory is specified by the 'path' property from the AccessLog object.
	 * The matching pattern (a regular expression) is specified by the
	 * 'LogFile' property.
	 * 
	 * @param aLog
	 *            the AccessLog definition describing path, file pattern and
	 *            log format
	 * @throws Exception
	 */
	public void scan(final AccessLog aLog) throws Exception {

		logger.info("scan started....");
		long lStartTime = System.currentTimeMillis();
		// make sure the AccessLog definition is stored before the import runs
		save(aLog);

		// remember the new scan time - it becomes the 'lastscan' marker after
		// a successful run
		Calendar calLastScanTime = aLog.getLastscan();
		Calendar calNewScanTime = Calendar.getInstance();

		// accept only file names matching the configured regex pattern
		FilenameFilter filter = new FilenameFilter() {
			public boolean accept(File dir, String name) {
				return name.matches(aLog.getLogfile());
			}
		};

		logger.info(" file path= " + aLog.getPath());
		logger.info(" file pattern= " + aLog.getLogfile());
		Format formatter = new SimpleDateFormat("dd.MM.yyyy HH:mm:ss");
		if (calLastScanTime != null && calLastScanTime.getTime() != null)
			logger.info(" last scan="
					+ formatter.format(calLastScanTime.getTime()));

		// collect all log files matching the pattern
		File folder = new File(aLog.getPath());
		File[] fileArray = folder.listFiles(filter);
		if (fileArray == null) {
			// listFiles() returns null if the path does not exist or is not a
			// directory - log instead of throwing a NullPointerException
			logger.warning(" path is not a readable directory: "
					+ aLog.getPath());
		} else {
			// scan each file
			for (File aFile : fileArray)
				scanLogFile(aFile, aLog);
		}

		// update the scan statistics on the AccessLog
		aLog.setTotalcount(getCount(aLog));
		aLog.setLastscan(calNewScanTime);
		aLog.setLastscanduration((System.currentTimeMillis() - lStartTime));

		save(aLog);

		logger.info("scan finished successful in "
				+ (aLog.getLastscanduration() / 1000) + " sec");
	}

	/**
	 * This method reads the content of an access log file and creates an
	 * AccessEntity for each log entry. The information about the access log
	 * file and the last scan is stored in a LogFile entity.
	 * 
	 * How can we make sure not to re-import the same entries?<br>
	 * 1.) We know the last modified date of the access log file (stored during
	 * the last scan in the LogFile entity). So we only scan the file if its
	 * last modified date has changed.
	 * 
	 * 2.) Log files often rotate. A log file which we have already scanned
	 * completely or in parts may be renamed by an external logging service, so
	 * the log file names can change over time. <br />
	 * To make sure we do not scan the same log entry twice from a rotated
	 * (renamed) log file, we only import log entries whose time stamp is newer
	 * than our last overall scan. The last scan is stored in the AccessLog
	 * entity and will only be updated if the complete scan finished
	 * successfully.
	 * 
	 * As this all runs in a transaction, no data is stored if anything goes
	 * completely wrong.
	 * 
	 * MAX_JUNK_SIZE is the count of entries after which the EntityManager will
	 * be flushed to save memory.
	 * 
	 * @throws IOException
	 */
	private void scanLogFile(File accessLogFile, AccessLog aLog)
			throws IOException {

		int MAX_JUNK_SIZE = 7000;
		int iJunkCount = 0;
		int iCount = 0;
		LogFile logFile = null;
		Calendar calFileLastModified = null;
		BufferedReader in = null;

		try {
			String sFileName = accessLogFile.getName();
			// find (or create) the LogFile entity for this file name
			logFile = this.findLogFile(aLog, sFileName);

			calFileLastModified = Calendar.getInstance();
			calFileLastModified.setTimeInMillis(accessLogFile.lastModified());
			Format formatter = new SimpleDateFormat("dd.MM.yyyy HH:mm:ss");
			logger.info("   scan: " + sFileName + " (last modified:"
					+ formatter.format(calFileLastModified.getTime()) + ")");

			// if the last modified date of this file is not after the last
			// modified timestamp in our LogFile entry we can skip it.
			// NOTE(review): a freshly created LogFile has no lastmodified
			// value yet and Calendar.after(null) returns false, which would
			// wrongly skip new files - therefore the null case is checked
			// explicitly (confirm LogFile's default against the jpa class).
			if (logFile.getLastmodified() != null
					&& !calFileLastModified.after(logFile.getLastmodified())) {
				logger.info("   skip file scan (no update)");
				return;
			}

			in = new BufferedReader(new FileReader(accessLogFile));

			String str;
			while ((str = in.readLine()) != null) {
				// now create a new AccessEntity from the raw log line
				AccessEntity ae = createAccessEntity(str, aLog);

				// skip entries added after this scan started - they will be
				// picked up by the next scan
				if (ae.getDatetime().after(calFileLastModified))
					continue;

				// skip entries older than the last scan time - they should
				// have been imported during a previous scan already
				if (ae.getDatetime().before(aLog.getLastscan()))
					continue;

				// persist the entry because it is newer than our last
				// imported entry
				em.persist(ae);
				// update the lastEntryTime
				logFile.setLastentry(ae.getDatetime());
				iCount++;

				// flush periodically to limit memory usage
				iJunkCount++;
				if (iJunkCount >= MAX_JUNK_SIZE) {
					em.flush();
					iJunkCount = 0;
					logger.info("   " + iCount + " entries scanned....");
				}
			}
		} catch (Exception e) {
			logger.log(Level.WARNING, " unable to scan file!", e);
		} finally {
			if (in != null)
				in.close();
		}

		// now update the last modified date in our LogFile entity. logFile
		// may still be null if findLogFile() failed above.
		if (logFile != null)
			logFile.setLastmodified(calFileLastModified);

		logger.info("   " + iCount + " entries imported successfully");
	}

	/**
	 * This method creates an AccessEntity for a formatted access log line.
	 * 
	 * The method knows the following common log formats:
	 * 
	 * 1=NCSA COMMON 2=NCSA COMBINED 3=Glassfish 3.1
	 * 
	 * @param logString
	 *            a single raw line from the access log file
	 * @param aLog
	 *            the AccessLog definition (provides the log format)
	 * @return the populated AccessEntity; fields stay unset if the configured
	 *         format matches none of the known ones
	 */
	private AccessEntity createAccessEntity(String logString, AccessLog aLog) {

		List<String> fieldList = NcsaLogParser.parse(logString);

		AccessEntity accessEntity = new AccessEntity();
		accessEntity.setAccessLog(aLog);

		// the formats are mutually exclusive - use an else-if chain
		if (aLog.getLogformat() == AccessLog.ACCESS_FORMAT_COMMON) {
			/**
			 * Example:
			 * 
			 * <code>
			 *  [0] = 100.123.145.12
				[1] = -
				[2] = -
				[3] = 19/Oct/2010:19:05:38 -0700
				[4] = GET /search?q1=foo&st=bar HTTP/1.1
				[5] = 200
				[6] = 323
		     * </code>
			 */
			accessEntity.setHost(fieldList.get(0));
			accessEntity.setRfc931(fieldList.get(1));
			accessEntity.setUsername(fieldList.get(2));
			accessEntity.setDatetime(NcsaLogParser.parseDateString(fieldList
					.get(3)));
			accessEntity.setRequest(fieldList.get(4));
			accessEntity.setStatuscode(fieldList.get(5));
			accessEntity.setBytes(fieldList.get(6));

		} else if (aLog.getLogformat() == AccessLog.ACCESS_FORMAT_COMBINED) {
			/**
			 * Example:
			 * 
			 * <code>
			 *   [0] = 100.123.123.12
				 [1] = -
				 [2] = -
				 [3] = 19/Oct/2010:19:45:18 -0700
				 [4] = GET /search?q1=foo&st=bar HTTP/1.1
				 [5] = 200
				 [6] = 323
				 [7] = -
				 [8] = Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.14) Gecko/20080416 \Fedora/2.0.0.14-1.fc7 Firefox/2.0.0.14
		     * </code>
			 */
			accessEntity.setHost(fieldList.get(0));
			accessEntity.setRfc931(fieldList.get(1));
			accessEntity.setUsername(fieldList.get(2));
			accessEntity.setDatetime(NcsaLogParser.parseDateString(fieldList
					.get(3)));
			accessEntity.setRequest(fieldList.get(4));
			accessEntity.setStatuscode(fieldList.get(5));
			accessEntity.setBytes(fieldList.get(6));
			accessEntity.setReferrer(fieldList.get(7));
			accessEntity.setUseragent(fieldList.get(8));

		} else if (aLog.getLogformat() == AccessLog.ACCESS_FORMAT_GLASSFISH31) {
			/**
			 * <code>
			 * Format: %client.name% %auth-user-name% %datetime% %request% %status% %response.length% 
			 * Example: "0:0:0:0:0:0:0:1" "NULL-AUTH-USER" "22/Jun/2011:22:14:52 +0100" "GET /manik-web-stat/ HTTP/1.1" 200 4399
			 * 
			 *  [0] = 0:0:0:0:0:0:0:1
				[1] = NULL-AUTH-USER
				[2] = 19/Oct/2010:19:05:38 -0700
				[3] = GET /search?q1=foo&st=bar HTTP/1.1
				[4] = 200 
				[5] = 323
		     * </code>
			 */
			accessEntity.setHost(fieldList.get(0));
			accessEntity.setUsername(fieldList.get(1));
			accessEntity.setDatetime(NcsaLogParser.parseDateString(fieldList
					.get(2)));
			accessEntity.setRequest(fieldList.get(3));
			accessEntity.setStatuscode(fieldList.get(4));
			accessEntity.setBytes(fieldList.get(5));
		}

		return accessEntity;
	}

	/**
	 * Counts the AccessEntities imported for the given AccessLog.
	 * 
	 * @param alog
	 *            the AccessLog definition
	 * @return total number of imported AccessEntities
	 */
	private long getCount(AccessLog alog) {
		Query query = em
				.createQuery("SELECT COUNT(ae) FROM AccessEntity ae WHERE ae.accessLog.id=:id");
		query.setParameter("id", alog.getId());
		Number countResult = (Number) query.getSingleResult();
		logger.info(" totalcount=" + countResult);
		return countResult.longValue();
	}

	/**
	 * Finds an AccessLog by its id.
	 * 
	 * @param id
	 *            the primary key of the AccessLog
	 * @return the AccessLog, or null if no matching entity exists
	 */
	public AccessLog findAccessLog(long id) {
		Query q = em.createQuery("SELECT al FROM AccessLog al WHERE al.id=:id");
		q.setParameter("id", id);
		Collection<AccessLog> col = q.getResultList();
		if (col.isEmpty())
			return null;
		return col.iterator().next();
	}

	/**
	 * Returns the LogFile entity for the given AccessLog and file name. If no
	 * matching entity exists yet, a new one is created and persisted.
	 * 
	 * @param aLog
	 *            the AccessLog definition
	 * @param sFileName
	 *            name of the physical log file
	 * @return the existing or newly created LogFile (never null)
	 */
	public LogFile findLogFile(AccessLog aLog, String sFileName) {
		Query q = em
				.createQuery("SELECT logfile FROM LogFile logfile WHERE logfile.accessLog.id=:id AND logfile.name=:name");
		q.setParameter("id", aLog.getId());
		q.setParameter("name", sFileName);
		Collection<LogFile> col = q.getResultList();
		if (col.isEmpty()) {
			// create and persist a new entry for this file name
			LogFile logFile = new LogFile();
			logFile.setAccessLog(aLog);
			logFile.setName(sFileName);
			em.persist(logFile);
			return logFile;
		}
		return col.iterator().next();
	}

	/**
	 * Compares two files by their last modified time (oldest first).
	 */
	public class FileComparator implements Comparator<File> {
		@Override
		public int compare(File a, File b) {
			long lFileA = a.lastModified();
			long lFileB = b.lastModified();
			if (lFileA == lFileB)
				return 0;
			return (lFileA > lFileB) ? 1 : -1;
		}
	}

}
