package dst.ass1.nosql.impl;

import java.util.List;
import java.util.logging.Logger;

import javax.persistence.EntityManager;

import org.hibernate.Session;

import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBCollection;
import com.mongodb.DBObject;
import com.mongodb.Mongo;
import com.mongodb.util.JSON;

import dst.ass1.jpa.dao.DAOFactory;
import dst.ass1.jpa.dao.ITaskDAO;
import dst.ass1.jpa.model.ITask;
import dst.ass1.nosql.IMongoDbDataLoader;
import dst.ass1.nosql.MongoTestData;

/**
 * 1.5.a 
 * Storing workflow results in MongoDB
 * loop over all tasks and store a JSON object with the task output to MongoDB. Use the official MongoDB Java driver
 *
 */
/**
 * 1.5.a
 * Storing workflow results in MongoDB.
 *
 * Loops over all finished tasks and stores one JSON document per task in the
 * {@value #COLLECTION_NAME} collection, using the official MongoDB Java driver.
 */
public class MongoDbDataLoader implements IMongoDbDataLoader {

    private static final String DATABASE_NAME = "dst";
    public static final String COLLECTION_NAME = "TaskResult";

    private static final Logger LOGGER = Logger.getLogger(MongoDbDataLoader.class.getName());

    /** JPA entity manager used to reach the relational task data. */
    private final EntityManager entityManager;

    public MongoDbDataLoader(EntityManager em) {
        this.entityManager = em;
    }

    /**
     * Loads every finished task from the relational store and inserts a
     * corresponding document into MongoDB. Each document has the shape
     * {@code { task_id, last_updated, <dataDesc>: <parsed JSON payload> }}.
     *
     * @throws Exception if querying the tasks or writing to MongoDB fails
     */
    @Override
    public void loadData() throws Exception {

        LOGGER.info("loadData");

        Session session = entityManager.unwrap(Session.class);
        ITaskDAO taskDAO = new DAOFactory(session).getTaskDAO();
        List<ITask> tasks = taskDAO.findTasksForStatusFinishedStartandFinish(null, null);

        Mongo client = new Mongo();
        try {
            DB db = client.getDB(DATABASE_NAME);
            DBCollection collection = db.getCollection(COLLECTION_NAME);

            MongoTestData testData = new MongoTestData();

            // Documents are frequently retrieved by task id, so index that field
            // to speed up such queries.
            collection.createIndex(new BasicDBObject("task_id", 1));

            LOGGER.info("loadData: " + tasks.size() + " Tasks found!");

            for (ITask task : tasks) {
                int taskId = task.getId().intValue();

                // Parse the pre-generated JSON payload for this task.
                DBObject payload = (DBObject) JSON.parse(testData.getStringData(taskId));

                BasicDBObject document = new BasicDBObject("task_id", task.getId());
                // Millisecond timestamp of the processing end time.
                document.append("last_updated", task.getTaskProcessing().getEnd().getTime());
                // Key the payload under its test-data description (this key is
                // later counted via map-reduce).
                document.append(testData.getDataDesc(taskId), payload);

                collection.insert(document);

                LOGGER.info("loadData: inserted Task " + task.getId());
            }
        } finally {
            // Always release the connection, even when an earlier step throws.
            client.close();
        }
    }

}