import java.io.File;
import java.io.LineNumberReader;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.Properties;

import edu.stanford.nlp.ling.HasWord;
import edu.stanford.nlp.ling.TaggedWord;
import edu.stanford.nlp.trees.Tree;

/**
 * Loads data in a TSV file into the src/webapp/data/corpus.sqlite database.
 * If this database does not exist, it will be created automatically by copying
 * from data/corpus-prototype.sqlite. If this database already exists, new
 * data will be appended to it; existing data is not deleted. You will need
 * to run Indexer afterwards to update the secondary tables and views that
 * the web app requires.
 * 
 * Because this class uses the Stanford parser and segmenter libraries, which
 * consume a lot of memory, you'd need to give it a lot of RAM by setting
 * this JVM parameter:
 * 
 *   -Xmx2048m
 * 
 * @author dfhuynh
 */
public class Loader {
    // NOTE(review): SimpleDateFormat is not thread-safe. This loader is
    // single-threaded, so sharing one static instance is safe here; switch to
    // java.time.DateTimeFormatter if this class ever becomes concurrent.
    final static private SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");

    /**
     * Entry point. Requires a {@code file=} argument naming a TSV file with
     * the 8 columns described in the usage message. Optional arguments
     * ({@code db=}, {@code user=}, {@code password=}, {@code init=true}) are
     * forwarded to {@link DatabaseUtilities#initializeDatabase}.
     *
     * @param args command-line arguments in key=value form
     */
    public static void main(String[] args) {
        Properties params = Utilities.parseParams(args);

        String dataFilePath = params.getProperty("file");
        if (dataFilePath == null) {
            // Fixed: the message previously said "6 columns" but lists 8.
            System.err.println(
                "Expected file= argument to be path to TSV data file with 8 columns:\n" +
                " - blog url\n" +
                " - blogger's gender\n" +
                " - post url\n" +
                " - level\n" +
                " - date in yyyy-MM-dd format\n" +
                " - title of post\n" +
                " - post body as plain text\n" +
                " - post body as HTML\n" +
                "Example of path to data file:\n" +
                "  data/raw-files/blog-posts-2012-01-03.tsv"
            );
            return;
        }
        File dataFile = new File(dataFilePath);
        if (!dataFile.exists()) {
            System.err.println("Data file " + dataFilePath + " does not exist.");
            return;
        }

        int count = 0;
        try {
            NLPUtilities.initialize();
            DatabaseUtilities.initializeDatabase(
                params.getProperty("db"),
                params.getProperty("user"),
                params.getProperty("password"),
                "true".equals(params.getProperty("init")));

            long start = System.currentTimeMillis();
            final long batchSize = 10;

            LineNumberReader reader = FileUtilities.openFile(dataFile);
            try {
                List<String> row;
                while ((row = FileUtilities.nextRow(reader, '\t')) != null) {
                    // Rows with fewer than 7 columns are silently skipped;
                    // the 8th column (HTML body) is optional.
                    if (row.size() >= 7) {
                        processEntry(
                            row.get(0), // blog url
                            row.get(1), // blogger's gender
                            row.get(2), // post url
                            row.get(3), // level
                            row.get(4), // date in yyyy-MM-dd format
                            row.get(5), // title
                            row.get(6), // body as plain text
                            row.size() >= 8 ? row.get(7) : "" // body as HTML
                        );

                        if (++count % batchSize == 0) {
                            // Use floating-point division (1000.0) so
                            // sub-second elapsed times aren't truncated to 0.
                            double seconds = (System.currentTimeMillis() - start) / 1000.0;
                            double perBatch = Math.round(batchSize * seconds / count);
                            System.out.println(
                                "... processed " + count + " rows at " +
                                perBatch + " seconds per " + batchSize + " rows");
                        }
                    }
                }
            } finally {
                // Previously the reader was never closed, leaking the file
                // handle on every run.
                reader.close();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        System.out.println("Processed " + count + " rows");
    }

    /**
     * Normalizes one TSV row and inserts it as a post. The blogger's username
     * is taken to be the first host-name label of the blog URL (e.g.
     * "alice" from "http://alice.example.com/...").
     *
     * @param blogUrl       blog URL; any leading scheme ("http://") is stripped
     * @param bloggerGender blogger's gender, stored on the blogger record
     * @param postUrl       URL of the individual post
     * @param level         level column, stored verbatim
     * @param date          post date in yyyy-MM-dd format; unparseable or
     *                      empty dates become time 0
     * @param title         post title
     * @param text          post body as plain text
     * @param html          post body as HTML (may be empty)
     * @throws Exception on database or NLP failures
     */
    final static private void processEntry(
        String blogUrl, String bloggerGender, String postUrl, String level, String date, String title, String text, String html
    ) throws Exception {
        // Strip the scheme prefix, if any, so the URL starts at the host name.
        int prefix = blogUrl.indexOf("://");
        if (prefix >= 0) {
            blogUrl = blogUrl.substring(prefix + 3);
        }

        int dot = blogUrl.indexOf('.');
        String bloggerUsername = dot < 0 ? "<unknown>" : blogUrl.substring(0, dot);

        long bloggerId = DatabaseUtilities.getBloggerId(bloggerUsername, true, bloggerGender);
        long blogId = DatabaseUtilities.getBlogId(blogUrl, bloggerId, true);

        long time = 0;
        if (date != null && !date.isEmpty()) {
            try {
                time = sdf.parse(date).getTime() / 1000; // epoch seconds
            } catch (ParseException ignored) {
                // Bad dates are tolerated: the post is kept with time = 0.
            }
        }

        addPost(blogId, postUrl, time, title, level, text, html);
    }

    /**
     * Inserts one post plus its sentences and words. The body text is broken
     * into paragraphs, each paragraph segmented and parsed; every sentence and
     * (when a parse tree is available) every tagged word is written to its own
     * table, then the aggregate counts are written with the post row itself.
     *
     * @param blogId  id of the owning blog row
     * @param postUrl URL of the post
     * @param time    post date as epoch seconds (0 if unknown)
     * @param title   post title
     * @param level   level column, stored verbatim
     * @param text    post body as plain text
     * @param html    post body as HTML
     * @throws Exception on database or NLP failures
     */
    final static private void addPost(
        long blogId, String postUrl, long time, String title, String level, String text, String html
    ) throws Exception {
        final Connection db = DatabaseUtilities.db;

        // start transactions: db.exec("BEGIN TRANSACTION");

        final long postId = generateId();
        // Mutable counters shared with the anonymous callback below:
        // [0] = word count, [1] = sentence count, [2] = paragraph count.
        final int[] counts = new int[] { 0, 0, 0 };

        for (String paragraph : NLPUtilities.breakIntoParagraphs(text)) {
            paragraph = paragraph.trim();
            if (paragraph.length() == 0) {
                continue;
            }

            String segmentedText = NLPUtilities.segment(paragraph);
            NLPUtilities.parseSentences(
                segmentedText,
                new NLPUtilities.ParserCallback() {
                    // Removed unused inParagraphIndex field: it was
                    // incremented but never read.

                    @Override
                    public void onSentence(int sentenceIndex, List<HasWord> sentence, Tree tree) throws Exception {
                        long sentenceId = generateId();
                        // The tagged_text and tree columns are only written
                        // when the parser produced a tree for this sentence.
                        PreparedStatement stSentence = db.prepareStatement(
                            "INSERT INTO sentences" +
                            " (id, post_id, paragraph_index, in_post_index, in_paragraph_index, original_text," +
                            " word_count, segmented_text" +
                            (tree != null ?
                                ", tagged_text, tree)  VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" :
                                ")  VALUES(?, ?, ?, ?, ?, ?, ?, ?)"));
                        try {
                            stSentence.setLong(1, sentenceId);
                            stSentence.setLong(2, postId);
                            stSentence.setInt(3, counts[2]); // paragraph index
                            stSentence.setInt(4, counts[1]); // in-post sentence index
                            stSentence.setInt(5, sentenceIndex); // in-paragraph index
                            stSentence.setString(6, NLPUtilities.createOriginalText(sentence));
                            stSentence.setInt(7, sentence.size());
                            stSentence.setString(8, NLPUtilities.createSegmentedText(sentence));
                            if (tree != null) {
                                stSentence.setString(9, NLPUtilities.createTaggedText(tree));
                                stSentence.setString(10, tree.pennString());
                            }
                            stSentence.execute();
                        } finally {
                            stSentence.close();
                        }

                        if (tree != null) {
                            // Prepare the word statement once per sentence and
                            // reuse it, instead of re-preparing per word.
                            PreparedStatement stWord = db.prepareStatement(
                                "INSERT INTO words" +
                                " (sentence_id, in_sentence_token_index, word, word_length, pos_tag)" +
                                " VALUES(?, ?, ?, ?, ?)");
                            try {
                                int wordIndex = 0;
                                for (TaggedWord taggedWord : tree.taggedYield()) {
                                    stWord.setLong(1, sentenceId);
                                    stWord.setLong(2, wordIndex++);
                                    stWord.setString(3, taggedWord.word());
                                    stWord.setInt(4, taggedWord.word().length());
                                    stWord.setString(5, taggedWord.tag());
                                    stWord.execute();
                                }
                            } finally {
                                stWord.close();
                            }
                        }
                        counts[0] += sentence.size(); // word count
                        counts[1]++; // sentence count
                    }
                }
            );
            counts[2]++; // paragraph count
        }

        PreparedStatement stPost = db.prepareStatement(
            "INSERT INTO posts" +
            " (id, blog_id, url, title, level, date, body_text, body_html," +
            " character_count, word_count, sentence_count, paragraph_count)" +
            " VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
        try {
            stPost.setLong(1, postId);
            stPost.setLong(2, blogId);
            stPost.setString(3, postUrl);
            stPost.setString(4, title);
            stPost.setString(5, level);
            stPost.setLong(6, time);
            stPost.setString(7, text);
            stPost.setString(8, html);
            stPost.setInt(9, text.length());
            stPost.setInt(10, counts[0]); // word
            stPost.setInt(11, counts[1]); // sentence
            stPost.setInt(12, counts[2]); // paragraph
            stPost.execute();
        } finally {
            stPost.close();
        }

        //db.commit();
    }

    /**
     * Generates a quasi-unique id from the current millisecond timestamp and
     * a random suffix in [0, 1000000].
     *
     * NOTE(review): not collision-proof — two calls in the same millisecond
     * can collide with probability ~1e-6; acceptable for this batch loader.
     */
    final static private long generateId() {
        return System.currentTimeMillis() * 1000000 +
            Math.round(1000000 * Math.random());
    }
}
