﻿/*
 * Copyright (C) 2009 Gentea Alexandru <in.10se@yahoo.com>
 * 
 *
 * This file is part of WordNet::SQLConverter
 * See http://wnsqlconverter.codeplex.com/ for details.
 *
 * WordNet::SQLConverter is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * WordNet::SQLConverter is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.IO;
using System.Text.RegularExpressions;
using GDALib;
using NLog;

namespace WordNet.SQLConverter
{
    class SQLConverter
    {
        // WordNet database file suffixes, ordered so that (index + 1) matches the PosId
        // returned by GetPOSId for n, v, a, r (noun=1, verb=2, adj=3, adv=4).
        static String[] files = { "noun", "verb", "adj", "adv"}; //1,2,3,4
        // Position of "verb" in 'files'; verb data files carry extra frame information.
        const int verbFileIndex = 1;
        // Position of "adj" in 'files'; adjective lemmas may carry "(marker)" suffixes.
        const int adjFileIndex = 2;
        // Verb example-sentence and frame files shipped with WordNet.
        static String[] verbSentenceFiles = { "sents.vrb", "frames.vrb" };
        // WordNet sense index file name.
        static String senseFile = "index.sense";
        // Matches the separator characters ('_' or '-') inside compound lemmas.
        static Regex compoundWordRegex;
        // Captures the two numeric fields of a WordNet sense key ("lexid:num:").
        static Regex senseKeyRegex;
        // Root directories of the WordNet and VerbNet data files.
        String wordNetPath, verbNetPath;
        // Shared reader reused by each Parse* method for the file it is processing.
        StreamReader fileReader;
        // Database gateway (bulk insert, async exec, cached commands).
        DatabaseAccessor dbAccessor;
        Logger logger;
        // Maps a WordNet pointer symbol to its RelationTypeId (both as strings).
        Dictionary<String, String> relationTypes;
        // Initial capacity hint for 'relationTypes'.
        const int relationTypesCount = 26;
        // Worker running the conversion; used for cancellation and progress reporting.
        BackgroundWorker workerThread;
        DoWorkEventArgs eventArgs;
        // Current progress percentage reported to the UI.
        int progress;
        // Number of database errors seen so far; conversion aborts past maxErrors.
        int errorCount;
        const int maxErrors = 50;

        /// <summary>
        /// Gets the number of database errors encountered so far during the conversion.
        /// </summary>
        public int ErrorCount
        {
            get
            {
                return errorCount;
            }
        }

        /// <summary>
        /// Compiles the regular expressions shared by every converter instance.
        /// </summary>
        static SQLConverter()
        {
            // Captures the two numeric fields of a WordNet sense key.
            senseKeyRegex = new Regex(@"(\d+):(\d+):", RegexOptions.Compiled);
            // Matches the separators used inside compound lemmas.
            compoundWordRegex = new Regex("_|-", RegexOptions.Compiled);
        }

        /// <summary>
        /// Wires the converter to its database gateway, data paths and worker thread.
        /// </summary>
        /// <param name="dbAccessor">Database gateway; its bulk/async error events are subscribed here.</param>
        /// <param name="wordNetPath">Directory containing the WordNet data files.</param>
        /// <param name="verbNetPath">Directory containing the VerbNet data files.</param>
        /// <param name="workerThread">Background worker used for progress reporting and cancellation.</param>
        /// <param name="eventArgs">Event args of the worker's DoWork call, used to signal cancellation.</param>
        public SQLConverter(DatabaseAccessor dbAccessor, String wordNetPath, String verbNetPath, 
            BackgroundWorker workerThread, DoWorkEventArgs eventArgs)
        {
            this.wordNetPath = wordNetPath;
            this.verbNetPath = verbNetPath;
            this.dbAccessor = dbAccessor;
            this.workerThread = workerThread;
            this.eventArgs = eventArgs;
            logger = LogManager.GetCurrentClassLogger();
            relationTypes = new Dictionary<String, String>(relationTypesCount);
            // Progress starts at 5%; each parse step adds its own increments.
            progress = 5;
            errorCount = 0;
            // Abort / error-count bookkeeping is driven by the accessor's error events.
            dbAccessor.BulkExecError += new GDALib.ErrorEventHandler(OnBulkExecError);
            dbAccessor.AsyncExecError += new GDALib.ErrorEventHandler(OnAsyncExecError);
        }

        /// <summary>
        /// Handles a fatal bulk-insert error: counts it and cancels the conversion.
        /// </summary>
        void OnBulkExecError(object sender, DatabaseAccessorEventArgs e)
        {
            errorCount++;
            logger.Error("Fatal BulkExec error. Aborting...");
            workerThread.CancelAsync();
        }

        /// <summary>
        /// Handles a non-fatal async query error; cancels the conversion once the
        /// error count exceeds maxErrors.
        /// </summary>
        void OnAsyncExecError(object sender, DatabaseAccessorEventArgs e)
        {
            ++errorCount;
            if (errorCount <= maxErrors)
                return;
            workerThread.CancelAsync();
        }

        /// <summary>
        /// Runs the full WordNet-to-SQL conversion pipeline. If any parse step fails,
        /// the partially populated database is dropped.
        /// </summary>
        /// <param name="dbName">Name of the database to drop on failure.
        /// NOTE(review): it is formatted into the ALTER/DROP DATABASE statements without
        /// bracket-quoting — confirm callers never pass names needing escaping.</param>
        /// <returns>true when every step succeeded; false on any failure or cancellation.</returns>
        public bool ConvertToSQL(String dbName)
        {
            Initialize();
            bool result = true;

            //try to parse all files
            //if there were any failures drop the database
            // Short-circuit order matters: relation types and lexnames must be loaded
            // before the data files that reference them.
            if (!FillRelationTypesDictionary() ||
                !ParseLexnames() ||
                !ParseIndexes() ||
                !ParseExceptionFiles() ||
                !ParseVerbSentenceFiles() ||
                !ParseDataFiles() ||
                !ParseSenseFile() ||
                !ParseSentidx())
            {
                result = false;
                //in case of failure, clean up the sql connections and commands
                //and drop the database
                dbAccessor.EndBulkExec(true);
                dbAccessor.EmptyCache();
                // Repoint the connection at the system catalog so the target database
                // can be dropped. NOTE(review): system db is usually spelled "master" —
                // presumably SQL Server resolves "Master" case-insensitively; verify
                // against case-sensitive server collations.
                dbAccessor.ConnectionString = Regex.Replace(dbAccessor.ConnectionString, "Initial Catalog=[^;]+;", "Initial Catalog=Master;");
                // SINGLE_USER WITH ROLLBACK IMMEDIATE kicks out other sessions so the
                // DROP cannot be blocked by open connections.
                dbAccessor.ExecuteNonQueryAsync(String.Format(
                    @"ALTER DATABASE {0} SET SINGLE_USER WITH ROLLBACK IMMEDIATE;
                      DROP DATABASE {0};", dbName));
            }
            else
            {
                RebuildIndexes();
                ExecuteFixes();
            }
            
            // Drain all pending async work before reporting the final error count.
            workerThread.ReportProgress(progress, "Finishing database operations...");
            dbAccessor.EmptyCache();
            dbAccessor.WaitForAsyncQueries();
            dbAccessor.ClearAllConnectionPools();
            logger.Debug(String.Format("Finished conversion with {0} errors.", errorCount));
            
            return result;
        }

        /// <summary>
        /// Caches the lemma-to-WordId lookup used throughout the conversion
        /// (see "GetWordId" usages in the parse methods).
        /// </summary>
        private void Initialize()
        {
            dbAccessor.CacheCommand("GetWordId", "SELECT [WordId] FROM Words WHERE [Lemma] = @Lemma",
                SqlCommandType.Scalar);
        }

        /// <summary>
        /// Loads the RelationTypes table into the in-memory symbol-to-id dictionary
        /// used when parsing pointer relations.
        /// </summary>
        /// <returns>false when the worker requested cancellation; true otherwise.</returns>
        private bool FillRelationTypesDictionary()
        {
            if (workerThread.CancellationPending)
            {
                eventArgs.Cancel = true;
                return false;
            }
            workerThread.ReportProgress(progress, "Caching relation types...");

            // Fix: dispose the DataTable even when a row fails to load; the original
            // only disposed it on the success path.
            using (System.Data.DataTable dt = dbAccessor.ExecuteReader("SELECT [RelationTypeId], [Symbol] FROM RelationTypes"))
            {
                foreach (System.Data.DataRow row in dt.Rows)
                {
                    // Dictionary.Add throws on a duplicate symbol, surfacing bad seed data early.
                    relationTypes.Add(row["Symbol"].ToString(), row["RelationTypeId"].ToString());
                }
            }

            progress += 2;
            return true;
        }

        /// <summary>
        /// Queues an asynchronous index rebuild for every table populated by the
        /// conversion (the Words and Senses tables are rebuilt earlier, inline).
        /// </summary>
        private void RebuildIndexes()
        {
            String[] tables = { "Categories", "Exceptions", "PartsOfSpeech", "Relations",
                                "RelationTypes", "SenseSentenceAssociations", "Sentences", "Synsets" };

            foreach (String table in tables)
                dbAccessor.ExecuteNonQueryAsync("ALTER INDEX ALL ON " + table + " REBUILD;");
        }

        /// <summary>
        /// First pass over the four index.* files: bulk-inserts every lemma into the
        /// Words table together with its word count (1 + number of '_'/'-' separators).
        /// </summary>
        /// <returns>true on success; false on cancellation, bulk-exec setup failure,
        /// or an unreadable file.</returns>
        public bool ParseIndexes()
        {
            String line = String.Empty;
            String[] parts;
            int wordCount;
            String filePath, fileName;

            if (!dbAccessor.BeginBulkExecTrans("INSERT INTO Words([Lemma],[WordCount]) VALUES(@Lemma,@WordCount);",
                System.Data.IsolationLevel.ReadCommitted, true))
                return false;
            
            //process each index file
            for (int i = 0; i < files.Length; i++)
            {
                fileName = "index." + files[i];
                filePath = Path.Combine(wordNetPath, fileName);

                try
                {
                    //read file
                    fileReader = new StreamReader(filePath);
                    workerThread.ReportProgress(progress, "Extracting word lemmas from " + fileName + "...");
                    progress += 2;

                    while (!fileReader.EndOfStream)
                        try
                        {
                            //we stop the operation if we get the cancel signal
                            if (workerThread.CancellationPending)
                            {
                                fileReader.Close();
                                // Roll back the pending bulk insert before bailing out.
                                dbAccessor.EndBulkExec(true);
                                eventArgs.Cancel = true;
                                return false;
                            }

                            line = fileReader.ReadLine();

                            //skip comments
                            // WordNet header lines start with two spaces.
                            if (line.StartsWith("  "))
                                continue;

                            parts = line.Split(' ');

                            //compute how many words each lemma has
                            // Compound lemmas use '_' or '-' as separators, so the word
                            // count is 1 + separator count.
                            // NOTE(review): the (byte) cast narrows the match count for
                            // no visible reason — harmless for real lemmas, but confirm
                            // it can be dropped.
                            wordCount = 1;
                            wordCount += (byte)compoundWordRegex.Matches(parts[0]).Count;

                            //[Lemma],[WordCount]
                            dbAccessor.EnqueForBulkExec(parts[0], wordCount.ToString());
                            //word.PosID = GetPOSId(parts[1]);

                            //synset_cnt
                            //if (int.TryParse(parts[2], out synsetCount) == false)
                            //    continue;

                            ////ptr_cnt
                            //if (int.TryParse(parts[3], out j))
                            //    j += 4;
                            //else continue;

                            ////parts[j] -> synset_count = sense_count
                            ////parts[j+1] -> tagsense_count
                            //synsetCount = Math.Min(j + 2 + synsetCount, parts.Length);
                            //newSynsetStatement = "";
                            //for (int k = j + 2; k < synsetCount; k++)
                            //{
                            //    cmd.CommandText = "INSERT INTO Synsets (SynsetId) VALUES(" +
                            //        (i+1).ToString() + parts[k] + ");";
                            //}

                            //dbAccessor.ExecuteNonQueryAsync(sqlStatement, "ParseIndexes()");
                        }
                        catch (Exception e)
                        {
                            // A bad row is logged and skipped; the file keeps processing.
                            logger.Error("Couldn't process row in {0} -> {1}. Error: {2}", fileName, line, e.Message);
                        }

                    fileReader.Close();
                }
                catch (Exception e)
                {
                    // An unreadable file is fatal for the whole pass.
                    logger.Error("Couldn't parse " + filePath + ". Error: " + e.Message);

                    if (fileReader != null)
                        fileReader.Close();

                    return false;
                }
            }

            // Commit (false = don't roll back) the queued inserts.
            workerThread.ReportProgress(progress, "Finishing database operations...");
            dbAccessor.EndBulkExec(false);

            if (workerThread.CancellationPending)
            {
                eventArgs.Cancel = true;
                return false;
            }
            //rebuild the indexes immediately to improve the performance of the queries on the Words table that will follow
            dbAccessor.ExecuteNonQueryAsync("ALTER INDEX ALL ON Words REBUILD;");
            progress += 12;
            return true;
        }

        /// <summary>
        /// Maps a WordNet part-of-speech tag to the numeric PosId stored in the database.
        /// </summary>
        /// <param name="pos">Single-letter tag: n, v, a, r or s.</param>
        /// <returns>1..5 for a known tag, 0 otherwise.</returns>
        byte GetPOSId(String pos)
        {
            if (pos == "n")
                return 1;
            if (pos == "v")
                return 2;
            if (pos == "a")
                return 3;
            if (pos == "r")
                return 4;
            if (pos == "s")
                return 5;
            return 0;
        }

        /// <summary>
        /// Deletes all converted data from the database tables (synchronously).
        /// NOTE(review): Exceptions/Sentences/SenseSentenceAssociations are not
        /// cleared here — confirm whether that is intentional.
        /// </summary>
        void CleanUp()
        {
            dbAccessor.ExecuteNonQuery(@"DELETE FROM Categories;
                                DELETE FROM PartsOfSpeech;
                                DELETE FROM Relations;
                                DELETE FROM RelationTypes;
                                DELETE FROM Senses;
                                DELETE FROM Synsets;
                                DELETE FROM Words;");
        }

        /// <summary>
        /// Parses the WordNet "lexnames" file and inserts each lexicographer file
        /// (id, name, part of speech) into the Categories table.
        /// </summary>
        /// <returns>true on success; false on cancellation or an unreadable file.</returns>
        public bool ParseLexnames()
        {
            String line = String.Empty;
            String[] parts;
            String sqlStatement;

            
            try
            {
                //read file
                fileReader = new StreamReader(Path.Combine(wordNetPath, "lexnames"));
                workerThread.ReportProgress(progress, "Parsing lexnames...");

                while (!fileReader.EndOfStream)
                    try
                    {
                        //we stop the operation if we get the cancel signal
                        if (workerThread.CancellationPending)
                        {
                            fileReader.Close();
                            eventArgs.Cancel = true;
                            return false;
                        }
                        

                        line = fileReader.ReadLine();

                        //skip comments
                        // Header lines start with two spaces.
                        if (line.StartsWith("  "))
                            continue;

                        // Format: lexfile_number <TAB> lexfile_name <TAB> pos_number
                        parts = line.Split('\t');
                        if (parts.Length < 3)
                            continue;

                        //insert the values into the Categories table
                        // Values come from the trusted WordNet distribution; only the
                        // name is escaped (single quotes doubled) before formatting.
                        sqlStatement = String.Format("INSERT INTO Categories([CategoryId], [Name], [PosId]) VALUES({0},'{1}',{2});",
                            parts[0], parts[1].Replace("'", "''"), parts[2]);

                        dbAccessor.ExecuteNonQueryAsync(sqlStatement);
                    }
                    catch (Exception e)
                    {
                        // A bad line is logged and skipped; the file keeps processing.
                        logger.Error("Failed to parse line: " + line + " Error: " + e.Message);
                    }

                fileReader.Close();
            }
            catch (Exception e)
            {
                // An unreadable file is fatal for this step.
                logger.Error("Failed to parse lexnames. Error: " + e.Message);

                if (fileReader != null)
                    fileReader.Close();

                return false;
            }

            progress += 3;
            return true;
        }

        public bool ParseDataFiles()
        {
            String line = String.Empty;
            String[] parts;
            String synsetId, targetSynsetId;
            int j, i, wordStopIndex, ptrStartIndex, ptrStopIndex;
            int pos, indexWithinSynset;
            String sourceWordId, targetWordId, typeId, lemma;
            object marker, glossary;
            String fileName, filePath;
            int sourceWordIndex, targetWordIndex, glossaryPos, frameStopIndex;
            object result;

            //Each data file must be read twice. The first time we populate the Synsets and Senses tables,
            //and the second time we add relations between the Synsets in the Relations table

            //prepare the database accesor for bulk insert and cache other commands to increase speed
            if (!dbAccessor.BeginBulkExecTrans(
                "INSERT INTO Senses([WordId],[SynsetId],[Index],[Marker]) VALUES(@WordId,@SynsetId,@Index,@Marker);",
                 System.Data.IsolationLevel.ReadCommitted, true))
                return false;
            dbAccessor.CacheCommand("NewSynset",
                "INSERT INTO Synsets([SynsetId],[CategoryId],[PosId],[Glossary]) VALUES(@SynsetId,@CategoryId,@PosId,@Glossary);",
                 SqlCommandType.NonQuery);
            dbAccessor.CacheCommand("WIdFmSenses", "SELECT [WordId] FROM [Senses] WHERE [SynsetId]=@SynsetId AND [Index]=@Index",
                 SqlCommandType.Scalar);

            //process each data file
            for (i = 0; i < files.Length; i++)
            {
                fileName = "data." + files[i];
                filePath = Path.Combine(wordNetPath, fileName);

                try
                {
                    fileReader = new StreamReader(filePath);
                    workerThread.ReportProgress(progress, "Extracting synsets and senses from " + fileName + "...");
                    progress += 3;

                    while (!fileReader.EndOfStream)
                        try
                        {
                            //DATA FILE FORMAT:
                            //synset_offset  lex_filenum  ss_type    w_cnt  word    lex_id  [word   lex_id...]  p_cnt       [ptr...]  [frames...]  |   gloss  
                            //decimal(8)     decimal(2)   n|v|a|s|r  hex(2) string  hex(1)   string hex(1)      decimal(3)                              string
                            //ptr -> pointer_symbol  synset_offset  pos  source/target  
                            //frame -> f_cnt    +    f_num  w_num  [  +    f_num  w_num...]  

                            //we stop the operation if we get the cancel signal
                            if (workerThread.CancellationPending)
                            {
                                fileReader.Close();
                                dbAccessor.EndBulkExec(true);
                                eventArgs.Cancel = true;
                                return false;
                            }
                            line = fileReader.ReadLine();

                            //skip comments
                            if (line.StartsWith("  "))
                                continue;

                            //get the glossary text if it exists
                            glossaryPos = line.IndexOf('|');
                            if (glossaryPos < 0)
                            {
                                parts = line.Split(' ');
                                glossary = DBNull.Value;
                            }
                            else
                            {
                                parts = line.Substring(0, glossaryPos).Split(' ');
                                glossary = line.Substring(glossaryPos + 1, line.Length - glossaryPos - 1).Trim();
                            }

                            //insert the new synset into the Synsets table
                            synsetId = (i + 1).ToString() + parts[0];
                            //[SynsetId],[CategoryId],[PosId],[Glossary]
                            dbAccessor.ExecuteCachedCommand("NewSynset", out result, synsetId, parts[1],
                                GetPOSId(parts[2]), glossary);

                            //insert an entry into the Senses table for each word that belongs to this synset
                            //wordStartIndex = 4
                            //wordStopIndex = wordStartIndex + word_cnt * 2
                            if (int.TryParse(parts[3], System.Globalization.NumberStyles.HexNumber, null, out j))
                            {
                                wordStopIndex = j * 2 + 4;
                                indexWithinSynset = 0;

                                for (j = 4; j < wordStopIndex; j+=2)
                                {
                                    indexWithinSynset++;
                                    //check if we are processing data.adj
                                    if (i != adjFileIndex)
                                    {
                                        lemma = parts[j];
                                        marker = DBNull.Value;
                                    }
                                    else
                                    {
                                        //if so, check for markers
                                        pos = parts[j].IndexOf('(');

                                        if (pos != -1)
                                        {
                                            lemma = parts[j].Substring(0, pos);
                                            marker = parts[j].Substring(pos + 1, parts[j].Length - pos - 2);
                                        }
                                        else
                                        {
                                            lemma = parts[j];
                                            marker = DBNull.Value;
                                        }
                                    }

                                    dbAccessor.ExecuteCachedCommand("GetWordId", out result, lemma);
                                    if (result != null)
                                    {
                                        //[WordId],[SynsetId],[Index],[Marker]
                                        dbAccessor.EnqueForBulkExec(result.ToString(), synsetId, 
                                            indexWithinSynset.ToString(), marker);
                                    }
                                    else
                                        logger.Error("WNError: Failed to get WordId for " + lemma);
                                }
                            }
                            else
                            {
                                logger.Error("WNError: Failed to get w_cnt from line: " + line);
                                continue;
                            }
                        }
                        catch (Exception e)
                        {
                            logger.Error("Failed to parse line: " + line + " Error: " + e.Message);
                        }

                    fileReader.Close();
                }
                catch (Exception e)
                {
                    logger.Error("Failed to parse " + filePath + " Error: " + e.Message);

                    if (fileReader != null)
                        fileReader.Close();

                    return false;
                }
            }

            //wait for the previous bulk insert operations to finish 
            //and prepare the database accesor to bulk insert data into the Relations table
            workerThread.ReportProgress(progress, "Finishing database operations...");
            dbAccessor.EndBulkExec(false);

            if (workerThread.CancellationPending)
            {
                eventArgs.Cancel = true;
                return false;
            }
            //rebuild the indexes on the Sense table to improve the performance of the queries that will be executed on this table
            dbAccessor.ExecuteNonQueryAsync("ALTER INDEX ALL ON Senses REBUILD;");
            progress += 8;
            if (!dbAccessor.BeginBulkExecTrans(
                @"INSERT INTO Relations ([SourceWordId],[SourceSynsetId],[TargetWordId],[TargetSynsetId],[TypeId]) 
                            VALUES(@SourceWordId,@SourceSynsetId,@TargetWordId,@TargetSynsetId,@TypeId)",
                System.Data.IsolationLevel.ReadCommitted, true))
                return false;
            List<String> wordIds = null;

            for (i = 0; i < files.Length; i++)
            {
                fileName = "data." + files[i];
                filePath = Path.Combine(wordNetPath, fileName);

                //if we are reading the verb file cache the insert frame sql command
                if (i == verbFileIndex)
                {
                    //FrameId(WordNetId), WordId, SynsetId
                    dbAccessor.CacheCommand("NewFrame",
                        @"INSERT INTO SenseSentenceAssociations(SentenceId, SenseId)
                          SELECT (SELECT SentenceId FROM Sentences WHERE WordNetId=@WordNetId AND IsFrame=1),
                          (SELECT SenseId FROM Senses WHERE WordId=@WordId AND SynsetId=@SynsetId)",
                        SqlCommandType.NonQuery);
                    wordIds = new List<string>(30);
                }

                try
                {
                    fileReader = new StreamReader(filePath); 
                    workerThread.ReportProgress(progress, "Extracting relations from " + fileName + "...");
                    progress += 3;

                    while (!fileReader.EndOfStream)
                        try
                        {
                            //DATA FILE FORMAT:
                            //synset_offset  lex_filenum  ss_type    w_cnt  word    lex_id  [word   lex_id...]  p_cnt       [ptr...]  [frames...]  |   gloss  
                            //decimal(8)     decimal(2)   n|v|a|s|r  hex(2) string  hex(1)   string hex(1)      decimal(3)                              string
                            //ptr -> pointer_symbol  synset_offset  pos  source/target  
                            //frame -> f_cnt    +    f_num  w_num  [  +    f_num  w_num...]  

                            //we stop the operation if we get the cancel signal
                            if (workerThread.CancellationPending)
                            {
                                fileReader.Close();
                                dbAccessor.EndBulkExec(true);
                                eventArgs.Cancel = true;
                                return false;
                            }
                            line = fileReader.ReadLine();

                            //skip comments
                            if (line.StartsWith("  "))
                                continue;

                            parts = line.Split(' ');
                            synsetId = (i + 1).ToString() + parts[0];

                            //the first word in the synset will always start at position 4
                            //wordStartIndex = 4
                            //wordStopIndex = wordStartIndex + word_cnt * 2

                            //try to read w_cnt 
                            if (int.TryParse(parts[3], System.Globalization.NumberStyles.HexNumber, null, out j))
                                wordStopIndex = j * 2 + 4;
                            else
                            {
                                logger.Error("WNError: Failed to get w_cnt from line: " + line);
                                continue;
                            }


                            //try to read p_cnt 
                            if (int.TryParse(parts[wordStopIndex], out j))
                            {
                                //if there are any pointers
                                if (j != 0)
                                {
                                    ptrStartIndex = wordStopIndex + 1;
                                    ptrStopIndex = ptrStartIndex + j * 4;

                                    //insert pointer relations
                                    for (int k = ptrStartIndex; k < ptrStopIndex; k += 4)
                                    {
                                        //get the source and target word index within the synset
                                        if (!int.TryParse(parts[k + 3].Substring(0, 2),
                                            System.Globalization.NumberStyles.HexNumber, null, out sourceWordIndex))
                                        {
                                            logger.Error("WNError: Failed to get sourceWordIndex from line: " + line);
                                            continue;
                                        }

                                        if (!int.TryParse(parts[k + 3].Substring(2, 2),
                                            System.Globalization.NumberStyles.HexNumber, null, out targetWordIndex))
                                        {
                                            logger.Error("WNError: Failed to get targetWordIndex from line: " + line);
                                            continue;
                                        }

                                        //get the RelationTypeId from our local cache
                                        if (relationTypes.TryGetValue(parts[k], out typeId) == false)
                                        {
                                            logger.Error("WNError: Failed to get RelationTypeId for " + parts[k]);
                                            continue;
                                        }

                                        targetSynsetId = GetPOSId(parts[k + 2]) + parts[k + 1];

                                        if (sourceWordIndex == 0 && targetWordIndex == 0)
                                        {
                                            //if both target and source index are 0, this is a semantic relation
                                            //[SourceWordId],[SourceSynsetId],[TargetWordId],[TargetSynsetId],[TypeId]
                                            dbAccessor.EnqueForBulkExec(DBNull.Value, synsetId, DBNull.Value, targetSynsetId, typeId);
                                        }
                                        else
                                        {   //lexical relation
                                            lemma = parts[4 + (sourceWordIndex - 1) * 2];

                                            //if we are reading data.adj remove any adjective markers
                                            if (i == adjFileIndex)
                                            {
                                                pos = lemma.IndexOf('(');

                                                if (pos > 0)
                                                    lemma = lemma.Substring(0, pos);
                                            }

                                            dbAccessor.ExecuteCachedCommand("GetWordId", out result, lemma);
                                            if (result != null)
                                                sourceWordId = result.ToString();
                                            else
                                            {
                                                logger.Error("WNError: Failed to get WordId for " + lemma);
                                                continue;
                                            }

                                            dbAccessor.ExecuteCachedCommand("WIdFmSenses", out result, targetSynsetId, targetWordIndex);
                                            if (result != null)
                                                targetWordId = result.ToString();
                                            else
                                            {
                                                logger.Warn(String.Format("WNError: Failed to get WordId for SynsetId={0} and Index={1}",
                                                    targetSynsetId, targetWordIndex));

                                                //if the synsetid-index combination was not found in the Senses table
                                                //it's likely that this is a duplicate word so we look it up in the corresponding data file
                                                long tempSynsetId;
                                                if (long.TryParse(parts[k + 1], out tempSynsetId))
                                                {
                                                    targetWordId = GetDuplicateWordId(GetPOSId(parts[k + 2]) - 1, tempSynsetId, targetWordIndex);
                                                    if (targetWordId.Length != 0)
                                                    {
                                                        logger.Debug("Succesfuly retrieved word id from the WordNet files.");
                                                    }
                                                    else
                                                        continue;
                                                }
                                                else
                                                {
                                                    logger.Error("WNError: Invalid synset id {0} at line {1} in file {3}.", parts[k + 1],
                                                        line, fileName);
                                                    continue;
                                                }
                                            }

                                            //[SourceWordId],[SourceSynsetId],[TargetWordId],[TargetSynsetId],[TypeId]
                                            dbAccessor.EnqueForBulkExec(sourceWordId, synsetId, targetWordId, targetSynsetId, typeId);
                                        }
                                    }
                                }
                                else
                                    ptrStopIndex = wordStopIndex + 1;

                                //if we are reading data.verb, load the verb frames to the db
                                if (i == verbFileIndex)
                                { 
                                    //try to read f_cnt
                                    if(int.TryParse(parts[ptrStopIndex], out j))
                                    {
                                        wordIds.Clear();
                                        frameStopIndex = ptrStopIndex + 1 + j * 3;
                                        for (j = ptrStopIndex + 1; j < frameStopIndex; j+=3)
                                        {
                                            //get the word to which the frame refers
                                            if (!int.TryParse(parts[j + 2], System.Globalization.NumberStyles.HexNumber,
                                                null, out sourceWordIndex))
                                            {
                                                logger.Error("WNError: Failed to get word index from frames in line: " + line);
                                                continue;
                                            }

                                            //the frame points to a word within the synset
                                            if (sourceWordIndex != 0)
                                            {
                                                if (wordIds.Count == 0)
                                                {
                                                    lemma = parts[4 + (sourceWordIndex - 1) * 2];
                                                    dbAccessor.ExecuteCachedCommand("GetWordId", out result, lemma);

                                                    if (result != null)
                                                        sourceWordId = result.ToString();
                                                    else
                                                    {
                                                        logger.Error("WNError: Failed to get wordId for lemma: " + lemma);
                                                        continue;
                                                    }
                                                }
                                                else
                                                {
                                                    sourceWordId = wordIds[sourceWordIndex - 1];
                                                    if (sourceWordId.Length == 0)
                                                        continue;
                                                }

                                                //WordNetId, WordId, SynsetId
                                                dbAccessor.ExecuteCachedCommand("NewFrame", out result,
                                                    parts[j + 1], sourceWordId, synsetId);
                                            }
                                            //the frame points to the entire synset
                                            else
                                            {
                                                //if so associate the frame with every word in the synset
                                                if(wordIds.Count == 0)
                                                    for (int k = 4; k < wordStopIndex; k += 2)
                                                    {
                                                        dbAccessor.ExecuteCachedCommand("GetWordId", out result, parts[k]);

                                                        if (result != null)
                                                        {
                                                            sourceWordId = result.ToString();
                                                            wordIds.Add(sourceWordId);
                                                        }
                                                        else
                                                        {
                                                            wordIds.Add(String.Empty);
                                                            logger.Error("WNError: Failed to get wordId for lemma: " + parts[k]);
                                                            continue;
                                                        }

                                                        //WordNetId, WordId, SynsetId
                                                        dbAccessor.ExecuteCachedCommand("NewFrame", out result,
                                                            parts[j + 1], sourceWordId, synsetId);
                                                    }
                                                else
                                                    for (int k = 0; k < wordIds.Count; k++)
                                                    { 
                                                        if(wordIds[k].Length >0)
                                                            //WordNetId, WordId, SynsetId
                                                            dbAccessor.ExecuteCachedCommand("NewFrame", out result,
                                                                parts[j + 1], wordIds[k], synsetId);
                                                    }
                                            }
                                        }
                                    }
                                    else
                                    {
                                        logger.Error("WNError: Failed to get f_cnt from line: " + line);
                                        continue;
                                    }
                                }
                            }
                            else
                            {
                                logger.Error("WNError: Failed to get p_cnt from line: " + line);
                                continue;
                            }
                        }
                        catch (Exception e)
                        {
                            logger.Error("Failed to parse line: " + line + " Error: " + e.Message);
                        }

                    fileReader.Close();
                }
                catch (Exception e)
                {
                    logger.Error("Failed to parse " + filePath + " Error: " + e.Message);

                    if (fileReader != null)
                        fileReader.Close();

                    return false;
                }

                if (i == verbFileIndex)
                {
                    dbAccessor.DeleteCachedCommand("NewFrame");
                    wordIds = null;
                }
            }

            workerThread.ReportProgress(progress, "Finishing database operations...");
            dbAccessor.EndBulkExec(false);

            if (workerThread.CancellationPending)
            {
                eventArgs.Cancel = true;
                return false;
            }

            progress += 8;
            return true;
        }

        /// <summary>
        /// Retrieves the WordId of a word that could not be resolved through the Senses
        /// table by reading the synset's line directly from the WordNet data file.
        /// </summary>
        /// <param name="posId">Zero-based index into <c>files</c>; 4 is remapped to 2 (adjective file).</param>
        /// <param name="synsetId">Synset id, which also serves as the byte offset of the synset's line in the data file.</param>
        /// <param name="index">One-based index of the word within the synset.</param>
        /// <returns>The word id as a string, or <see cref="String.Empty"/> on failure.</returns>
        private String GetDuplicateWordId(int posId, long synsetId, int index)
        {
            try
            {
                String[] parts;
                object result;

                //satellite adjectives are stored in the adjective data file
                if (posId == 4)
                    posId = 2;

                //'using' guarantees the reader is disposed even if Seek/ReadLine throws
                //(it was previously leaked on the exception path)
                using (StreamReader reader = new StreamReader(Path.Combine(wordNetPath, "data." + files[posId])))
                {
                    reader.DiscardBufferedData();
                    reader.BaseStream.Seek(synsetId, SeekOrigin.Begin);
                    parts = reader.ReadLine().Split(' ');
                }

                //words start at token 4 and occupy two tokens each (lemma, lex_id)
                dbAccessor.ExecuteCachedCommand("GetWordId", out result, parts[4 + (index - 1) * 2]);
                if (result != null)
                    return result.ToString();
                else
                {
                    logger.Error("WNError: Failed to get wordId for lemma: " + parts[4 + (index - 1) * 2]);
                    return String.Empty;
                }
            }
            catch (Exception e)
            {
                logger.Error("Failed to get id of duplicate word from file {0}. Error: {1}", "data." + files[posId], e.Message);
                return String.Empty;
            }
        }

        /// <summary>
        /// Parses index.sense and updates the SenseNumber and TagCount columns of the
        /// Senses table for every sense key listed in the file, using a single bulk
        /// transaction.
        /// </summary>
        /// <returns>true on success; false on failure or when the operation was cancelled.</returns>
        public bool ParseSenseFile()
        {
            String line;
            String[] parts;
            String lemma, categoryId;
            String wordId;
            String posId;
            object result;

            //all sense updates are queued and committed as one bulk transaction
            if (!dbAccessor.BeginBulkExecTrans(
                "UPDATE [Senses] SET [SenseNumber]=@SenseNumber, [TagCount]=@TagCount WHERE [WordId]=@WordId AND [SynsetId]=@SynsetId",
                 System.Data.IsolationLevel.ReadCommitted, true))
                return false;

            try
            {
                fileReader = new StreamReader(Path.Combine(wordNetPath, senseFile));
                workerThread.ReportProgress(progress, "Updating word senses data...");
                progress += 5;

                while (!fileReader.EndOfStream)
                {
                    //format: sense_key  synset_offset  sense_number  tag_cnt 
                    //sense_key -> lemma % lex_sense  
                    //lex_sense -> ss_type:lex_filenum:lex_id:head_word:head_id 

                    //we stop the operation if we get the cancel signal
                    if (workerThread.CancellationPending)
                    {
                        fileReader.Close();
                        dbAccessor.EndBulkExec(true);
                        eventArgs.Cancel = true;
                        return false;
                    }
                    line = fileReader.ReadLine();

                    //skip comments
                    if (line.StartsWith("  "))
                        continue;

                    parts = line.Split(' ');

                    //decode the sense key and extract the word lemma, posId and categoryId
                    if (InterpretSenseKey(parts[0], out lemma, out posId, out categoryId))
                    {
                        //get the lemma's wordid
                        dbAccessor.ExecuteCachedCommand("GetWordId", out result, lemma);
                        if (result != null)
                            wordId = result.ToString();
                        else
                        {
                            logger.Error("WNError: Failed to get WordId for " + lemma);
                            continue;
                        }

                        //satellite adjectives(5) are also adjectives(3)
                        if (posId == "5")
                            posId = "3";

                        //update the Senses table
                        //SenseNumber, TagCount, WordId, SynsetId
                        //(the synset id column stores posId concatenated with the synset offset)
                        dbAccessor.EnqueForBulkExec(parts[2], parts[3], wordId, posId + parts[1]);
                    }
                    else
                    {
                        logger.Error("WNError: Failed to interpret sense key in line: " + line);
                        continue;
                    }
                }

                fileReader.Close();
                workerThread.ReportProgress(progress, "Finishing database operations...");
                dbAccessor.EndBulkExec(false);
                progress += 5;
            }
            catch (Exception e)
            {
                logger.Error("Failed to parse sense file. Error: " + e.Message);

                if (fileReader != null)
                    fileReader.Close();

                //abort the pending bulk transaction so it is not left open
                //(mirrors the cancellation path above)
                dbAccessor.EndBulkExec(true);

                return false;
            }

            return true;
        }

        /// <summary>
        /// Parses the *.exc exception files (irregular inflected forms) and loads them
        /// into the Exceptions table, one file per part of speech (PosId = file index + 1).
        /// </summary>
        /// <returns>true when done (per-file and per-line failures are logged and skipped);
        /// false when the operation was cancelled.</returns>
        public bool ParseExceptionFiles()
        {
            String fileName, filePath, line = String.Empty;
            StreamReader fileReader = null;
            String[] parts;
            //word ids of the standard forms from the previous line, used to skip
            //repeated lookups when consecutive lines share the same standard forms;
            //an empty entry marks a lemma that was not found in the Words table
            List<String> previousIds = new List<String>(4);
            String previousStandardWords = String.Empty, wordId = String.Empty;
            String exceptionForm, standardForms;
            int pos;
            object result;

            dbAccessor.CacheCommand("NewException",
                "INSERT INTO Exceptions([WordId],[Exception],[PosId]) VALUES(@WordId,@Exception,@PosId);",
                SqlCommandType.NonQuery);
            
            for (int i = 0; i < files.Length; i++)
            {
                fileName = files[i] + ".exc";
                filePath = Path.Combine(wordNetPath, fileName);

                try
                {
                    fileReader = new StreamReader(filePath); 
                    workerThread.ReportProgress(progress, "Extracting exceptions from " + fileName + "...");
                    progress += 1;

                    while (!fileReader.EndOfStream)
                        try
                        {
                            //we stop the operation if we get the cancel signal
                            if (workerThread.CancellationPending)
                            {
                                fileReader.Close();
                                eventArgs.Cancel = true;
                                return false;
                            }
                            line = fileReader.ReadLine();

                            //skip comments
                            if (line.StartsWith("  "))
                                continue;

                            //there must be at least two words on the line
                            pos = line.IndexOf(' ');
                            if(pos < 0)
                            {
                                logger.Error("Invalid line: " + line);
                                continue;
                            }

                            //format: exception_form  standard_form [standard_form ...]
                            exceptionForm = line.Substring(0, pos);
                            standardForms = line.Substring(pos + 1, line.Length - 1 - pos).Trim();
                            parts = standardForms.Split(' ');
                            
                            //if there is only one standard word form
                            if (parts.Length == 1)
                            {
                                //if the current standard word form is different than the one in the previous line
                                if (standardForms != previousStandardWords)
                                {
                                    //get the wordid for the lemma
                                    dbAccessor.ExecuteCachedCommand("GetWordId", out result, standardForms);
                                    if (result != null)
                                        wordId = result.ToString();
                                    else
                                    {
                                        logger.Error("Failed to get word id for lemma " + standardForms);
                                        continue;
                                    }

                                    //store the current standard word form
                                    previousStandardWords = standardForms;
                                }

                                //if the standard word form hasn't changed then wordId will hold the
                                //previous value so there's no need to update it
                                //insert the exception 
                                //[WordId],[Exception],[PosId]
                                dbAccessor.ExecuteCachedCommand("NewException", out result, wordId,
                                    exceptionForm, i+1);
                            }
                            //multiple standard word forms
                            else
                            {
                                //if the standard word forms from the previous line are not the same 
                                //as the current ones
                                if (standardForms != previousStandardWords)
                                {
                                    previousIds.Clear();

                                    //get the word ids from the db for each of them
                                    for (pos = 0; pos < parts.Length; pos++)
                                    {
                                        dbAccessor.ExecuteCachedCommand("GetWordId", out result, parts[pos]);
                                        
                                        if (result != null)
                                            wordId = result.ToString();
                                        else
                                        {
                                            previousIds.Add(String.Empty);
                                            logger.Error("Failed to get word id for lemma " + parts[pos]);
                                            continue;
                                        }

                                        previousIds.Add(wordId);

                                        //insert the exception 
                                        //[WordId],[Exception],[PosId]
                                        dbAccessor.ExecuteCachedCommand("NewException", out result, wordId,
                                            exceptionForm, i+1);
                                    }

                                    previousStandardWords = standardForms;
                                }
                                else
                                {
                                    //else use the previously retrieved word ids
                                    //NB: wordIds of length zero indicate that the
                                    //word was not found in the Words table so we're discarding it
                                    for (pos = 0; pos < previousIds.Count; pos++)
                                    {
                                        if (previousIds[pos].Length == 0)
                                            continue;

                                        //[WordId],[Exception],[PosId]
                                        dbAccessor.ExecuteCachedCommand("NewException", out result, previousIds[pos],
                                            exceptionForm, i+1);
                                    }
                                }
                            }
                        }
                        catch (Exception e)
                        {
                            logger.Error("Failed to parse line: " + line + ", Error: " + e.Message);
                        }

                    //close the reader on the success path as well (it was previously
                    //only closed on cancellation or error, leaking one reader per file)
                    fileReader.Close();
                }
                catch (Exception e)
                {
                    logger.Error("Failed to parse {0}. Error: {1}", fileName, e.Message);

                    if (fileReader != null)
                        fileReader.Close();
                }
            }

            dbAccessor.EndBulkExec(false);

            if (workerThread.CancellationPending)
            {
                eventArgs.Cancel = true;
                return false;
            }

            progress += 1;
            return true;
        }

        /// <summary>
        /// Loads the verb example sentences and generic frames from sents.vrb and
        /// frames.vrb into the Sentences table.
        /// </summary>
        /// <returns>true when done (parse errors are logged and skipped); false when cancelled.</returns>
        private bool ParseVerbSentenceFiles()
        {
            StreamReader reader = null;
            String currentLine = String.Empty;

            //sents.vrb is index 0 and frames.vrb index 1; the file index doubles
            //as the IsFrame flag stored with each row
            for (int fileIndex = 0; fileIndex < verbSentenceFiles.Length; fileIndex++)
            {
                try
                {
                    reader = new StreamReader(Path.Combine(wordNetPath, verbSentenceFiles[fileIndex]));
                    workerThread.ReportProgress(progress, "Extracting example sentences from " + verbSentenceFiles[fileIndex] + "...");
                    progress += 2;

                    while (!reader.EndOfStream)
                    {
                        try
                        {
                            //we stop the operation if we get the cancel signal
                            if (workerThread.CancellationPending)
                            {
                                reader.Close();
                                eventArgs.Cancel = true;
                                return false;
                            }

                            currentLine = reader.ReadLine();

                            //skip comments
                            if (currentLine.StartsWith("  "))
                                continue;

                            //lines without a separator carry no sentence text
                            int separatorPos = currentLine.IndexOf(' ');
                            if (separatorPos <= 0)
                                continue;

                            //split the line into the WordNet id and the quote-escaped text
                            String wordNetId = currentLine.Substring(0, separatorPos);
                            String sentenceText = currentLine.Substring(separatorPos + 1, currentLine.Length - separatorPos - 1).
                                Trim().Replace("'", "''");

                            dbAccessor.ExecuteNonQueryAsync(
                                String.Format("INSERT INTO Sentences([WordNetId], [IsFrame], [Text]) VALUES" +
                                "({0},{1},'{2}');", wordNetId, fileIndex, sentenceText));
                        }
                        catch (Exception e)
                        {
                            logger.Error("Failed to parse line: " + currentLine + " Error: " + e.Message);
                        }
                    }

                    reader.Close();
                }
                catch (Exception e)
                {
                    logger.Error("Failed to parse {0}. Error: {1}", verbSentenceFiles[fileIndex], e.Message);

                    if (reader != null)
                        reader.Close();
                }
            }

            progress += 1;
            return true;
        }

        /// <summary>
        /// Parses sentidx.vrb and associates each verb sense with its example sentences
        /// by populating the SenseSentenceAssociations table.
        /// </summary>
        /// <returns>true on success; false on failure or when the operation was cancelled.</returns>
        private bool ParseSentidx()
        {
            StreamReader fileReader = null;
            String line = String.Empty;
            int pos;
            String wordId;
            String posId, lemma, categoryId, sentenceIds;
            object result;

            //parse sentidx.vrb
            try
            {
                fileReader = new StreamReader(Path.Combine(wordNetPath, "sentidx.vrb"));
                workerThread.ReportProgress(progress, "Associating word senses with examples...");
                
                while (!fileReader.EndOfStream)
                    try
                    {
                        //we stop the operation if we get the cancel signal
                        if (workerThread.CancellationPending)
                        {
                            fileReader.Close();
                            eventArgs.Cancel = true;
                            return false;
                        }
                        line = fileReader.ReadLine();

                        //skip comments
                        if (line.StartsWith("  "))
                            continue;

                        //format: sense_key  sentence_id[,sentence_id...]
                        pos = line.IndexOf(' ');
                        if (pos < 0)
                        {
                            logger.Error("WNError: Invalid line: " + line);
                            continue;
                        }

                        //decode the sense key and extract the word lemma, posId and categoryId
                        if (InterpretSenseKey(line.Substring(0, pos), out lemma, out posId, out categoryId))
                        {
                            //get the lemma's wordid
                            dbAccessor.ExecuteCachedCommand("GetWordId", out result, lemma);
                            if (result != null)
                                wordId = result.ToString();
                            else
                            {
                                logger.Error("WNError: Failed to get WordId for " + lemma);
                                continue;
                            }

                            //the comma-separated id list is injected verbatim into the IN clause;
                            //it comes straight from the WordNet distribution files
                            sentenceIds = line.Substring(pos + 1, line.Length - pos - 1).Trim();
                            if (sentenceIds.Length > 0)
                                dbAccessor.ExecuteNonQueryAsync(String.Format(
                                    @"INSERT INTO SenseSentenceAssociations(SentenceId, SenseId)
                                      SELECT Sentences.SentenceId, SS.SenseId FROM Sentences
                                      CROSS JOIN (SELECT Senses.SenseId FROM Senses
			                                      LEFT JOIN Synsets ON Synsets.SynsetId = Senses.SynsetId
			                                      WHERE Senses.WordId = {0} AND Synsets.PosId = {1}) AS SS
                                      WHERE Sentences.WordNetId IN ({2}) AND Sentences.IsFrame = 0",
                                      wordId, posId, sentenceIds));
                            else
                                logger.Error("WNError: No sentence ids found for word {0}. Line: {1}", lemma, line);
                        }
                        else
                            logger.Error("WNError: Failed to interpret sense key in line: " + line);
                    }
                    catch (Exception e)
                    {
                        logger.Error("Failed to parse line: " + line + " Error: " + e.Message);
                    }

                //the reader was previously never closed on the success path
                fileReader.Close();
            }
            catch (Exception e)
            {
                logger.Error("Failed to parse sentidx.vrb. Error: " + e.Message);

                //the reader was previously never closed on the error path either
                if (fileReader != null)
                    fileReader.Close();

                return false;
            }

            progress += 5;
            return true;
        }

        /// <summary>
        /// Decodes a WordNet sense key (lemma%ss_type:lex_filenum:lex_id:head_word:head_id)
        /// into its lemma, part-of-speech id (ss_type) and lexicographer category id (lex_filenum).
        /// </summary>
        /// <param name="senseKey">The raw sense key string.</param>
        /// <param name="lemma">The word lemma, or null on failure.</param>
        /// <param name="posId">The ss_type value, or null on failure.</param>
        /// <param name="categoryId">The lex_filenum value, or null on failure.</param>
        /// <returns>true if the key was decoded; false otherwise.</returns>
        private bool InterpretSenseKey(String senseKey, out String lemma, out String posId, out String categoryId)
        {
            int pctPos = senseKey.IndexOf('%');

            if (pctPos < 0)
            {
                lemma = posId = categoryId = null;
                return false;
            }

            lemma = senseKey.Substring(0, pctPos);
            Match lexSense = senseKeyRegex.Match(senseKey, pctPos + 1);

            //a non-matching Match can still expose (empty) group entries, so verify
            //Success explicitly instead of relying on the group count alone
            if (!lexSense.Success || lexSense.Groups.Count < 3)
            {
                lemma = posId = categoryId = null;
                return false;
            }

            posId = lexSense.Groups[1].Value;
            categoryId = lexSense.Groups[2].Value;

            return true;
        }

        /// <summary>
        /// Applies post-import corrections to the imported relation data.
        /// </summary>
        private void ExecuteFixes()
        {
            //31 reciprocal relations are missing from WordNet 3.0 
            //so they should be added
            //TypeId 13 appears to denote a symmetric relation type (per the original
            //comment, "reciprocal") - TODO confirm against the relationTypes mapping;
            //the query inserts the mirrored (target->source) row wherever only one
            //direction currently exists
            dbAccessor.ExecuteNonQueryAsync(
                @"INSERT INTO [Relations] ([SourceWordId],[SourceSynsetId],[TargetWordId],[TargetSynsetId],[TypeId])
                  SELECT [TargetWordId], [TargetSynsetId], [SourceWordId], [SourceSynsetId], [TypeId] FROM Relations [r]
                  WHERE [TypeId]=13 AND 
                 (SELECT COUNT(*) FROM Relations WHERE [TypeId]=13 AND [SourceSynsetId]=[r].[TargetSynsetId] AND
                 [TargetSynsetId]=[r].[SourceSynsetId])=0;");
        }
    }
}
