﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using Lucene.Net.Documents;
using Vit.Data;

namespace Vit.PlugIn.Lucene.Converters
{
    /// <summary>
    /// Converts a <see cref="FileInfo"/> into a Lucene <see cref="Document"/> with
    /// "path", "modified", and "contents" fields, suitable for indexing.
    /// </summary>
    public class FileDocumentConverter : IDataConverter
    {
        #region IDataConverter Members

        /// <summary>
        /// Builds a Lucene document from a file.
        /// </summary>
        /// <param name="obj">The source object; expected to be a <see cref="FileInfo"/>.</param>
        /// <param name="targetType">Unused by this converter.</param>
        /// <param name="parameter">Unused by this converter.</param>
        /// <returns>
        /// A <see cref="Document"/> describing the file, or <c>null</c> when
        /// <paramref name="obj"/> is not a <see cref="FileInfo"/>.
        /// </returns>
        public object Convert(object obj, Type targetType, object parameter)
        {
            FileInfo file = obj as FileInfo;

            if (file == null)
                return null;

            // make a new, empty document
            Document doc = new Document();

            // Add the path of the file as a field named "path".  Use a field that is 
            // indexed (i.e. searchable), but don't tokenize the field into words.
            doc.Add(new Field("path", file.FullName, Field.Store.YES, Field.Index.UN_TOKENIZED));

            // Add the last modified date of the file as a field named "modified".  Use
            // a field that is indexed (i.e. searchable), but don't tokenize the field
            // into words.
            // BUGFIX: the previous code passed file.LastWriteTime.Millisecond (the
            // 0-999 millisecond component) to DateTools.TimeToString, which expects
            // milliseconds since the epoch — producing a meaningless, near-constant
            // value for every file. DateToString takes the DateTime directly.
            doc.Add(new Field("modified", DateTools.DateToString(file.LastWriteTime, DateTools.Resolution.MINUTE), Field.Store.YES, Field.Index.UN_TOKENIZED));

            // Add the contents of the file to a field named "contents".  Specify a Reader,
            // so that the text of the file is tokenized and indexed, but not stored.
            // Note that the reader uses the system's default encoding; if that's not
            // the file's actual encoding, searching for special characters will fail.
            // The reader is closed by Lucene once the document has been indexed.
            doc.Add(new Field("contents", new System.IO.StreamReader(file.FullName, System.Text.Encoding.Default)));

            // return the document
            return doc;
        }

        /// <summary>
        /// Not supported: a Lucene document cannot be converted back into a file.
        /// </summary>
        /// <exception cref="NotImplementedException">Always thrown.</exception>
        public object ConvertBack(object obj, Type targetType, object parameter)
        {
            throw new NotImplementedException();
        }

        #endregion
    }
}
