﻿////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// <copyright>Copyright 2008-2011 Andreas Huber Doenni</copyright>
// Distributed under the GNU General Public License version 2 (GPLv2).
// See accompanying file License.txt or copy at http://phuse.codeplex.com/license.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////

namespace Phuse.Mime
{
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Linq;
    using System.Text;

    using Phuse.IO;
    using Phuse.Text;

    /// <summary>Provides helper methods for header fields.</summary>
    internal static class HeaderFieldHelper
    {
        // RFC 5322, 2.1.1: each line of a header field SHOULD be no more than 78 characters (excluding CRLF).
        private const int MaxLineLength = 78;

        // Number of payload characters that fit on one line once the trailing line break is accounted for.
        private static readonly int MaxCharactersPerLine = MaxLineLength - StreamHelper.LineBreak.Length;
        private static readonly int MaxWordLength = MaxCharactersPerLine - 1; // -1 to account for folding white space
        private static readonly Token EncodedWordsSeparator = Token.Create(new byte[] { (byte)' ' }, 0, 1, true, false);

        // Maximum number of unencoded bytes that fit into a single encoded word. An empty QuotedPrintableToken's
        // Length is exactly the "=?utf-8?q?...?=" prefix/postfix overhead, which both encodings share.
        private static readonly int MaxBase64OriginalLength =
            Base64Stream.GetMaxOriginalLength(MaxWordLength - new QuotedPrintableToken(new byte[0], 0, 0, 0).Length);

        /// <summary>Specifies what characters must be encoded (everything except atext and whitespace). See
        /// http://www.ietf.org/rfc/rfc5322.txt, 3.2.3.</summary>
        private static readonly bool[] EncodeTable = new[]
        {
         // 0x00,  0x01,  0x02,  0x03,  0x04,  0x05,  0x06,  0x07,  0x08,  0x09,  0x0a,  0x0b,  0x0c,  0x0d,  0x0e,  0x0f
            true,  true,  true,  true,  true,  true,  true,  true,  true,  false, true,  true,  true,  true,  true,  true,  // 0x00
            true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  // 0x10
            false, false, true,  false, false, false, false, false, true,  true,  false, false, true,  false, true,  false, // 0x20
            false, false, false, false, false, false, false, false, false, false, true,  true,  true,  false, true,  false, // 0x30
            true,  false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, // 0x40
            false, false, false, false, false, false, false, false, false, false, false, true,  true,  true,  false, false, // 0x50
            false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, // 0x60
            false, false, false, false, false, false, false, false, false, false, false, false, false, false, false, true,  // 0x70
            true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  // 0x80
            true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  // 0x90
            true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  // 0xa0
            true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  // 0xb0
            true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  // 0xc0
            true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  // 0xd0
            true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  // 0xe0
            true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true,  true   // 0xf0
        };

        ////////////////////////////////////////////////////////////////////////////////////////////////////////////////

        /// <summary>Writes the header field composed of <paramref name="name"/> and <paramref name="bodyParts"/> to
        /// <paramref name="stream"/>, folding it such that lines do not exceed <see cref="MaxLineLength"/>
        /// characters where possible. The field is always terminated with a line break.</summary>
        internal static void FoldHeaderField(string name, HeaderFieldBodyPart[] bodyParts, Stream stream)
        {
            var tokens = Tokenize(name, bodyParts);
            int currentIndex = 0;
            int currentColumn = 0;
            int currentLength;
            int nextIndex;

            // Each iteration writes one chunk of tokens (optional leading whitespace followed by a run of word
            // tokens). If the chunk would overflow the current line, the field is folded before the chunk; the
            // chunk's leading whitespace then serves as the folding white space at the start of the next line.
            while ((nextIndex = FindNextWhitespaceIndex(tokens, currentIndex, out currentLength)) > currentIndex)
            {
                if ((currentColumn > 0) && (currentColumn + currentLength > MaxCharactersPerLine))
                {
                    StreamHelper.WriteLineBreak(stream);
                    currentColumn = 0;
                }

                for (; currentIndex < nextIndex; ++currentIndex)
                {
                    tokens[currentIndex].WriteTo(stream);
                }

                currentColumn += currentLength;
            }

            StreamHelper.WriteLineBreak(stream);
        }

        ////////////////////////////////////////////////////////////////////////////////////////////////////////////////

        /// <summary>Starting at <paramref name="startIndex"/>, skips an optional run of whitespace tokens followed
        /// by a run of non-whitespace tokens and returns the index of the first token after that chunk (or
        /// <c>tokens.Count</c> if it extends to the end). <paramref name="length"/> receives the total character
        /// length of the chunk, including its leading whitespace.</summary>
        private static int FindNextWhitespaceIndex(List<Token> tokens, int startIndex, out int length)
        {
            bool foundNonWhitespace = false;
            length = 0;
            Token token;
            int index;

            for (index = startIndex; index < tokens.Count; ++index)
            {
                token = tokens[index];

                // The condition is true at the two transitions: whitespace -> word (start counting words) and
                // word -> whitespace (chunk complete).
                if (foundNonWhitespace == token.IsWhitespace)
                {
                    if (foundNonWhitespace)
                    {
                        return index;
                    }
                    else
                    {
                        foundNonWhitespace = true;
                    }
                }

                length += token.Length;
            }

            return index;
        }

        /// <summary>Tokenizes the full header field ("name: body") and splits any word that is too long to fit on
        /// a single line into multiple encoded words.</summary>
        private static List<Token> Tokenize(string name, HeaderFieldBodyPart[] bodyParts)
        {
            // The field name is always pure ASCII and must never be encoded.
            var tokens = Tokenize(name + ": ", false).ToList();
            bool canContainEncodedWords;
            var bodyTokens = Tokenize(bodyParts, out canContainEncodedWords);

            tokens.AddRange(bodyTokens);

            if (canContainEncodedWords)
            {
                // Only encoded words may legally be split (RFC 2047, 2: an encoded word is limited to 75
                // characters and long texts are represented as multiple encoded words).
                SplitOverlongWords(tokens);
            }

            return tokens;
        }

        /// <summary>Tokenizes all <paramref name="bodyParts"/>. <paramref name="canContainEncodedWords"/> is set to
        /// <c>true</c> if at least one part allows encoded words.</summary>
        private static List<Token> Tokenize(HeaderFieldBodyPart[] bodyParts, out bool canContainEncodedWords)
        {
            canContainEncodedWords = false;
            var bodyTokens = new List<Token>();
            HeaderFieldBodyPart bodyPart;

            for (var index = 0; index < bodyParts.Length; ++index)
            {
                bodyPart = bodyParts[index];
                canContainEncodedWords = canContainEncodedWords || bodyPart.CanContainEncodedWords;
                bodyTokens.AddRange(Tokenize(bodyPart.Part, bodyPart.CanContainEncodedWords));
            }

            return bodyTokens;
        }

        /// <summary>Splits <paramref name="str"/> into alternating whitespace and word tokens, encoding words where
        /// necessary (only allowed when <paramref name="canContainEncodedWords"/> is <c>true</c>).</summary>
        private static IEnumerable<Token> Tokenize(string str, bool canContainEncodedWords)
        {
            int length;
            var bytes = GetBytes(str, canContainEncodedWords, out length);
            int offset = 0;

            if (canContainEncodedWords && (length > 0) &&
                (IsWhitespaceByte(bytes[0]) || IsWhitespaceByte(bytes[length - 1])))
            {
                // If we're starting or ending in whitespace, we best encode the whole thing, otherwise ws is likely to
                // be skipped when the message is parsed later.
                yield return Token.Create(bytes, offset, length, false, true);
            }
            else
            {
                while (offset < length)
                {
                    yield return GetNextToken(bytes, ref offset, length, canContainEncodedWords);
                }
            }
        }

        /// <summary>Splits every token longer than <see cref="MaxWordLength"/> into multiple encoded words
        /// separated by single spaces, so that each resulting token fits on one folded line.</summary>
        private static void SplitOverlongWords(List<Token> tokens)
        {
            for (int tokenIndex = 0; tokenIndex < tokens.Count; ++tokenIndex)
            {
                var currentToken = tokens[tokenIndex];

                while (currentToken.Length > MaxWordLength)
                {
                    var newTokens = currentToken.Split(MaxBase64OriginalLength);
                    tokens[tokenIndex] = newTokens[0];
                    tokens.Insert(++tokenIndex, EncodedWordsSeparator);
                    currentToken = newTokens[1];
                    tokens.Insert(++tokenIndex, currentToken);
                }
            }
        }

        /// <summary>Converts <paramref name="str"/> into bytes (UTF-8 if encoded words are allowed, otherwise
        /// ASCII). The returned array may be longer than <paramref name="length"/>, which receives the number of
        /// bytes actually used.</summary>
        private static byte[] GetBytes(string str, bool canContainEncodedWords, out int length)
        {
            var result = new byte[Encoding.UTF8.GetMaxByteCount(str.Length)];
            length = (canContainEncodedWords ? Encoding.UTF8 : EncodingObjects.ASCII).GetBytes(str, 0, str.Length, result, 0);
            return result;
        }

        /// <summary>Scans <paramref name="array"/> from <paramref name="offset"/> and returns the next token: a
        /// maximal run of whitespace, a maximal run of non-whitespace, or (if a byte requiring encoding is found)
        /// an encoded word covering the entire remainder. Advances <paramref name="offset"/> past the token.</summary>
        private static Token GetNextToken(byte[] array, ref int offset, int pastEnd, bool canContainEncodedWords)
        {
            int originalOffset = offset;
            bool isFirstWhiteSpace = IsWhitespaceByte(array[originalOffset]);
            byte current;

            for (; offset < pastEnd; ++offset)
            {
                current = array[offset];

                if (canContainEncodedWords && EncodeTable[current])
                {
                    // If we must encode, then encode whatever remains. This can be space inefficient if there are very
                    // few bytes that need to be encoded and the resulting encoded word is so long that it must be split
                    // over several lines. However, in practice this should occur relatively rarely.
                    offset = pastEnd;
                    return Token.Create(array, originalOffset, offset, isFirstWhiteSpace, true);
                }

                // The token ends where the whitespace-ness of the current byte differs from that of its first
                // byte. BUG FIX: the whitespace test must be fully parenthesized; without the inner parentheses a
                // leading '\t' terminated the scan immediately, producing a zero-length token and making the
                // caller's tokenization loop spin forever.
                if (isFirstWhiteSpace != IsWhitespaceByte(current))
                {
                    break;
                }
            }

            return Token.Create(array, originalOffset, offset, isFirstWhiteSpace, false);
        }

        /// <summary>Returns whether <paramref name="value"/> is folding white space (space or horizontal tab, see
        /// http://www.ietf.org/rfc/rfc5322.txt, 3.2.2).</summary>
        private static bool IsWhitespaceByte(byte value)
        {
            return (value == (byte)' ') || (value == (byte)'\t');
        }

        /// <summary>Represents a word or white space in a header field.</summary>
        private abstract class Token
        {
            private readonly byte[] array;
            private readonly int offset;
            private readonly int count;
            private readonly int length;
            private readonly bool isWhitespace;

            ////////////////////////////////////////////////////////////////////////////////////////////////////////////

            /// <summary>Gets the number of characters this token occupies on the line (for encoded words this
            /// includes the charset/encoding prefix and postfix).</summary>
            internal int Length
            {
                get { return this.length; }
            }

            /// <summary>Gets whether this token consists solely of white space.</summary>
            internal bool IsWhitespace
            {
                get { return this.isWhitespace; }
            }

            /// <summary>Splits this token into two encoded-word tokens, the first covering at most
            /// <paramref name="firstRawLength"/> raw bytes.</summary>
            internal Token[] Split(int firstRawLength)
            {
                int splitOffset = this.offset + firstRawLength;

                // A multi-octet character may not be split across adjacent encoded words, see
                // http://www.ietf.org/rfc/rfc2047.txt, 5.
                // These byte values indicate the second, third and fourth bytes of a multi-byte sequence, see
                // http://en.wikipedia.org/wiki/UTF-8
                while ((this.array[splitOffset] >= 128) && (this.array[splitOffset] < 192))
                {
                    --splitOffset;
                }

                var result = new Token[2];
                result[0] = Create(this.array, this.offset, splitOffset, false, true);
                result[1] = Create(this.array, splitOffset, this.offset + this.count, false, true);
                return result;
            }

            /// <summary>Writes the raw bytes of this token to <paramref name="stream"/>; derived classes wrap the
            /// stream with an encoder as needed.</summary>
            internal virtual void WriteTo(Stream stream)
            {
                stream.Write(this.array, this.offset, this.count);
            }

            /// <summary>Creates a token over <c>array[offset..pastEnd)</c>. When <paramref name="encode"/> is
            /// <c>true</c>, the cheaper of base64 and quoted-printable is chosen for the encoded word.</summary>
            internal static Token Create(byte[] array, int offset, int pastEnd, bool isWhitespace, bool encode)
            {
                var count = pastEnd - offset;

                if (encode)
                {
                    var base64RawLength = Base64Stream.GetEncodedLength(count);
                    var quotedPrintableRawLength =
                        QuotedPrintableStream.GetEncodedLength(array, offset, count, true);

                    if (base64RawLength < quotedPrintableRawLength)
                    {
                        return new Base64Token(array, offset, count, base64RawLength);
                    }
                    else
                    {
                        return new QuotedPrintableToken(array, offset, count, quotedPrintableRawLength);
                    }
                }
                else
                {
                    return new IdentityToken(array, offset, count, isWhitespace);
                }
            }

            ////////////////////////////////////////////////////////////////////////////////////////////////////////////

            /// <summary>Initializes a new instance of the <see cref="Token"/> class.</summary>
            protected Token(byte[] array, int offset, int count, int length, bool isWhitespace)
            {
                this.array = array;
                this.offset = offset;
                this.count = count;
                this.length = length;
                this.isWhitespace = isWhitespace;
            }
        }

        /// <summary>Represents a pure ASCII word or white space in a header field.</summary>
        private sealed class IdentityToken : Token
        {
            internal IdentityToken(byte[] array, int offset, int count, bool isWhitespace) :
                base(array, offset, count, count, isWhitespace)
            {
            }
        }

        /// <summary>Represents a base64-encoded word in a header field.</summary>
        private sealed class Base64Token : Token
        {
            private static readonly byte[] Prefix = EncodingObjects.ASCII.GetBytes("=?utf-8?b?");
            private static readonly byte[] Postfix = EncodingObjects.ASCII.GetBytes("?=");

            ////////////////////////////////////////////////////////////////////////////////////////////////////////////

            internal Base64Token(byte[] array, int offset, int count, int rawLength) :
                base(array, offset, count, rawLength + Prefix.Length + Postfix.Length, false)
            {
            }

            /// <summary>Writes the encoded word: prefix, base64-encoded payload, postfix.</summary>
            internal sealed override void WriteTo(Stream stream)
            {
                stream.Write(Prefix, 0, Prefix.Length);

                // ForwardingStream prevents the encoder from closing the caller's stream on dispose.
                using (var encodingStream = new Base64Stream(new ForwardingStream(stream), this.Length))
                {
                    base.WriteTo(encodingStream);
                }

                stream.Write(Postfix, 0, Postfix.Length);
            }
        }

        /// <summary>Represents a quoted-printable-encoded word in a header field.</summary>
        private sealed class QuotedPrintableToken : Token
        {
            private static readonly byte[] Prefix = EncodingObjects.ASCII.GetBytes("=?utf-8?q?");
            private static readonly byte[] Postfix = EncodingObjects.ASCII.GetBytes("?=");

            ////////////////////////////////////////////////////////////////////////////////////////////////////////////

            internal QuotedPrintableToken(byte[] array, int offset, int count, int rawLength) :
                base(array, offset, count, rawLength + Prefix.Length + Postfix.Length, false)
            {
            }

            /// <summary>Writes the encoded word: prefix, quoted-printable-encoded payload, postfix.</summary>
            internal sealed override void WriteTo(Stream stream)
            {
                stream.Write(Prefix, 0, Prefix.Length);

                // ForwardingStream prevents the encoder from closing the caller's stream on dispose.
                using (var encodingStream = new QuotedPrintableStream(new ForwardingStream(stream), this.Length, true))
                {
                    base.WriteTo(encodingStream);
                }

                stream.Write(Postfix, 0, Postfix.Length);
            }
        }
    }
}
