﻿using Microsoft.Media.ISO;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Xml;
using System.Runtime.CompilerServices;
#if NETFX_CORE
using Windows.Media.MediaProperties;
#endif

namespace Microsoft.Media.IngestClient
{
    /// <summary>
    /// A static class used to transmux MP4 fragments into PIFF that can be sent to LiveClient.
    /// </summary>
    public static class PiffTransmuxer
    {
        /// <summary>
        /// Creates a PIFF header for a collection of tracks with the given output timescale.
        /// The result can be sent to LiveClient.Start()
        /// </summary>
        /// <param name="tracks">The tracks to create the header for.</param>
        /// <param name="outputTimeScale">The output timescale to use in units per second.</param>
        /// <returns>The PIFF header that can be sent to LiveClient.</returns>
        [MethodImpl(MethodImplOptions.NoOptimization)] // NOTE(review): presumably works around a JIT issue — confirm before removing.
        public static PiffHeader CreateHeader(IList<IEncodedTrack> tracks, uint outputTimeScale)
        {
            // 'ftyp' identifies the stream as smooth-streaming live ("isml") with PIFF compatibility.
            var ftyp = new FileTypeBox();
            ftyp.MajorBrand = "isml";
            ftyp.MinorVersion = 1;
            ftyp.CompatibleBrands.Add("piff");
            ftyp.CompatibleBrands.Add("iso2");

            // 'uuid' carries the live server manifest (SMIL) describing every track.
            var uuid = new LiveServerManifestBox();
            uuid.Manifest = CreateSMIL(tracks);

            var moov = new MovieBox();
            var now = DateTimeOffset.UtcNow;
            moov.InnerBoxes.Add(new MovieHeaderBox()
            {
                TimeScale = outputTimeScale,
                Volume = 256, // hardcode according to piff spec
                Rate = 65536, // hardcode according to piff spec
                Flags = 0,
                Version = 1,
                Duration = ulong.MaxValue, // hardcode to max value for live
                CreationTime = now,
                ModificationTime = now,
                Matrix = new uint[] { 0x10000, 0, 0, 0, 0x10000, 0, 0, 0, 0x40000000 }, // hardcode according to piff spec
                NextTrackId = (uint)tracks.Count + 1
            });

            // The trex entries are collected while the trak boxes are built, but the
            // mvex box itself is appended to moov only after all trak boxes (required box order).
            var mvex = new MovieExtendsBox();

            uint trackId = 0U;
            foreach (var track in tracks)
            {
                trackId++;

                // Resolve the track kind once up front instead of repeating is-check + cast pairs.
                var videoTrack = track as EncodedVideoTrack;
                var audioTrack = track as EncodedAudioTrack;

                var trak = new TrackBox();
                trak.InnerBoxes.Add(new TrackHeaderBox()
                {
                    Version = 1,
                    Flags = 0x1 | 0x2 | 0x4, // TrackEnabled | TrackInMovie | TrackInPreview
                    CreationTime = now,
                    ModificationTime = now,
                    Duration = ulong.MaxValue, // hardcode to max value for live
                    Height = (videoTrack != null) ? videoTrack.EncodingProperties.Height * 0x10000 : 0, // 16.16 fixed point
                    Width = (videoTrack != null) ? videoTrack.EncodingProperties.Width * 0x10000 : 0, // 16.16 fixed point
                    Layer = 0, // hardcode according to piff spec
                    AlternateGroup = 0, // hardcode according to piff spec
                    TrackId = trackId,
                    Volume = (audioTrack != null) ? (ushort)256 : (ushort)0, // hardcode according to piff spec
                    Matrix = new uint[] { 0x10000, 0, 0, 0, 0x10000, 0, 0, 0, 0x40000000 } // hardcode according to piff spec
                });

                var mdia = new MediaBox();
                mdia.InnerBoxes.Add(new MediaHeaderBox()
                {
                    Version = 1,
                    Flags = 0,
                    CreationTime = now,
                    ModificationTime = now,
                    Timescale = outputTimeScale,
                    Duration = ulong.MaxValue,
                    Language = "und",
                    Quality = 0
                });

                // 'hdlr' declares how the media is handled; tracks that are neither audio
                // nor video get no handler box (same behavior as before).
                if (audioTrack != null)
                {
                    mdia.InnerBoxes.Add(new HandlerReferenceBox()
                    {
                        Version = 0,
                        Flags = 0,
                        HandlerType = "soun",
                        Name = "Audio"
                    });
                }
                else if (videoTrack != null)
                {
                    mdia.InnerBoxes.Add(new HandlerReferenceBox()
                    {
                        Version = 0,
                        Flags = 0,
                        HandlerType = "vide",
                        Name = "Video"
                    });
                }

                var minf = new MediaInformationBox();
                if (audioTrack != null)
                {
                    minf.InnerBoxes.Add(new SoundMediaHeaderBox()
                    {
                        Version = 0, // hardcode according to piff spec
                        Flags = 0,
                        Balance = 0, // hardcode according to piff spec
                    });
                }
                else if (videoTrack != null)
                {
                    minf.InnerBoxes.Add(new VideoMediaHeaderBox()
                    {
                        Version = 0, // hardcode according to piff spec
                        Flags = 1,
                        GraphicsMode = 0, // hardcode according to piff spec
                        Color = new ushort[] { 0, 0, 0 } // hardcode according to piff spec
                    });
                }

                // 'dinf'/'dref'/'url ': self-contained media (flag 1 = data in same file).
                var dinf = new DataInformationBox();
                var dref = new DataReferenceBox()
                {
                    Version = 0,
                    Flags = 0
                };
                dref.InnerBoxes.Add(new UrlBox()
                {
                    Version = 0,
                    Flags = 1,
                });
                dinf.InnerBoxes.Add(dref);
                minf.InnerBoxes.Add(dinf);

                // The sample table in a fragmented movie header is empty except for the
                // sample description (stsd), which is copied verbatim from the source track.
                var stbl = new SampleTableBox();
                stbl.InnerBoxes.Add(new DecodingTimeToSampleBox()
                {
                    Version = 0,
                    Flags = 0
                });
                stbl.InnerBoxes.Add(new CompositionTimeToSampleBox()
                {
                    Version = 0,
                    Flags = 0
                });
                stbl.InnerBoxes.Add(new SampleChunkBox()
                {
                    Version = 0,
                    Flags = 0
                });
                stbl.InnerBoxes.Add(new SampleChunkOffsetBox()
                {
                    Version = 0,
                    Flags = 0
                });
                stbl.InnerBoxes.Add(new SampleSizeBox()
                {
                    Version = 0,
                    Flags = 0
                });
                var stsd = track.Movie
                        .Tracks.First()
                        .Media
                        .MediaInformation
                        .SampleTable
                        .SampleDescription;
                // copy stsd box directly from the source.
                stbl.InnerBoxes.Add(stsd);
                minf.InnerBoxes.Add(stbl);
                mdia.InnerBoxes.Add(minf);
                trak.InnerBoxes.Add(mdia);
                moov.InnerBoxes.Add(trak);

                // trex entry for this track (previously built in a separate, duplicated loop).
                mvex.InnerBoxes.Add(new TrackExtendsBox()
                {
                    Version = 0,
                    Flags = 0,
                    TrackId = trackId,
                    SampleDescriptionIndex = 1, // hardcode according to piff spec
                    SampleDuration = 0,
                    SampleSize = 0,
                    SampleFlags = new SampleFlags() // none are set
                });
            }

            moov.InnerBoxes.Add(mvex);

            return new PiffHeader(ftyp, uuid, moov);
        }

        /// <summary>
        /// Creates a PIFF fragment.
        /// The result can be sent to LiveClient.PushFragment()
        /// </summary>
        /// <param name="fmp4">A complete MP4 for the given fragment defined as a collection of boxes.</param>
        /// <param name="trackId">The 1 based numeric track ID.</param>
        /// <param name="fragmentIndex">The 1 based fragment index.</param>
        /// <param name="timestamp">The absolute timestamp of the fragment in 100 nanosecond units.</param>
        /// <param name="duration">The duration of the fragment in 100 nanosecond units.</param>
        /// <param name="outputTimeScale">The timescale to convert the fragment to.</param>
        /// <returns>The PIFF fragment that can be sent to LiveClient.</returns>
        public static PiffFragment CreateFragment(IList<IBox> fmp4, uint trackId, uint fragmentIndex, ulong timestamp, ulong duration, uint outputTimeScale)
        {
            var moov = fmp4.OfType<MovieBox>().First();
            var stbl = moov.Tracks.Single().Media.MediaInformation.SampleTable;
            var mdat = fmp4.OfType<MediaDataBox>().First(); // simply pass along the mdat box from the fmp4
            bool isAudio = stbl.SampleDescription.SampleEntries.FirstOrDefault() is AudioSampleEntryBox;
            bool isVideo = stbl.SampleDescription.SampleEntries.FirstOrDefault() is VisualSampleEntryBox;
            ulong timescale = (ulong)moov.MovieHeader.TimeScale;

            var moof = new MovieFragmentBox(); // construct the data in this from the moov box
            moof.InnerBoxes.Add(new MovieFragmentHeaderBox()
            {
                Version = 0,
                Flags = 0,
                SequenceNumber = fragmentIndex
            });
            var traf = new TrackFragmentBox();
            traf.InnerBoxes.Add(new TrackFragmentHeaderBox()
            {
                Version = 0,
                Flags = 0x20, // only the DefaultSampleFlags is present
                TrackId = trackId,
                BaseDataOffset = 0, // not used because traf.Flags says so.
                SampleDescriptionIndex = 0, // not used because traf.Flags says so.
                DefaultSampleDuration = 0, // not used because traf.Flags says so.
                DefaultSampleSize = 0, // not used because traf.Flags says so.
                DefaultSampleFlags = new SampleFlags() { DegradationPriority = isVideo ? 0x4001 : (isAudio ? 0x8002 : 0x0000) } // Audio gets higher priority over video
            });

            var trun = new TrackFragmentRunBox()
            {
                Version = 0,
                Flags = 0x1 | 0x4, // DataOffset + FirstSampleFlags; per-entry flags are OR'd in below when data is present.
                DataOffset = 0, // update after moof is created with moof.size + base box.size
                FirstSampleFlags = new SampleFlags() { DegradationPriority = isVideo ? 0x4001 : (isAudio ? 0x8002 : 0x0000) } // Audio gets higher priority over video
            };

            // NOTE(review): this reads stsc entries as one chunk per entry; the stsc run-length
            // encoding (first_chunk) is ignored. Fine for single-fragment fmp4 from SinkWriter —
            // confirm if other producers are ever fed in.
            foreach (var chunk in stbl.SampleChunk.Entries)
            {
                for (int j = 0; j < chunk.SamplesPerChunk; j++)
                {
                    trun.Samples.Add(new TrackFragmentRunSample()
                    {
                        SampleFlags = null, // not used because trun.Flags says so.
                    });
                }
            }

            if (stbl.SampleSize != null)
            {
                int sampleIndex = 0;
                foreach (var sampleSize in stbl.SampleSize.SizeTable)
                {
                    trun.Samples[sampleIndex].SampleSize = sampleSize;
                    sampleIndex++;
                }
                trun.Flags |= 0x200; // SampleSize present
            }

            if (stbl.DecodingTimeToSample != null)
            {
                int sampleIndex = 0;
                foreach (var entry in stbl.DecodingTimeToSample.Entries)
                {
                    for (int j = 0; j < entry.SampleCount; j++)
                    {
                        trun.Samples[sampleIndex].SampleDuration = (uint)((ulong)outputTimeScale * (ulong)entry.SampleDelta / timescale);
                        sampleIndex++;
                    }
                }
                trun.Flags |= 0x100; // SampleDuration present
            }

            if (stbl.CompositionTimeToSample != null)
            {
                int sampleIndex = 0;
                foreach (var entry in stbl.CompositionTimeToSample.Entries)
                {
                    for (int j = 0; j < entry.SampleCount; j++)
                    {
                        uint offset;
                        // BUGFIX: the threshold used to be ushort.MaxValue, which misclassified
                        // every positive composition offset above 65535 as "negative". A stored
                        // negative int has its high bit set, i.e. the uint value > int.MaxValue.
                        if (entry.SampleOffset > int.MaxValue) // perform signed timescale conversion on the negative value.
                        {
                            int negativeSampleOffset = (int)entry.SampleOffset;
                            // Multiply in long to avoid int overflow for large timescales/offsets.
                            offset = (uint)((long)outputTimeScale * negativeSampleOffset / (long)timescale);
                        }
                        else
                        {
                            offset = (uint)((ulong)outputTimeScale * (ulong)entry.SampleOffset / timescale);
                        }
                        trun.Samples[sampleIndex].SampleCompositionTimeOffset = offset;
                        sampleIndex++;
                    }
                }
                trun.Flags |= 0x800; // SampleCompositionTimeOffset present
            }

            traf.InnerBoxes.Add(trun);

            var sdtp = new IndependentAndDisposableSamplesBox()
            {
                Version = 0,
                Flags = 0
            };

            {
                uint sampleIndex = 1;
                foreach (var sample in trun.Samples)
                {
                    // video will have a SyncSamples table to indicate I-Frames. For Audio we just assume first sample.
                    bool isStandalone = (stbl.SyncSamples != null && stbl.SyncSamples.SyncTable.Contains(sampleIndex)) || sampleIndex == 1;
                    sdtp.Samples.Add(new IndependentAndDisposableSample()
                    {
                        SampleDependsOn = (isStandalone) ? 2 : 1,
                        SampleHasRedundancy = 0, // assume false
                        SampleIsDependedOn = 1
                    });
                    sampleIndex++;
                }
            }

            traf.InnerBoxes.Add(sdtp);

            // BUGFIX: the absolute time/duration used to be scaled with the integer quotient
            // (outputTimeScale / 10000000), which truncates to 0 for any timescale below 10 MHz
            // and drops fractional ratios. Scale exactly instead (bit-identical for 10 MHz).
            traf.InnerBoxes.Add(new TrackFragmentExtendedHeaderBox()
            {
                Version = 1,
                Flags = 0,
                FraqmentAbsoluteTime = ScaleFrom100ns(timestamp, outputTimeScale),
                FragmentDuration = ScaleFrom100ns(duration, outputTimeScale)
            });
            moof.InnerBoxes.Add(traf);

            // SinkWriter always starts data at byte 16 in case it needs to make a large box. Compact data by getting rid of first 8 bytes to comply with PIFF.
            // A more robust solution would be to test for this but there's no need if input comes from SinkWriter.
            var compactMdat = new MediaDataBox();
            compactMdat.Data = new byte[mdat.Data.Length - 8];
            System.Buffer.BlockCopy(mdat.Data, 8, compactMdat.Data, 0, compactMdat.Data.Length);

            // now that we have the moof box populated, we can calculate its size and update trun
            trun.DataOffset = (uint)(moof.CalculateBoxSize()) + 8U; // + base box size

            return new PiffFragment(moof, compactMdat);
        }

        /// <summary>
        /// Converts a time expressed in 100-nanosecond units to <paramref name="outputTimeScale"/>
        /// units per second, without overflowing ulong for large absolute timestamps and without
        /// truncating to zero for timescales below 10 MHz.
        /// </summary>
        /// <param name="time">The time in 100-nanosecond units.</param>
        /// <param name="outputTimeScale">The target timescale in units per second.</param>
        /// <returns>The time in output timescale units.</returns>
        private static ulong ScaleFrom100ns(ulong time, uint outputTimeScale)
        {
            const ulong HundredNsPerSecond = 10000000UL;
            // Split into whole seconds + remainder so the multiplication cannot overflow:
            // the remainder is < 10^7, so remainder * outputTimeScale fits comfortably in ulong.
            return (time / HundredNsPerSecond) * outputTimeScale
                 + (time % HundredNsPerSecond) * outputTimeScale / HundredNsPerSecond;
        }

        /// <summary>
        /// Creates a PIFF footer.
        /// The result can be sent to LiveClient.Complete()
        /// </summary>
        /// <returns>The PIFF footer that can be sent to LiveClient.</returns>
        public static PiffFooter CreateFooter()
        {
            // The footer wraps a (default-constructed) movie fragment random access box,
            // which is what LiveClient.Complete() expects when the stream finishes.
            var mfra = new MovieFragmentRandomAccessBox();
            return new PiffFooter(mfra);
        }

        static string CreateSMIL(IList<IEncodedTrack> tracks)
        {
            string id = Guid.NewGuid().ToString();

            using (var stream = new MemoryStream())
            {
                using (var writer = XmlWriter.Create(stream, new XmlWriterSettings() { CloseOutput = false, Encoding = Encoding.UTF8 }))
                {
                    writer.WriteStartElement("smil", "http://www.w3.org/2001/SMIL20/Language");
                    {
                        writer.WriteStartElement("head");
                        {
                            writer.WriteStartElement("meta");
                            writer.WriteAttributeString("name", "creator");
                            writer.WriteAttributeString("content", "ExtrepssionEncoder");
                            writer.WriteEndElement();

                            writer.WriteStartElement("rdf", "RDF", "http://www.w3.org/1999/02/22-rdf-syntax-ns#");
                            {
                                writer.WriteAttributeString("xmlns", "dcterms", null, "http://purl.org/dc/terms/");
                                writer.WriteAttributeString("xmlns", "iisms", null, "http://schemas.microsoft.com/iis/media/v4#");
                                writer.WriteAttributeString("xmlns", "wm", null, "http://schemas.microsoft.com/windowsmedia/v11/attributes#");
                                writer.WriteAttributeString("xmlns", "ee", null, "http://schemas.microsoft.com/expression/encoder/v4#");
                                writer.WriteAttributeString("xmlns", "eecustom", null, "http://schemas.microsoft.com/expression/encoder/v4/custom#");
                                writer.WriteAttributeString("xmlns", "rdf", null, "http://www.w3.org/1999/02/22-rdf-syntax-ns#");
                                writer.WriteAttributeString("xmlns", "rdfs", null, "http://www.w3.org/2000/01/rdf-schema#");

                                writer.WriteStartElement("MediaItem", "http://schemas.microsoft.com/expression/encoder/v4#");
                                {
                                    writer.WriteAttributeString("about", "http://www.w3.org/1999/02/22-rdf-syntax-ns#", "guid:" + id);
                                    writer.WriteElementString("ID", "http://schemas.microsoft.com/expression/encoder/v4#", id);
                                    writer.WriteElementString("HasAttachedImages", "http://schemas.microsoft.com/expression/encoder/v4#", "False");
                                    writer.WriteElementString("HasCaptions", "http://schemas.microsoft.com/expression/encoder/v4#", "False");
                                    writer.WriteElementString("HasAudio", "http://schemas.microsoft.com/expression/encoder/v4#", tracks.OfType<EncodedAudioTrack>().Any().ToString());
                                    writer.WriteElementString("HasImage", "http://schemas.microsoft.com/expression/encoder/v4#", "False");
                                    writer.WriteElementString("HasVideo", "http://schemas.microsoft.com/expression/encoder/v4#", tracks.OfType<EncodedVideoTrack>().Any().ToString());
                                    writer.WriteElementString("IsProtected", "http://schemas.microsoft.com/expression/encoder/v4#", "False");
                                    writer.WriteElementString("IsVBR", "http://schemas.microsoft.com/expression/encoder/v4#", "False");
                                    writer.WriteElementString("Streams", "http://schemas.microsoft.com/expression/encoder/v4#", tracks.Count.ToString(CultureInfo.InvariantCulture));
                                    writer.WriteEndElement();
                                }
                                writer.WriteEndElement();
                            }
                            writer.WriteEndElement();
                        }

                        writer.WriteStartElement("body");
                        {
                            writer.WriteStartElement("switch");
                            {
                                int i = 0;
                                foreach (var track in tracks)
                                {
                                    i++;
                                    if (track is EncodedAudioTrack)
                                    {
                                        var audioTrack = (EncodedAudioTrack)track;
                                        writer.WriteStartElement("audio");
                                        {
                                            writer.WriteAttributeString("systemBitrate", audioTrack.EncodingProperties.Bitrate.ToString(CultureInfo.InvariantCulture));
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "systemBitrate");
                                                writer.WriteAttributeString("value", audioTrack.EncodingProperties.Bitrate.ToString(CultureInfo.InvariantCulture));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "trackID");
                                                writer.WriteAttributeString("value", i.ToString(CultureInfo.InvariantCulture));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "FourCC");
                                                writer.WriteAttributeString("value", GetAudioFourCC(audioTrack.EncodingProperties.Subtype));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                var esds = audioTrack.Movie
                                                    .Tracks.First()
                                                    .Media
                                                    .MediaInformation
                                                    .SampleTable
                                                    .SampleDescription
                                                    .SampleEntries.OfType<AudioSampleEntryBox>().First()
                                                    .ElementaryStreamDescriptor;

                                                string codecPrivateData = string.Empty;
                                                var dsi = esds.StreamDescriptor.SubDescriptors.OfType<DecoderSpecificInformationDescriptor>().FirstOrDefault();
                                                if (dsi != null)
                                                {
                                                    codecPrivateData = ConvertToHex(dsi.Information);
                                                }

                                                writer.WriteAttributeString("name", "CodecPrivateData");
                                                writer.WriteAttributeString("value", codecPrivateData);
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "AudioTag");
                                                writer.WriteAttributeString("value", GetAudioTag(audioTrack.EncodingProperties.Subtype));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "Channels");
                                                writer.WriteAttributeString("value", audioTrack.EncodingProperties.ChannelCount.ToString(CultureInfo.InvariantCulture));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "SamplingRate");
                                                writer.WriteAttributeString("value", audioTrack.EncodingProperties.SampleRate.ToString(CultureInfo.InvariantCulture));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "BitsPerSample");
                                                writer.WriteAttributeString("value", audioTrack.EncodingProperties.BitsPerSample.ToString(CultureInfo.InvariantCulture));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "PacketSize"); // Specifies the block alignment, in bytes.
                                                var blockAlign = (audioTrack.EncodingProperties.ChannelCount * audioTrack.EncodingProperties.BitsPerSample) / 8;
                                                writer.WriteAttributeString("value", blockAlign.ToString(CultureInfo.InvariantCulture));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "Subtype");
                                                writer.WriteAttributeString("value", GetAudioFourCC(audioTrack.EncodingProperties.Subtype));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteEndElement();
                                        }
                                    }
                                    else if (track is EncodedVideoTrack)
                                    {
                                        var videoTrack = (EncodedVideoTrack)track;
                                        writer.WriteStartElement("video");
                                        {
                                            writer.WriteAttributeString("systemBitrate", videoTrack.EncodingProperties.Bitrate.ToString(CultureInfo.InvariantCulture));
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "systemBitrate");
                                                writer.WriteAttributeString("value", videoTrack.EncodingProperties.Bitrate.ToString(CultureInfo.InvariantCulture));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "trackID");
                                                writer.WriteAttributeString("value", i.ToString(CultureInfo.InvariantCulture));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "FourCC");
                                                writer.WriteAttributeString("value", videoTrack.EncodingProperties.Subtype);
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                var avcc = videoTrack.Movie
                                                    .Tracks.First()
                                                    .Media
                                                    .MediaInformation
                                                    .SampleTable
                                                    .SampleDescription
                                                    .SampleEntries.OfType<VisualSampleEntryBox>().First()
                                                    .InnerBoxes.OfType<AdvancedVideoCodingBox>().First();

                                                string sps = ConvertToHex(avcc.SequenceParameters.First().ToArray());
                                                string pps = ConvertToHex(avcc.PictureParameters.First().ToArray());
                                                var codecPrivateData = "00000001" + sps + "00000001" + pps;

                                                writer.WriteAttributeString("name", "CodecPrivateData");
                                                writer.WriteAttributeString("value", codecPrivateData);
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "MaxWidth");
                                                writer.WriteAttributeString("value", videoTrack.EncodingProperties.Width.ToString(CultureInfo.InvariantCulture));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "MaxHeight");
                                                writer.WriteAttributeString("value", videoTrack.EncodingProperties.Height.ToString(CultureInfo.InvariantCulture));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "DisplayWidth");
                                                writer.WriteAttributeString("value", videoTrack.EncodingProperties.Width.ToString(CultureInfo.InvariantCulture));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "DisplayHeight");
                                                writer.WriteAttributeString("value", videoTrack.EncodingProperties.Height.ToString(CultureInfo.InvariantCulture));
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteStartElement("param");
                                            {
                                                writer.WriteAttributeString("name", "Subtype");
                                                writer.WriteAttributeString("value", videoTrack.EncodingProperties.Subtype);
                                                writer.WriteAttributeString("valuetype", "data");
                                                writer.WriteEndElement();
                                            }
                                            writer.WriteEndElement();
                                        }
                                    }
                                }
                                writer.WriteEndElement();
                            }
                            writer.WriteEndElement();
                        }
                        writer.WriteEndElement();
                    }
                }
                stream.Flush();
                stream.Seek(0, SeekOrigin.Begin);
                return new StreamReader(stream).ReadToEnd();
            }
        }

        /// <summary>
        /// Converts a byte array into its lowercase hexadecimal string representation
        /// (two hex digits per byte, no separators) for use in CodecPrivateData.
        /// </summary>
        /// <param name="bytes">The bytes to convert. Must not be null.</param>
        /// <returns>A string of <c>2 * bytes.Length</c> lowercase hex characters.</returns>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="bytes"/> is null.</exception>
        static string ConvertToHex(byte[] bytes)
        {
            if (bytes == null)
            {
                throw new ArgumentNullException("bytes");
            }

            StringBuilder hex = new StringBuilder(bytes.Length * 2);
            foreach (byte b in bytes)
            {
                // "x2" on a byte is digits-only, but pass InvariantCulture explicitly
                // so the formatting is provably culture-independent (CA1305).
                hex.AppendFormat(CultureInfo.InvariantCulture, "{0:x2}", b);
            }
            return hex.ToString();
        }

        /// <summary>
        /// Maps an audio encoding subtype to its Smooth Streaming FourCC identifier.
        /// </summary>
        /// <param name="SubType">The audio encoding subtype string.</param>
        /// <returns>"AACL" for AAC; no other subtype is currently supported.</returns>
        /// <exception cref="NotImplementedException">Thrown for any subtype other than AAC.</exception>
        static string GetAudioFourCC(string SubType)
        {
            // Ordinal case-insensitive comparison instead of ToLowerInvariant()
            // round-trips: no allocations and no NRE when SubType is null (CA1862).
            if (string.Equals(SubType, MediaEncodingSubtypes.Aac, StringComparison.OrdinalIgnoreCase)) // AAC-LC (AAC low complexity)
                return "AACL";
            else
                throw new NotImplementedException();
        }

        /// <summary>
        /// Maps an audio encoding subtype to its WAVE format tag (decimal, as a string)
        /// for the Smooth Streaming live server manifest.
        /// </summary>
        /// <param name="SubType">The audio encoding subtype string.</param>
        /// <returns>The WAVE format tag; "65534" (WAVE_FORMAT_EXTENSIBLE) for unrecognized subtypes.</returns>
        static string GetAudioTag(string SubType)
        {
            if (string.Equals(SubType, MediaEncodingSubtypes.Pcm, StringComparison.OrdinalIgnoreCase)) // The sample media format is Linear 8 or 16-bit Pulse Code Modulation 
                return "1";
            // BUG FIX: the original compared the raw SubType (not lowercased) against
            // Wma9.ToLowerInvariant(), so a mixed-case WMA9 subtype fell through to the
            // vendor-extensible tag. Ordinal case-insensitive comparison fixes that and
            // avoids the per-call ToLowerInvariant allocations.
            else if (string.Equals(SubType, MediaEncodingSubtypes.Wma8, StringComparison.OrdinalIgnoreCase)
                || string.Equals(SubType, MediaEncodingSubtypes.Wma9, StringComparison.OrdinalIgnoreCase)) // Microsoft Windows Media Audio
                return "353";
            else if (string.Equals(SubType, MediaEncodingSubtypes.Mpeg1, StringComparison.OrdinalIgnoreCase)) // ISO MPEG-1 Layer III (MP3) 
                return "85";
            else if (string.Equals(SubType, MediaEncodingSubtypes.Aac, StringComparison.OrdinalIgnoreCase)) // ISO Advanced Audio Coding (AAC) 
                return "255";
            else // Vendor-extensible format. If specified, the CodecPrivateData field SHOULD contain a hex-encoded version of the WAVE_FORMAT_EXTENSIBLE structure [WFEX]. 
                return "65534";
        }
    }

    /// <summary>
    /// Represents an individual track that needs to be transmuxed. This may be audio or one of the video tracks.
    /// Implementations: <see cref="EncodedVideoTrack"/> and <see cref="EncodedAudioTrack"/>.
    /// </summary>
    public interface IEncodedTrack
    {
        /// <summary>
        /// The media encoding properties (audio or video) used to define the track.
        /// </summary>
        IMediaEncodingProperties MediaEncodingProperties { get; }

        /// <summary>
        /// A complete movie (moov) box used to provide additional information to the transmuxer,
        /// e.g. codec parameter sets for building the manifest.
        /// </summary>
        MovieBox Movie { get; }
    }

    /// <summary>
    /// Represents a video track with a specific resolution and bitrate. Another instance must be created for alternate resolutions or bitrates.
    /// </summary>
    public sealed class EncodedVideoTrack : IEncodedTrack
    {
        /// <summary>
        /// Creates an instance of EncodedVideoTrack.
        /// </summary>
        /// <param name="encodingProperties">The media encoding properties used to define the track.</param>
        /// <param name="fragment">A collection of boxes that must contain a movie box.</param>
        /// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
        /// <exception cref="InvalidOperationException">Thrown when <paramref name="fragment"/> contains no <see cref="MovieBox"/>.</exception>
        public EncodedVideoTrack(VideoEncodingProperties encodingProperties, IList<IBox> fragment)
        {
            // Validate at the public entry point so a missing argument fails fast
            // here instead of surfacing later as a NullReferenceException.
            if (encodingProperties == null)
                throw new ArgumentNullException("encodingProperties");
            if (fragment == null)
                throw new ArgumentNullException("fragment");

            EncodingProperties = encodingProperties;

            // First() deliberately throws when no moov box is present: the track
            // is unusable for transmuxing without one.
            Movie = fragment.OfType<MovieBox>().First();
        }

        /// <summary>
        /// The video encoding properties used to define the track.
        /// </summary>
        public VideoEncodingProperties EncodingProperties { get; private set; }

        /// <inheritdoc />
        // NOTE(review): the setter is public while EncodingProperties uses a private
        // setter — likely unintended, but kept as-is for binary compatibility.
        public MovieBox Movie { get; set; }

        /// <inheritdoc />
        IMediaEncodingProperties IEncodedTrack.MediaEncodingProperties
        {
            get { return EncodingProperties; }
        }
    }

    /// <summary>
    /// Represents an audio track.
    /// </summary>
    public sealed class EncodedAudioTrack : IEncodedTrack
    {
        /// <summary>
        /// Creates an instance of EncodedAudioTrack.
        /// </summary>
        /// <param name="encodingProperties">The media encoding properties used to define the track.</param>
        /// <param name="fragment">A collection of boxes that must contain a movie box.</param>
        /// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
        /// <exception cref="InvalidOperationException">Thrown when <paramref name="fragment"/> contains no <see cref="MovieBox"/>.</exception>
        public EncodedAudioTrack(AudioEncodingProperties encodingProperties, IList<IBox> fragment)
        {
            // Validate at the public entry point so a missing argument fails fast
            // here instead of surfacing later as a NullReferenceException.
            if (encodingProperties == null)
                throw new ArgumentNullException("encodingProperties");
            if (fragment == null)
                throw new ArgumentNullException("fragment");

            EncodingProperties = encodingProperties;

            // First() deliberately throws when no moov box is present: the track
            // is unusable for transmuxing without one.
            Movie = fragment.OfType<MovieBox>().First();
        }

        /// <summary>
        /// The audio encoding properties used to define the track.
        /// </summary>
        public AudioEncodingProperties EncodingProperties { get; private set; }

        /// <inheritdoc />
        // NOTE(review): the setter is public while EncodingProperties uses a private
        // setter — likely unintended, but kept as-is for binary compatibility.
        public MovieBox Movie { get; set; }

        /// <inheritdoc />
        IMediaEncodingProperties IEncodedTrack.MediaEncodingProperties
        {
            get { return EncodingProperties; }
        }
    }

    /// <summary>
    /// Represents the data required to initialize a live stream.
    /// </summary>
    public sealed class PiffHeader
    {
        /// <summary>
        /// Creates a new instance of the PiffHeader class.
        /// </summary>
        /// <param name="ftyp">File type box.</param>
        /// <param name="uuid">A special UUID box used to hold the live server manifest.</param>
        /// <param name="moov">Movie box.</param>
        /// <exception cref="ArgumentNullException">Thrown when any argument is null.</exception>
        public PiffHeader(FileTypeBox ftyp, LiveServerManifestBox uuid, MovieBox moov)
        {
            // All three boxes are required to form a valid PIFF header; fail fast
            // rather than storing nulls that would surface later in serialization.
            if (ftyp == null)
                throw new ArgumentNullException("ftyp");
            if (uuid == null)
                throw new ArgumentNullException("uuid");
            if (moov == null)
                throw new ArgumentNullException("moov");

            FileType = ftyp;
            LiveServerManifest = uuid;
            Movie = moov;
        }

        /// <summary>
        /// Gets the file type box.
        /// </summary>
        public FileTypeBox FileType { get; private set; }

        /// <summary>
        /// Gets the UUID box used to hold a live manifest.
        /// </summary>
        public LiveServerManifestBox LiveServerManifest { get; private set; }

        /// <summary>
        /// Gets the movie box.
        /// </summary>
        public MovieBox Movie { get; private set; }
    }

    /// <summary>
    /// Represents the data for a single fragment.
    /// </summary>
    public sealed class PiffFragment
    {
        /// <summary>
        /// Creates a new instance of the PiffFragment class.
        /// </summary>
        /// <param name="moof">Movie fragment box.</param>
        /// <param name="mdat">Media data box.</param>
        /// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
        public PiffFragment(MovieFragmentBox moof, MediaDataBox mdat)
        {
            // Both boxes are required for a well-formed fragment; fail fast
            // rather than storing nulls that would surface later in serialization.
            if (moof == null)
                throw new ArgumentNullException("moof");
            if (mdat == null)
                throw new ArgumentNullException("mdat");

            MovieFragment = moof;
            MediaData = mdat;
        }

        /// <summary>
        /// Gets the movie fragment box.
        /// </summary>
        public MovieFragmentBox MovieFragment { get; private set; }

        /// <summary>
        /// Gets the media data box.
        /// </summary>
        public MediaDataBox MediaData { get; private set; }
    }

    /// <summary>
    /// Represents the data required to end a live stream.
    /// </summary>
    public sealed class PiffFooter
    {
        /// <summary>
        /// Creates a new instance of the PiffFooter class.
        /// </summary>
        /// <param name="mfra">Movie fragment random access box.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="mfra"/> is null.</exception>
        public PiffFooter(MovieFragmentRandomAccessBox mfra)
        {
            // The mfra box is the footer's only payload; fail fast on null rather
            // than storing it and failing later in serialization.
            if (mfra == null)
                throw new ArgumentNullException("mfra");

            MovieFragmentRandomAccess = mfra;
        }

        /// <summary>
        /// Gets the movie fragment random access box.
        /// </summary>
        public MovieFragmentRandomAccessBox MovieFragmentRandomAccess { get; private set; }
    }
}
