﻿using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Runtime.InteropServices;
using ArgusLib.InteropServices;
using ArgusLib.FFmpeg.avutil;

namespace ArgusLib.FFmpeg.avcodec
{
	/// <summary>
	/// Managed wrapper around FFmpeg's native AVCodecContext. Created via
	/// <see cref="CreateInstance"/>, opened with <see cref="Open"/>, and released
	/// (avcodec_close + av_free) through the base class dispose path.
	/// </summary>
	public class AVCodecContext : NativeWrapper<Interop.AVCodecContextNative>
	{
		#region Fields
		// The codec this context was allocated for; handed back to avcodec_open2 in Open().
		AVCodec codec;
		#endregion

		#region Methods
		/// <summary>
		/// Allocates a codec context for <paramref name="codec"/> via avcodec_alloc_context3
		/// and remembers the codec so <see cref="Open"/> can use it.
		/// </summary>
		public static AVCodecContext CreateInstance(AVCodec codec)
		{
			AVCodecContext RetVal = Interop.Functions.avcodec_alloc_context3(codec);
			RetVal.codec = codec;
			return RetVal;
		}

		/// <summary>
		/// Opens the codec context with the codec it was created for.
		/// </summary>
		/// <exception cref="InvalidOperationException">avcodec_open2 reported an error.</exception>
		public void Open()
		{
			// avcodec_open2 returns a negative AVERROR code on failure. The original
			// ignored the result, leaving the context silently unusable on error.
			if (Interop.Functions.avcodec_open2(this, this.codec, IntPtr.Zero) < 0)
				throw new InvalidOperationException("avcodec_open2 failed.");
		}

		/// <summary>
		/// Closes the codec and frees the underlying native context.
		/// Order matters: avcodec_close must run before av_free releases the struct.
		/// </summary>
		protected override void DisposeCore()
		{
			Interop.Functions.avcodec_close(this);
#if Log
			ArgusLib.Log.WriteLine("AVCodecContext closed");
#endif
			avutil.Interop.Functions.av_free(this.Pointer);
#if Log
			ArgusLib.Log.WriteLine("AVCodecContext freed");
#endif
		}

		/// <summary>
		/// Encodes a single video frame and writes the produced packet data (if any)
		/// to <paramref name="stream"/>.
		/// </summary>
		/// <param name="stream">Destination for the encoded packet bytes.</param>
		/// <param name="frame">The raw frame to encode.</param>
		/// <returns>
		/// true if the encoder produced a packet; false if output is delayed
		/// (the encoder is buffering frames).
		/// </returns>
		/// <exception cref="InvalidOperationException">avcodec_encode_video2 reported an error.</exception>
		public bool EncodeVideoFrame(Stream stream, AVVideoFrame frame)
		{
			// Single code path. The original duplicated the whole body under
			// #if Log / #else, and the Log variant additionally called
			// packet.Dispose() explicitly inside the using block (double dispose).
#if Log
			Log.WriteLine("Encode Video Frame...");
			Log.IncrementLevel();
#endif
			using (AVPacket packet = new AVPacket())
			{
				bool gotPacket;

#if Log
				Log.WriteLine("Calling avcodec_encode_video2...");
				Log.IncrementLevel();
#endif
				if (Interop.Functions.avcodec_encode_video2(this, packet, frame, out gotPacket) < 0)
					throw new InvalidOperationException("Encoding error");
#if Log
				Log.DecrementLevel();
				Log.WriteLine("avcodec_encode_video2 returned");
				Log.Write("gotPacket = ");
				Log.WriteLine(gotPacket);
#endif
				if (gotPacket)
				{
					byte[] data = packet.Data;
					if (data != null)
					{
#if Log
						Log.Write("Write data to stream. data.Length = ");
						Log.WriteLine(data.Length);
#endif
						stream.Write(data, 0, data.Length);
#if Log
						Log.WriteLine("Data written.");
#endif
					}
				}
#if Log
				Log.DecrementLevel();
				Log.WriteLine("Video Frame encoded.");
#endif
				return gotPacket;
			}
		}
		#endregion

		#region Properties

		/// <summary>
		/// picture width / height.
		/// - encoding: MUST be set by user.
		/// - decoding: May be set by the user before opening the decoder if known e.g.
		///				from the container. Some decoders will require the dimensions
		///				to be set by the caller. During decoding, the decoder may
		///				overwrite those values as required.
		/// </summary>
		public int VideoWidth
		{
			get { return this.Structure.width; }
			set
			{
				// Many codecs require even dimensions; round odd values up.
				if (value % 2 != 0)
					value += 1;
				this.Structure.width = value;
			}
		}

		/// <summary>
		/// picture width / height.
		/// - encoding: MUST be set by user.
		/// - decoding: May be set by the user before opening the decoder if known e.g.
		///				from the container. Some decoders will require the dimensions
		///				to be set by the caller. During decoding, the decoder may
		///				overwrite those values as required.
		/// </summary>
		public int VideoHeight
		{
			get { return this.Structure.height; }
			set
			{
				// Many codecs require even dimensions; round odd values up.
				if (value % 2 != 0)
					value += 1;
				this.Structure.height = value;
			}
		}

		/// <summary>
		/// the average bitrate
		/// - encoding: Set by user; unused for constant quantizer encoding.
		/// - decoding: Set by libavcodec. 0 or some bitrate if this info is available in the stream.
		/// </summary>
		public int BitRate
		{
			get { return this.Structure.bit_rate; }
			set { this.Structure.bit_rate = value; }
		}

		/// <summary>
		/// Framerate = 1/timebase.
		/// Timebase: This is the fundamental unit of time (in seconds) in terms
		/// of which frame timestamps are represented. For fixed-fps content,
		/// timebase should be 1/framerate and timestamp increments should be
		/// identically 1.
		/// - encoding: MUST be set by user.
		/// - decoding: Set by libavcodec.
		/// </summary>
		public AVRational FPS
		{
			// The native field stores the time base; FPS is its reciprocal,
			// so num/den are swapped in both directions.
			get { return new AVRational() { num = this.Structure.time_base.den, den = this.Structure.time_base.num }; }
			set { this.Structure.time_base = new AVRational() { num = value.den, den = value.num }; }
		}

		/// <summary>
		/// Pixel format, see <see cref="AVPixelFormat"/>.
		/// May be set by the demuxer if known from headers.
		/// May be overridden by the decoder if it knows better.
		/// - encoding: Set by user.
		/// - decoding: Set by user if known, overridden by libavcodec if known
		/// </summary>
		public AVPixelFormat PixelFormat
		{
			get { return this.Structure.pix_fmt; }
			set { this.Structure.pix_fmt = value; }
		}
		#endregion
	}
}
