use std::{fmt::Debug, net::SocketAddr};
use tokio::sync::mpsc;

use crate::{sdp::field::RtpItem, sip2::dialog::DialogMessage, wav::file::WavBuf};

/// Control messages sent to the RTP task/loop.
///
/// NOTE(review): "Lag"/"BLag" appears to be a project-specific term (a leg/stream
/// handle?) and "Vedio" is a typo for "Video" — both are part of the public
/// variant names and are kept as-is so callers keep compiling; confirm intent
/// with the original author before renaming.
pub(crate) enum RtpEvent {
	// Close,
	///Set the BLag value. The dialog-message sender is passed along directly
	///(original comment was truncated — presumably the receiving side keeps
	///this sender to report back to the dialog; TODO confirm).
	SetBLag(u32, mpsc::Sender<DialogMessage>),
	///Set the recording file path, i.e. recording should start.
	StartRecordFile(String),
	StopRecord,
	///Play a file with the given contents.
	PlayFile(u32, WavBuf),
	///Play a list of files in order. If a play queue already exists,
	///the new files are appended to the end of it.
	PlayListFiles(u32, Vec<WavBuf>),
	StopPlay(u32),
	///Set the audio media type(s).
	SetLagAudioMedia(Vec<RtpItem>),
	///Set the video media type(s).
	// Currently unsent/unhandled; kept for the planned video path.
	#[allow(unused)]
	SetLagVedioMedia(u32, Vec<RtpItem>),
	///Set the peer's RTP address.
	SetPeerRtpAddr(u32, SocketAddr),
	///Set the peer's RTCP address.
	SetPeerRtcpAddr(u32, SocketAddr),
	Bind,
	DialogClose(u32),
}

/// Hand-written `Debug`: prints a short human-readable tag per event.
/// Channel senders and audio payload buffers are deliberately elided
/// (they have no useful textual form); ids and addresses are shown.
impl Debug for RtpEvent {
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		match self {
			// The mpsc sender has no meaningful Debug output — show only the id.
			RtpEvent::SetBLag(id, _) => write!(f, "SetBLag {}", id),
			RtpEvent::StartRecordFile(name) => write!(f, "StartRecordFile..{}", name),
			RtpEvent::StopRecord => write!(f, "StopRecord"),
			RtpEvent::PlayFile(id, _) => write!(f, "PlayFile {}", id),
			RtpEvent::PlayListFiles(id, _) => write!(f, "PlayListFiles {}", id),
			RtpEvent::StopPlay(id) => write!(f, "StopPlay {}", id),
			RtpEvent::SetLagAudioMedia(items) => write!(f, "SetLagAudioMedia {:?}", items),
			// Was `(_, _)` discarding both fields; print them like the audio arm
			// (`RtpItem: Debug` is already required by the arm above).
			RtpEvent::SetLagVedioMedia(id, items) => write!(f, "SetLagVedioMedia {} {:?}", id, items),
			// Fixed doubled comma ",," in the original format strings.
			RtpEvent::SetPeerRtpAddr(id, addr) => write!(f, "SetPeerRtpAddr id:{}, addr:{}", id, addr),
			RtpEvent::SetPeerRtcpAddr(id, addr) => write!(f, "SetPeerRtcpAddr id:{}, addr:{}", id, addr),
			RtpEvent::Bind => write!(f, "Bind"),
			RtpEvent::DialogClose(id) => write!(f, "close dialog {}", id),
		}
	}
}
