/******************************************************************************

  File Name     : rtsp_client_api.h
  Version       : Initial Draft
  Author        : tongzhilin
  Created       : 2020/8/6
  Last Modified :
  Description   : rtsp_client_api
  Function List :
  History       :
  1.Date        : 2020/10/27
    Author      : tongzhilin
    Modification: Created file

******************************************************************************/
#include "mp_common.h"
#include "rtsp_client_api.h"
#include "Base64.hh"
#include "liveMedia.hh"
#include "BasicUsageEnvironment.hh"

// By default, we request that the server stream its data using RTP/UDP.
// If, instead, you want to request that the server stream via RTP-over-TCP, change the following to True:
//#define REQUEST_STREAMING_OVER_TCP False
bool REQUEST_STREAMING_OVER_TCP = True;

// Forward function definitions:
// RTSP 'response handlers':
void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode, char* resultString);
void continueAfterSETUP(RTSPClient* rtspClient, int resultCode, char* resultString);
void continueAfterPLAY(RTSPClient* rtspClient, int resultCode, char* resultString);

// Other event handler functions:
void subsessionAfterPlaying(void* clientData); // called when a stream's subsession (e.g., audio or video substream) ends
void subsessionByeHandler(void* clientData); // called when a RTCP "BYE" is received for a subsession
void streamTimerHandler(void* clientData);
  // called at the end of a stream's expected duration (if the stream has not already signaled its end using a RTCP "BYE")

void streamStopEventHandler(void* clientData);
void streamStartEventHandler(void* clientData);
//void setupNextSubsession(RTSPClient* rtspClient);

// Used to shut down and close a stream (including its "RTSPClient" object):
void shutdownStream(RTSPClient* rtspClient, int exitCode = 1);

// Wraps one RTSP client connection (one channel): owns the live555
// RTSPClient pointer, the stream URL, the channel state and the
// per-channel frame callbacks.
class CRtspSession
{
public:
    CRtspSession();
    virtual ~CRtspSession();

    int     m_s32Chn;        // channel index (0 .. RTSP_CHN_NUM_MAX-1)
    char    m_rtspUrl[1024]; // RTSP URL of the stream
    int     m_nStatus;       // RTSP_STATUS_* state of this session
    int     m_debugLevel;    // verbosity level passed to live555
    int     audio_flag;      // set to 1 once an audio subsession is detected

    const char* m_progName;   // application name passed to live555
    RTSPClient* m_rtspClient; // underlying live555 client (NULL when stopped)
    
    RTSP_VIDEO_CB_HANDLE        m_pVideoCb; // video frame callback
    RTSP_AUDIO_CB_HANDLE        m_pAudioCb; // audio frame callback

    // Record channel parameters and open the URL on the channel's scheduler
    // thread; sets m_nStatus to RUN or STOP accordingly.  Always returns 0.
    int StartRTSPClient(int chn, char const* progName, char const* rtspURL, int debugLevel);
    // Request an asynchronous shutdown via a scheduler event trigger.
    int StopRTSPClient();
    // Create the RTSPClient in 'env' and send the DESCRIBE command.
    int openURL(UsageEnvironment& env);
    // Current RTSP_STATUS_* value.
    int getStatus();
    // Stub: end-of-stream signaling is not implemented.
    int sendEos();
};

// Global bookkeeping for all RTSP client channels.
typedef struct
{
    void*               m_pVideoCb;           // default video callback (per-session copies also kept)
    void*               m_pAudioCb;           // default audio callback
    int                 m_s32RtspClientCount; // number of live RTSPClient objects

    pthread_mutex_t     m_pMutex;             // guards the session table
    CRtspSession*       m_pstRtspSession[RTSP_CHN_NUM_MAX]; // per-channel sessions (NULL when unused)
}RTSP_CLIENT_PARAM_S;

// One event-loop thread per scheduler; channels are spread across them
// with chn % RTSP_CLIENT_THREAD_MAX.
static pthread_t*               g_pRtspLoopThreadId[RTSP_CLIENT_THREAD_MAX];

static TaskScheduler*           g_stScheduler[RTSP_CLIENT_THREAD_MAX]; // live555 scheduler per loop thread
static UsageEnvironment*        g_stEnv[RTSP_CLIENT_THREAD_MAX];       // environment per loop thread
static char                     g_eventLoopWatchVariable; // non-zero stops all doEventLoop() calls
static int                      g_s32Init;                // module-initialized flag
// Per-channel detected encode type; defaults to H.264, updated by DummySink.
static INT32                    g_s32VideoEncodeType[RTSP_CHN_NUM_MAX] = {RTSP_VIDEO_ENCODE_H264};

static RTSP_CLIENT_PARAM_S      g_stRtspClientInfo = {0};

// Thread entry point: runs the live555 event loop of one scheduler until
// g_eventLoopWatchVariable becomes non-zero.  'param' carries the loop index.
void *thread_RtspLoop(void *param)
{
    const int loopChn = (long)param;
    //mp_setaffinity(3);

    MP_LOG_DBG("Rtsp begin LOOP success\n");
    g_stEnv[loopChn]->taskScheduler().doEventLoop(&g_eventLoopWatchVariable);
    MP_LOG_DBG("Rtsp end LOOP success\n");

    return NULL;
}

// Validate a channel index: RTSP_OK when chn is in [0, RTSP_CHN_NUM_MAX),
// RTSP_FAIL otherwise.
static int RtspCheckChn(int chn)
{
    return (chn >= 0 && chn < RTSP_CHN_NUM_MAX) ? RTSP_OK : RTSP_FAIL;
}
static int RtspGetStatus(int chn)
{
    INT32 status = RTSP_STATUS_NO_EXIST;

    if(RtspCheckChn(chn))
    {
        MP_LOG_ERR("Rtsp check chn error: %d\n", chn);
        goto EXIT;
    }

    if (g_stRtspClientInfo.m_pstRtspSession[chn])
    {
        status = g_stRtspClientInfo.m_pstRtspSession[chn]->getStatus();
    }
    else
    {
        status = RTSP_STATUS_NO_EXIST;
    }

EXIT:
    return status;
}

// Find the session whose live555 client is 'rtsp_client'.
// Returns RTSP_OK when found (writing the table slot to *index when the
// caller supplied a pointer), RTSP_FAIL otherwise.
//
// BUG FIX: previously a match was only reported when 'index' was non-NULL;
// with index == NULL the loop fell through and returned RTSP_FAIL even
// though the client was present.  Match detection must not depend on the
// optional out-parameter.
static int GetRtspSession(RTSPClient *rtsp_client, int *index)
{
    int ret = RTSP_FAIL;
    int i = 0;

    for (i = 0; i < RTSP_CHN_NUM_MAX; i++)
    {
        CRtspSession *pSession = g_stRtspClientInfo.m_pstRtspSession[i];

        if (pSession != NULL && pSession->m_rtspClient == rtsp_client)
        {
            if (index)
            {
                *index = i;
            }
            // The table slot should always equal the session's own channel;
            // log a mismatch but still report the hit.
            if (i != pSession->m_s32Chn)
            {
                MP_LOG_DBG("Rtsp session index error! %d %d\n", i, pSession->m_s32Chn);
            }
            ret = RTSP_OK;
            break;
        }
    }

    return ret;
}

// Define a class to hold per-stream state that we maintain throughout each stream's lifetime:

// Per-stream state kept for the lifetime of one RTSP stream.  Each
// "ourRTSPClient" owns exactly one of these.
class StreamClientState {
public:
  StreamClientState();
  virtual ~StreamClientState();

public:
  MediaSubsessionIterator* iter;   // iterates the session's subsessions during SETUP
  MediaSession* session;           // media session built from the SDP
  MediaSubsession* subsession;     // subsession currently being set up
  TaskToken streamTimerTask;       // delayed task armed for the stream's expected duration
  EventTriggerId streamEventId;    // trigger used to start the stream asynchronously
  EventTriggerId streamEventId2;   // trigger used to stop the stream (start/stop kept as separate tasks)
  double duration;                 // expected play duration in seconds (0 if unknown)
};

// If you're streaming just a single stream (i.e., just from a single URL, once), then you can define and use just a single
// "StreamClientState" structure, as a global variable in your application.  However, because - in this demo application - we're
// showing how to play multiple streams, concurrently, we can't do that.  Instead, we have to have a separate "StreamClientState"
// structure for each "RTSPClient".  To do this, we subclass "RTSPClient", and add a "StreamClientState" field to the subclass:

// RTSPClient subclass carrying the per-stream state plus the channel index,
// cached SDP and the frame callbacks; this allows multiple concurrent
// streams, each with its own "StreamClientState".
class ourRTSPClient: public RTSPClient {
public:
  static ourRTSPClient* createNew(UsageEnvironment& env, char const* rtspURL,
				  int verbosityLevel = 0, int chn = 0,
				  char const* applicationName = NULL,
                  portNumBits tunnelOverHTTPPortNum = 0, RTSP_VIDEO_CB_HANDLE recv_handler = NULL, RTSP_AUDIO_CB_HANDLE audio_handler = NULL);

protected:
  ourRTSPClient(UsageEnvironment& env, char const* rtspURL,
      int verbosityLevel, int chn, char const* applicationName, portNumBits tunnelOverHTTPPortNum, RTSP_VIDEO_CB_HANDLE recv_handler, RTSP_AUDIO_CB_HANDLE audio_handler);
    // called only by createNew();
  virtual ~ourRTSPClient();

public:
  StreamClientState scs;  // per-stream state (session, subsession iterator, timers)
  
  char *m_pSdp;           // malloc'd copy of the SDP text (NULL until cached)
  int   m_s32Chn;         // channel index this client serves
  RTSP_VIDEO_CB_HANDLE m_pVideoCb; // video frame callback, handed to the sink
  RTSP_AUDIO_CB_HANDLE m_pAudioCb; // audio frame callback, handed to the sink
};

// Define a data sink (a subclass of "MediaSink") to receive the data for each subsession (i.e., each audio or video 'substream').
// In practice, this might be a class (or a chain of classes) that decodes and then renders the incoming audio or video.
// Or it might be a "FileSink", for outputting the received data into a file (as is done by the "openRTSP" application).
// In this example code, however, we define a simple 'dummy' sink that receives incoming data, but does nothing with it.

// MediaSink that receives the demuxed frames of one subsession and forwards
// them to the registered video/audio callbacks, re-inserting Annex-B start
// codes and the parameter sets (SPS/PPS/VPS/SEI) parsed from the SDP.
class DummySink: public MediaSink {
public:
  static DummySink* createNew(UsageEnvironment& env,
			      MediaSubsession& subsession, int chn, char*dsp, // identifies the kind of data that's being received
                  char const* streamId = NULL, RTSP_VIDEO_CB_HANDLE recv_handler = NULL, RTSP_AUDIO_CB_HANDLE audio_handler = NULL); // identifies the stream itself (optional)

private:
    DummySink(UsageEnvironment& env, MediaSubsession& subsession, int chn, char*dsp, char const* streamId, RTSP_VIDEO_CB_HANDLE recv_handler, 
        RTSP_AUDIO_CB_HANDLE audio_handler);
    // called only by "createNew()"
  virtual ~DummySink();

  // Static trampoline handed to live555; forwards to the member overload.
  static void afterGettingFrame(void* clientData, unsigned frameSize,
                                unsigned numTruncatedBytes,
				struct timeval presentationTime,
                                unsigned durationInMicroseconds);
  void afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
			 struct timeval presentationTime, unsigned durationInMicroseconds);

private:
  // redefined virtual functions:
  virtual Boolean continuePlaying();

private:
    u_int8_t* fReceiveBuffer;     // frame reassembly buffer (DUMMY_SINK_RECEIVE_BUFFER_SIZE)
    MediaSubsession& fSubsession; // subsession this sink serves
    char* fStreamId;              // strdup'd stream identifier (for logging)

    //add 2020.10.27
    char    m_cNalHead[4];        // Annex-B start code 00 00 00 01
    char    m_cSps[128];          // cached SPS NAL
    int     m_s32SpsLen;

    char    m_cPps[32];           // cached PPS NAL
    int     m_s32PpsLen;

    char    m_cSei[2048];         // cached SEI NAL
    int     m_s32SeiLen;
    
    char    m_cVps[48];           // cached VPS NAL (H.265 only)
    int     m_s32VpsLen;
    int     m_s32Chn;             // channel index

    RTSP_VIDEO_CB_HANDLE m_pVideoCb; // video frame callback
    RTSP_AUDIO_CB_HANDLE m_pAudioCb; // audio frame callback
    char*   m_pSdp;               // malloc'd copy of the SDP text (may be NULL)
    bool    m_bHaveSpsPps;        // true once SPS/PPS have been delivered
    int     m_encoder_type;       //@0:264 1:265
};

// Construct an idle session.
// BUG FIX: previously only the three pointers were initialized; the channel
// index, status, debug level, audio flag and URL buffer were left with
// indeterminate values, so getStatus()/logging before StartRTSPClient()
// read uninitialized memory.
CRtspSession::CRtspSession()
{
    m_s32Chn = -1;
    m_rtspUrl[0] = '\0';
    m_nStatus = RTSP_STATUS_NONE;
    m_debugLevel = 0;
    audio_flag = 0;
    m_progName = NULL;
    m_rtspClient = NULL;
    m_pVideoCb = NULL;
    m_pAudioCb = NULL;
}

// Nothing to release here: the RTSPClient is torn down by shutdownStream(),
// driven from the scheduler thread, not by this destructor.
CRtspSession::~CRtspSession()
{
}

// Stub: intended to push an end-of-stream marker to consumers when the
// stream stops; currently does nothing and always returns 0.
int CRtspSession::sendEos()
{
    return 0;
}

// Record the channel parameters and open the URL on this channel's
// scheduler thread.  m_nStatus becomes RTSP_STATUS_RUN on success,
// RTSP_STATUS_STOP on failure.  Always returns 0 (callers poll getStatus()).
//
// BUG FIX 1: strncpy() does not NUL-terminate when the source is >= the
// bound, and m_rtspUrl is not pre-zeroed — terminate explicitly.
// BUG FIX 2: the "start success" message was logged unconditionally, even
// when openURL() failed; it is now logged only on the success path.
int CRtspSession::StartRTSPClient(int chn, char const* progName, char const* rtspURL, int debugLevel)
{
    m_s32Chn = chn;
    m_progName = progName;
    m_debugLevel = debugLevel;
    m_nStatus = RTSP_STATUS_NONE;
    strncpy(m_rtspUrl, rtspURL, sizeof(m_rtspUrl)-1);
    m_rtspUrl[sizeof(m_rtspUrl)-1] = '\0';

    // Channels are distributed round-robin across the event-loop threads.
    int iTrdChn = chn%RTSP_CLIENT_THREAD_MAX;

    if (g_stEnv[iTrdChn] != NULL && 0 == openURL(*g_stEnv[iTrdChn]))
    {
        m_nStatus = RTSP_STATUS_RUN;
        MP_LOG_DBG("Rtsp client start success, chn:%d, url:%s \n", chn, m_rtspUrl);
    }
    else
    {
        m_nStatus = RTSP_STATUS_STOP;
        MP_LOG_ERR("Rtsp client start error (open url error)\n");
    }

    return 0;
}

int CRtspSession::StopRTSPClient()
{
    if (m_rtspClient != NULL)
    {
        ourRTSPClient *rtspClient = (ourRTSPClient *)m_rtspClient;
        UsageEnvironment &env = rtspClient->envir();                 // alias
        StreamClientState &scs = ((ourRTSPClient *)rtspClient)->scs; // alias

        /*
         * streamEventId2 will be err(0), when all 32 event handles in live555 have been used.
         * In this time, we need find the unused event and then release it.
        */
        scs.streamEventId2 = env.taskScheduler().createEventTrigger((TaskFunc *)streamStopEventHandler, m_rtspClient);
        if ((scs.streamEventId2 & ~0) == 0)
        {
            MP_LOG_ERR("Create event trigger failed, url:%s\n", m_rtspUrl);
            env.taskScheduler().deleteEventTrigger(0xFFFFFFFF);
        } else {
            env.taskScheduler().triggerEvent(scs.streamEventId2, m_rtspClient);
        }
    }

    return 0;
}

// Current RTSP_STATUS_* of this session.
int CRtspSession::getStatus()
{
    return m_nStatus;
}


// Create the live555 client for this channel inside 'env' and send the
// initial DESCRIBE (the #else branch keeps an alternative that defers the
// DESCRIBE to a scheduler event).  Returns 0 on success, -1 on failure.
int CRtspSession::openURL(UsageEnvironment& env)
{
    m_rtspClient = ourRTSPClient::createNew(env, m_rtspUrl, m_debugLevel, m_s32Chn, m_progName, 0, m_pVideoCb, m_pAudioCb);
    if (NULL == m_rtspClient)
    {
        MP_LOG_DBG("create rtsp client error, url:%s\n", m_rtspUrl);
        return -1;
    }
    //mp_sleep_ms(100);

    // Track the number of live clients; decremented in shutdownStream().
    ++g_stRtspClientInfo.m_s32RtspClientCount;
    
    ourRTSPClient *rtspClient = (ourRTSPClient *)m_rtspClient;

#if 1
    // Send DESCRIBE directly from the caller's context.
    rtspClient->sendDescribeCommand(continueAfterDESCRIBE);
#else
    // Alternative: hand the DESCRIBE off to the scheduler thread via an
    // event trigger (streamStartEventHandler).
    StreamClientState &scs = ((ourRTSPClient *)rtspClient)->scs; // alias

    /*
     * streamEventId will be err(0), when all 32 event handles in live555 have been used.
     * In this time, we need find the unused event and then release it.
    */
	scs.streamEventId = env.taskScheduler().createEventTrigger((TaskFunc *)streamStartEventHandler, m_rtspClient);
	if ((scs.streamEventId & ~0) == 0) {
        MP_LOG_DBG("Create Event Trigger Failed, rtsp[%s] \n", m_rtspUrl);
        env.taskScheduler().deleteEventTrigger(0xFFFFFFFF);
	} else {
		env.taskScheduler().triggerEvent(scs.streamEventId, m_rtspClient);
	}
#endif
    return 0;
}

// A function that outputs a string that identifies each stream (for debugging output).  Modify this if you wish:
// A function that outputs a string that identifies each stream (for debugging output).  Modify this if you wish:
// (The URL prefix is deliberately suppressed here; the stream identity is
// logged elsewhere via MP_LOG_*.)
UsageEnvironment& operator<<(UsageEnvironment& env, const RTSPClient& rtspClient) {
  //return env << "[URL:\"" << rtspClient.url() << "\"]: ";
  return env;
}

// A function that outputs a string that identifies each subsession (for debugging output).  Modify this if you wish:
// A function that outputs a string that identifies each subsession (for debugging output), e.g. "video/H264".
UsageEnvironment& operator<<(UsageEnvironment& env, const MediaSubsession& subsession) {
  return env << subsession.mediumName() << "/" << subsession.codecName();
}

// Print the demo program's command-line usage to the live555 environment.
void usage(UsageEnvironment& env, char const* progName) {
  env << "Usage: " << progName << " <rtsp-url-1> ... <rtsp-url-N>\n"
      << "\t(where each <rtsp-url-i> is a \"rtsp://\" URL)\n";
}

// Used to iterate through each stream's 'subsessions', setting up each one:
void setupNextSubsession(RTSPClient* rtspClient)
{
    UsageEnvironment& env = rtspClient->envir(); // alias
    StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

    scs.subsession = scs.iter->next();
    if (scs.subsession != NULL)
    {
        if (!scs.subsession->initiate())
        {
            env << *rtspClient << "Failed to initiate the \"" << *scs.subsession << "\" subsession: " << env.getResultMsg() << "\n";
            setupNextSubsession(rtspClient); // give up on this subsession; go to the next one
        }
        else
        {
            env << *rtspClient << "Initiated the \"" << *scs.subsession << "\" subsession (";
            if (scs.subsession->rtcpIsMuxed())
            {
                env << "client port " << scs.subsession->clientPortNum();
            } else {
                env << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1;
            }
            env << ")\n";


            MP_LOG_DBG("Stream type :%s\n", scs.subsession->mediumName());
            if(REQUEST_STREAMING_OVER_TCP)
                MP_LOG_DBG("Rtsp stream use tcp\n");
            else
                MP_LOG_DBG("Rtsp stream use udp\n");

            if(0 == strncmp(scs.subsession->mediumName(), "audio", 5))
            {
                int index = -1;
                
                if(RTSP_OK == GetRtspSession(rtspClient, &index))
                {
                    MP_LOG_DBG("Rtsp session index:%d \n", index);
                    g_stRtspClientInfo.m_pstRtspSession[index]->audio_flag = 1;
                }                
            }
            
            // Continue setting up this subsession, by sending a RTSP "SETUP" command:
            rtspClient->sendSetupCommand(*scs.subsession, continueAfterSETUP, False, REQUEST_STREAMING_OVER_TCP);
        }
        return;
    }

    // We've finished setting up all of the subsessions.  Now, send a RTSP "PLAY" command to start the streaming:
    if (scs.session->absStartTime() != NULL)
    {
        // Special case: The stream is indexed by 'absolute' time, so send an appropriate "PLAY" command:
        rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY, scs.session->absStartTime(), scs.session->absEndTime());
    }
    else
    {
        scs.duration = scs.session->playEndTime() - scs.session->playStartTime();
        rtspClient->sendPlayCommand(*scs.session, continueAfterPLAY);
    }
}


// Implementation of the RTSP 'response handlers':
// DESCRIBE response handler: builds a MediaSession from the returned SDP
// and starts the SETUP sequence.  Owns 'resultString' and frees it on
// every path.
void continueAfterDESCRIBE(RTSPClient* rtspClient, int resultCode, char* resultString)
{
    ourRTSPClient *rtsp_client = NULL;
    UsageEnvironment& env = rtspClient->envir(); // alias
    rtsp_client = dynamic_cast<ourRTSPClient*>(rtspClient);
    if (!rtsp_client)
    {
        env << " rtsp_client error "<< "\n";
        delete[] resultString;
        return;
    }
    do
    {
        StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

        if (resultCode != 0)
        {
            // 401 means the server rejected our credentials.
            if(401 == resultCode)
            {
                //g_stRtspClientInfo.dev_mgr_info->discard_info.status_info.stream_status[rtsp_client->m_s32Chn] = CHN_STATUS_AUTH_ERR;
            }
            env << *rtspClient << "Failed to get a SDP description: " << resultString << " resultCode:" << resultCode << "\n";
            delete[] resultString;
            break;
        }

        char* const sdpDescription = resultString;
        //env << *rtspClient << "Got a SDP description:\n" << sdpDescription << "\n";

        // Create a media session object from this SDP description:
        scs.session = MediaSession::createNew(env, sdpDescription);
        delete[] sdpDescription; // because we don't need it anymore
        
        if (scs.session == NULL)
        {
            env << *rtspClient << "Failed to create a MediaSession object from the SDP description: " << env.getResultMsg() << "\n";
            break;
        }

// Disabled: caching the SDP text on the client (kept for reference).
#if 0
        if (scs.session->getMedia())
        {
            if (rtsp_client->m_pSdp != NULL)
            {
                //delete[] rtsp_client->m_pSdp;
                free(rtsp_client->m_pSdp);
                rtsp_client->m_pSdp = NULL;
            }

            //rtsp_client->m_pSdp = new char[strlen(scs.session->getMedia()) + 1];
            int mediaLen = strlen(scs.session->getMedia()) + 1;
            rtsp_client->m_pSdp = (char *)malloc(mediaLen);
            if (rtsp_client->m_pSdp)
            {
                memcpy(rtsp_client->m_pSdp, scs.session->getMedia(), mediaLen);
            }
            else
            {
                MP_LOG_ERR("malloc err, rtsp_client->m_pSdp == NULL \n");
            }
        }
        else if (!scs.session->hasSubsessions())
#else 
        if (!scs.session->hasSubsessions())
#endif
        {
            env << *rtspClient << "This session has no media subsessions (i.e., no \"m=\" lines)\n";
            break;
        }

        // Then, create and set up our data source objects for the session.  We do this by iterating over the session's 'subsessions',
        // calling "MediaSubsession::initiate()", and then sending a RTSP "SETUP" command, on each one.
        // (Each 'subsession' will have its own data source.)
        scs.iter = new MediaSubsessionIterator(*scs.session);
        setupNextSubsession(rtspClient);
        return;
    } while (0);

    // An unrecoverable error occurred with this stream.
    // NOTE(review): shutdownStream is intentionally not called here; teardown
    // is driven externally (see subsessionAfterPlaying / streamStopEventHandler).
    MP_LOG_DBG("Shutdown stream!\n");
    //shutdownStream(rtspClient);
}

// SETUP response handler: on success, creates the DummySink for the
// subsession, starts playing it, registers a RTCP "BYE" handler, then
// moves on to the next subsession (or PLAY when all are set up).
void continueAfterSETUP(RTSPClient* rtspClient, int resultCode, char* resultString) {
  do {
    UsageEnvironment& env = rtspClient->envir(); // alias
    StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

    if (resultCode != 0) {
      env << *rtspClient << "Failed to set up the \"" << *scs.subsession << "\" subsession: " << resultString << "\n";
      break;
    }

    env << *rtspClient << "Set up the \"" << *scs.subsession << "\" subsession (";
    if (scs.subsession->rtcpIsMuxed()) {
      env << "client port " << scs.subsession->clientPortNum();
    } else {
      env << "client ports " << scs.subsession->clientPortNum() << "-" << scs.subsession->clientPortNum()+1;
    }
    env << ")\n";

    // Having successfully setup the subsession, create a data sink for it, and call "startPlaying()" on it.
    // (This will prepare the data sink to receive data; the actual flow of data from the client won't start happening until later,
    // after we've sent a RTSP "PLAY" command.)

    scs.subsession->sink = DummySink::createNew(env, *scs.subsession, ((ourRTSPClient*)rtspClient)->m_s32Chn,
       ((ourRTSPClient*)rtspClient)->m_pSdp, rtspClient->url(), ((ourRTSPClient*)rtspClient)->m_pVideoCb, ((ourRTSPClient*)rtspClient)->m_pAudioCb);
      // perhaps use your own custom "MediaSink" subclass instead
    if (scs.subsession->sink == NULL) {
      env << *rtspClient << "Failed to create a data sink for the \"" << *scs.subsession
	  << "\" subsession: " << env.getResultMsg() << "\n";
      break;
    }

    env << *rtspClient << "Created a data sink for the \"" << *scs.subsession << "\" subsession\n";
    scs.subsession->miscPtr = rtspClient; // a hack to let subsession handler functions get the "RTSPClient" from the subsession 
    scs.subsession->sink->startPlaying(*(scs.subsession->readSource()),
				       subsessionAfterPlaying, scs.subsession);
    // Also set a handler to be called if a RTCP "BYE" arrives for this subsession:
    if (scs.subsession->rtcpInstance() != NULL) {
      scs.subsession->rtcpInstance()->setByeHandler(subsessionByeHandler, scs.subsession);
    }
  } while (0);
  delete[] resultString;

  // Set up the next subsession, if any:
  setupNextSubsession(rtspClient);
}

// PLAY response handler: on success, optionally arms a timer for the
// stream's expected duration.  Frees 'resultString' on all paths.
void continueAfterPLAY(RTSPClient* rtspClient, int resultCode, char* resultString) {
  Boolean success = False;
  ourRTSPClient *rtsp_client = NULL;
  
  rtsp_client = (ourRTSPClient*)rtspClient;

  do {
    UsageEnvironment& env = rtspClient->envir(); // alias
    StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias

    if (resultCode != 0)
    {
        // Channel status reporting (disabled): connection failed.
        if(NULL != rtsp_client && (rtsp_client->m_s32Chn >= 0 && rtsp_client->m_s32Chn < RTSP_CHN_NUM_MAX))
        {
            //g_stRtspClientInfo.dev_mgr_info->discard_info.status_info.stream_status[rtsp_client->m_s32Chn] = CHN_STATUS_CONNECT_ERR;
        }
        env << *rtspClient << "Failed to start playing session: " << resultString << "\n";
        break;
    }
    
    // Set a timer to be handled at the end of the stream's expected duration (if the stream does not already signal its end
    // using a RTCP "BYE").  This is optional.  If, instead, you want to keep the stream active - e.g., so you can later
    // 'seek' back within it and do another RTSP "PLAY" - then you can omit this code.
    // (Alternatively, if you don't want to receive the entire stream, you could set this timer for some shorter value.)
    if (scs.duration > 0) {
      unsigned const delaySlop = 2; // number of seconds extra to delay, after the stream's expected duration.  (This is optional.)
      scs.duration += delaySlop;
      unsigned uSecsToDelay = (unsigned)(scs.duration*1000000);
      scs.streamTimerTask = env.taskScheduler().scheduleDelayedTask(uSecsToDelay, (TaskFunc*)streamTimerHandler, rtspClient);
    }

    env << *rtspClient << "Started playing session";
    if (scs.duration > 0) {
      env << " (for up to " << scs.duration << " seconds)";
    }
    env << "...\n";
    
    // Channel status reporting (disabled): connection established.
    if(NULL != rtsp_client && rtsp_client->m_s32Chn >= 0 && rtsp_client->m_s32Chn < RTSP_CHN_NUM_MAX)
    {
        //g_stRtspClientInfo.dev_mgr_info->discard_info.status_info.stream_status[rtsp_client->m_s32Chn] = CHN_STATUS_CONNECT_OK;
    }
    
    success = True;
  } while (0);
  delete[] resultString;

  if (!success) {
    // An unrecoverable error occurred with this stream.
    // Teardown is driven externally; see the note in continueAfterDESCRIBE.
    MP_LOG_DBG("Shutdown stream!\n");
    //shutdownStream(rtspClient);
  }
}


// Implementation of the other event handlers:

// Called when a subsession finishes playing: closes its sink and, once all
// subsessions are closed, logs that the whole client can be shut down.
void subsessionAfterPlaying(void* clientData) {
  MediaSubsession* subsession = (MediaSubsession*)clientData;
  // Removed 2019.11.18: calling shutdownStream here and then re-creating a
  // shutdown task occasionally deadlocked inside the task.  Shutdown is now
  // driven solely from close_client via a scheduler task, managed entirely
  // outside this thread, which also avoids leaks.
  //RTSPClient* rtspClient = (RTSPClient*)(subsession->miscPtr);

  // Begin by closing this subsession's stream:
  Medium::close(subsession->sink);
  subsession->sink = NULL;

  // Next, check whether *all* subsessions' streams have now been closed:
  MediaSession& session = subsession->parentSession();
  MediaSubsessionIterator iter(session);
  while ((subsession = iter.next()) != NULL) {
    if (subsession->sink != NULL) return; // this subsession is still active
  }

  // All subsessions' streams have now been closed, so shutdown the client:
  MP_LOG_DBG("Shutdown stream!\n");
  //shutdownStream(rtspClient);
}

// RTCP "BYE" handler for one subsession: log it, then treat the subsession
// as having finished playing.
void subsessionByeHandler(void* clientData) {
  MediaSubsession* subsession = (MediaSubsession*)clientData;
  RTSPClient* rtspClient = (RTSPClient*)subsession->miscPtr;

  rtspClient->envir() << *rtspClient << "Received RTCP \"BYE\" on \"" << *subsession << "\" subsession\n";

  // Now act as if the subsession had closed normally:
  subsessionAfterPlaying(subsession);
}

void streamTimerHandler(void* clientData) {
  ourRTSPClient* rtspClient = (ourRTSPClient*)clientData;
  StreamClientState& scs = rtspClient->scs; // alias

  //这个逻辑不会走进来
  scs.streamTimerTask = NULL;

  // Shut down the stream:
  MP_LOG_DBG("Shutdown stream!\n");
  shutdownStream(rtspClient);
}

// Scheduler-thread handler for the stop trigger created by StopRTSPClient():
// releases the trigger slot and tears the stream down.
void streamStopEventHandler(void *clientData)
{
    if (NULL == clientData)
    {
        MP_LOG_DBG("Client data is NULL!\n"); 
        return;  
    }

    ourRTSPClient *rtspClient = (ourRTSPClient *)clientData;
    UsageEnvironment &env = rtspClient->envir();
    StreamClientState &scs = rtspClient->scs; // alias

    // Release the trigger slot that delivered this event (live555 has only
    // 32 of them) so it can be reused.
    env.taskScheduler().deleteEventTrigger(scs.streamEventId2);
    scs.streamEventId2 = 0;

    // Shut down the stream:
    // BUG FIX: the format string lacked a %p for 'clientData', so the
    // argument list did not match the conversions (undefined behavior).
    MP_LOG_DBG("Stop rtsp client, client data:%p, chn:%d\n", clientData, rtspClient->m_s32Chn);
    shutdownStream(rtspClient);
}

// Scheduler-thread handler for the start trigger (alternative start path in
// openURL's #else branch): releases the trigger slot, then sends DESCRIBE.
void streamStartEventHandler(void *clientData)
{
    if (NULL == clientData)
    {
        MP_LOG_DBG("Client data is NULL!\n"); 
        return;
    }

    ourRTSPClient *rtspClient = (ourRTSPClient *)clientData;
    // BUG FIX: both log calls lacked a %p for 'clientData', so the argument
    // list did not match the conversions (undefined behavior).
    MP_LOG_DBG("Start rtsp client 1, client data:%p, chn:%d\n", clientData, rtspClient->m_s32Chn);
    UsageEnvironment &env = rtspClient->envir();
    StreamClientState &scs = rtspClient->scs; // alias

    // Release the trigger slot that delivered this event so it can be reused.
    env.taskScheduler().deleteEventTrigger(scs.streamEventId);
    scs.streamEventId = 0;

    // start the stream:
    rtspClient->sendDescribeCommand(continueAfterDESCRIBE);
    MP_LOG_DBG("Start rtsp client 2, client data:%p, chn:%d\n", clientData, rtspClient->m_s32Chn);
}

// Tear down one stream and its RTSPClient: close all subsession sinks,
// send TEARDOWN if anything was active, detach the client from its session
// table entry, then destroy the client object.  Must run on the scheduler
// thread (it is invoked from event/timer handlers).
void shutdownStream(RTSPClient* rtspClient, int exitCode)
{
    int index = -1;
    MP_LOG_DBG("Call shutdown 1, rtsp client:%p \n",rtspClient);
    // Unknown client => it was already shut down (or never registered).
    if(rtspClient != NULL && RTSP_FAIL == GetRtspSession(rtspClient, &index))
    {
        MP_LOG_DBG("Alread shutdown error!");
        return;
    }

    if(RtspGetStatus(index) == RTSP_STATUS_STOP)
    {
        MP_LOG_DBG("Rtsp client Alread stop error!chn = %d\n", index);
        return;
    }
    //UsageEnvironment& env = rtspClient->envir(); // alias
    StreamClientState& scs = ((ourRTSPClient*)rtspClient)->scs; // alias
    int i = 0;

    // First, check whether any subsessions have still to be closed:
    if (scs.session != NULL)
    {
        Boolean someSubsessionsWereActive = False;
        MediaSubsessionIterator iter(*scs.session);
        MediaSubsession* subsession;

        while ((subsession = iter.next()) != NULL)
        {
            if (subsession->sink != NULL)
            {
                Medium::close(subsession->sink);
                subsession->sink = NULL;

                if (subsession->rtcpInstance() != NULL) {
                subsession->rtcpInstance()->setByeHandler(NULL, NULL); // in case the server sends a RTCP "BYE" while handling "TEARDOWN"
                }

                someSubsessionsWereActive = True;
            }
        }

        if (someSubsessionsWereActive)
        {
          // Send a RTSP "TEARDOWN" command, to tell the server to shutdown the stream.
          // Don't bother handling the response to the "TEARDOWN".
          rtspClient->sendTeardownCommand(*scs.session, NULL);
        }
    }

    // Detach the client from its owning session and mark the channel stopped.
    for(i = 0; i < RTSP_CHN_NUM_MAX; i++)
    {
        if(NULL != g_stRtspClientInfo.m_pstRtspSession[i])
        {
            if(g_stRtspClientInfo.m_pstRtspSession[i]->m_rtspClient == rtspClient)
            {
                g_stRtspClientInfo.m_pstRtspSession[i]->m_rtspClient = NULL;
                
                g_stRtspClientInfo.m_pstRtspSession[i]->sendEos();
                g_stRtspClientInfo.m_pstRtspSession[i]->m_nStatus = RTSP_STATUS_STOP;
                MP_LOG_DBG("g_stRtspClientInfo.m_pstRtspSession[%d]->m_nStatus=RTSP_STATUS_STOP\n", i);

                // Bug note: after Medium::close the RTSPClient is freed; if
                // openURL runs next it may allocate a new client at the SAME
                // address as the one just freed.  Without the break below,
                // this loop would then NULL out both table entries and the
                // later openURL path would dereference NULL (segfault).
                // Fixes: 1) break here, since two RTSP streams can never
                // share one session; 2) alternatively move the close to the
                // very last step.
                break;
            }
        }
    }
    MP_LOG_DBG("Closing the stream , rtsp client:%p\n", rtspClient);
    MP_LOG_DBG("Call shutdown 2, rtsp client:%p \n",rtspClient);
    Medium::close(rtspClient);
    --g_stRtspClientInfo.m_s32RtspClientCount;
}

// Implementation of "ourRTSPClient":
ourRTSPClient* ourRTSPClient::createNew(UsageEnvironment& env, char const* rtspURL,
    int verbosityLevel, int chn, char const* applicationName, portNumBits tunnelOverHTTPPortNum, RTSP_VIDEO_CB_HANDLE recv_handler, RTSP_AUDIO_CB_HANDLE audio_handler) {
    return new ourRTSPClient(env, rtspURL, verbosityLevel, chn, applicationName, tunnelOverHTTPPortNum, recv_handler, audio_handler);
}

// Construct the client, stash the channel index and frame callbacks, and
// defer SDP caching (m_pSdp stays NULL until a description is stored).
ourRTSPClient::ourRTSPClient(UsageEnvironment& env, char const* rtspURL,
    int verbosityLevel, int chn, char const* applicationName, portNumBits tunnelOverHTTPPortNum, RTSP_VIDEO_CB_HANDLE recv_handler, RTSP_AUDIO_CB_HANDLE audio_handler)
  : RTSPClient(env,rtspURL, verbosityLevel, applicationName, tunnelOverHTTPPortNum, -1),
    m_pSdp(NULL),
    m_s32Chn(chn),
    m_pVideoCb(recv_handler),
    m_pAudioCb(audio_handler) {
}

// Release the malloc'd SDP copy (free(NULL) is a no-op, so no guard needed).
ourRTSPClient::~ourRTSPClient() {
    free(m_pSdp);
    m_pSdp = NULL;
    MP_LOG_DBG("Release class, this: %p \n",this);
}


// Implementation of "StreamClientState":

// Initialize all per-stream state to "empty".
// BUG FIX: streamEventId and streamEventId2 were left uninitialized; the
// event handlers pass them to deleteEventTrigger(), so a garbage trigger id
// could delete an unrelated live trigger.  0 is live555's "no trigger".
StreamClientState::StreamClientState()
  : iter(NULL), session(NULL), subsession(NULL), streamTimerTask(NULL),
    streamEventId(0), streamEventId2(0), duration(0.0) {
}

// Release the subsession iterator and, when a session exists, cancel the
// pending duration timer before closing the session.
StreamClientState::~StreamClientState() {
  delete iter;
  MP_LOG_DBG("Release class, this: %p \n",this);

  if (session != NULL) {
    // We also need to delete "session", and unschedule "streamTimerTask" (if set)
    session->envir().taskScheduler().unscheduleDelayedTask(streamTimerTask);
    Medium::close(session);
  }
}


// Implementation of "DummySink":

// Even though we're not going to be doing anything with the incoming data, we still need to receive it.
// Define the size of the buffer that we'll use:
#define DUMMY_SINK_RECEIVE_BUFFER_SIZE (1024*1024)

DummySink* DummySink::createNew(UsageEnvironment& env, MediaSubsession& subsession, int chn, char* dsp, char const* streamId, RTSP_VIDEO_CB_HANDLE recv_handler, 
    RTSP_AUDIO_CB_HANDLE audio_handler) {
  // Factory: allocate a sink for one media subsession.  Ownership follows the
  // live555 convention (released via Medium::close()).
  DummySink* sink = new DummySink(env, subsession, chn, dsp, streamId, recv_handler, audio_handler);
  return sink;
}

// Sink constructor: caches the Annex-B start code, zeroes the parameter-set
// caches (SPS/PPS/SEI/VPS), copies the SDP sprop string (used as a fallback
// source of SPS/PPS), allocates the receive buffer, and records the channel's
// video codec type.
DummySink::DummySink(UsageEnvironment &env, MediaSubsession &subsession, int chn, char *dsp, char const *streamId, RTSP_VIDEO_CB_HANDLE recv_handler, 
    RTSP_AUDIO_CB_HANDLE audio_handler)
    : MediaSink(env),
      fSubsession(subsession)
{
    // 4-byte Annex-B start code prepended to every NAL unit forwarded upstream.
    m_cNalHead[0] = 0x00;
    m_cNalHead[1] = 0x00;
    m_cNalHead[2] = 0x00;
    m_cNalHead[3] = 0x01;

    // Parameter-set caches start empty; they are filled as SPS/PPS/SEI/VPS
    // NAL units arrive and are flushed in front of the next IDR frame.
    memset(m_cSps, 0, sizeof(m_cSps));
    memset(m_cPps, 0, sizeof(m_cPps));
    memset(m_cSei, 0, sizeof(m_cSei));
    memset(m_cVps, 0, sizeof(m_cVps));
    m_s32SpsLen = 0;
    m_s32PpsLen = 0;
    m_s32SeiLen = 0;
    m_s32VpsLen = 0;
    m_s32Chn = chn;
    m_pVideoCb = recv_handler;
    m_pAudioCb = audio_handler;
    // NOTE(review): strdup result is not checked; on allocation failure
    // fStreamId would be NULL — confirm callers tolerate that.
    fStreamId = strdup(streamId);
    fReceiveBuffer = new u_int8_t[DUMMY_SINK_RECEIVE_BUFFER_SIZE];
    m_pSdp = NULL;
    if (dsp)
    {
        // Take a private copy of the SDP sprop string (includes the NUL),
        // so the fallback SPS/PPS recovery in afterGettingFrame() can use it.
        m_pSdp = (char*)malloc(strlen(dsp) + 1);
        if(m_pSdp)
        {
            memcpy(m_pSdp, dsp, strlen(dsp) + 1);
        }
        else
        {
            MP_LOG_ERR("malloc err, m_pSdp == NULL!\n");
        }
    }
    m_bHaveSpsPps = false;
    m_encoder_type = RTSP_VIDEO_ENCODE_H264;
    // Besides video there may also be an audio subsession.  If the per-channel
    // codec type were set unconditionally, whichever subsession constructed
    // first could leave the channel marked H264 regardless of the real video
    // codec — so only record the codec when this sink's subsession is
    // actually H264/H265 video.
    if (0 == strncmp(fSubsession.codecName(), "H265", 4))
    {
        m_encoder_type = RTSP_VIDEO_ENCODE_H265;
        g_s32VideoEncodeType[m_s32Chn] = m_encoder_type;
    }
    else if(0 == strncmp(fSubsession.codecName(), "H264", 4))
    {
        m_encoder_type = RTSP_VIDEO_ENCODE_H264;
        g_s32VideoEncodeType[m_s32Chn] = m_encoder_type;
    }
}

DummySink::~DummySink() {
  // Release the frame receive buffer, the strdup'd stream id, and the SDP
  // copy.  free(NULL) is a no-op, so no guard is needed around m_pSdp.
  delete[] fReceiveBuffer;
  free(fStreamId);
  free(m_pSdp);
  m_pSdp = NULL;
}

void DummySink::afterGettingFrame(void* clientData, unsigned frameSize, unsigned numTruncatedBytes,
				  struct timeval presentationTime, unsigned durationInMicroseconds) {
  DummySink* sink = (DummySink*)clientData;
  sink->afterGettingFrame(frameSize, numTruncatedBytes, presentationTime, durationInMicroseconds);
}

// If you don't want to see debugging output for each received frame, then comment out the following line:
//#define DEBUG_PRINT_EACH_RECEIVED_FRAME 
// Frame-delivery handler.  live555 has just written one encoded frame into
// fReceiveBuffer + RTSP_DATA_HEAD_LEN (the head area is reserved so that
// start codes and cached parameter sets can be prepended in place without a
// second copy).  For video: prepend an Annex-B start code, classify the NAL
// unit, cache VPS/SPS/PPS/SEI for later, and deliver I/P frames via
// m_pVideoCb (an IDR frame is delivered with any cached parameter sets glued
// in front of it).  For audio: forward the raw frame via m_pAudioCb.
void DummySink::afterGettingFrame(unsigned frameSize, unsigned numTruncatedBytes,
    struct timeval presentationTime, unsigned /*durationInMicroseconds*/)
{
    // We've just received a frame of data.  (Optionally) print out information about it:
#ifdef DEBUG_PRINT_EACH_RECEIVED_FRAME
    if (fStreamId != NULL) envir() << "Stream \"" << fStreamId << "\"; ";
    envir() << fSubsession.mediumName() << "/" << fSubsession.codecName() << ":\tReceived " << frameSize << " bytes";
    if (numTruncatedBytes > 0) envir() << " (with " << numTruncatedBytes << " bytes truncated)";
    char uSecsStr[6+1]; // used to output the 'microseconds' part of the presentation time
    sprintf(uSecsStr, "%06u", (unsigned)presentationTime.tv_usec);
    envir() << ".\tPresentation time: " << (int)presentationTime.tv_sec << "." << uSecsStr;
    if (fSubsession.rtpSource() != NULL && !fSubsession.rtpSource()->hasBeenSynchronizedUsingRTCP()) {
    envir() << "!"; // mark the debugging output to indicate that this presentation time is not RTCP-synchronized
    }
#ifdef DEBUG_PRINT_NPT
    envir() << "\tNPT: " << fSubsession.getNormalPlayTime(presentationTime);
#endif
    envir() << "\n";
#endif

    if(0 == strncmp(fSubsession.mediumName(), "video", 5) && NULL != m_pVideoCb)
    {
        UINT64 u64Pts;
        UINT32 u32RealFrameSize = frameSize + sizeof(m_cNalHead);
        // Prepend the 4-byte Annex-B start code in place, inside the reserved
        // head area of the receive buffer.
        unsigned char* pHead = fReceiveBuffer + RTSP_DATA_HEAD_LEN - sizeof(m_cNalHead);

        memcpy(pHead, m_cNalHead, sizeof(m_cNalHead));

        // Extract the NAL unit type from the first payload byte (pHead[4],
        // i.e. the byte after the start code): low 5 bits for H.264,
        // bits 6..1 for H.265.
        int slice_type = -1;

        switch(m_encoder_type)
        {
            case RTSP_VIDEO_ENCODE_H264:
            {
                slice_type = pHead[4] & 0x1F;
                break; 
            }
            case RTSP_VIDEO_ENCODE_H265:
            {
                slice_type = (pHead[4] & 0x7E) >> 1;
                break; 
            }
            default:
            {
                MP_LOG_ERR("encoder_type is unknow %d \n", m_encoder_type);
                break;
            }
        }

        // NOTE(review): the cases below mix H.264 and H.265 NAL-type constants
        // in a single switch; if the numeric values ever overlap across the
        // two codecs a frame could be misclassified — confirm the enum values.
        switch(slice_type)
        {
            case NAL_UNIT_PREFIX_SEI:
            case NAL_SEI:
            {
                // Cache the SEI (start code included) for the next IDR frame.
                if (u32RealFrameSize > sizeof(m_cSei))
                {
                    MP_LOG_ERR("sei too long %d\n", u32RealFrameSize);
                }
                else
                {
                    memcpy(m_cSei, pHead, u32RealFrameSize);
                    m_s32SeiLen = u32RealFrameSize;
                }
                break;
            }
            case NAL_UNIT_VPS:
            {
                // Cache the VPS (H.265 only).
                if (u32RealFrameSize > sizeof(m_cVps))
                {
                    MP_LOG_ERR("vps too long %d\n", u32RealFrameSize);
                }
                else
                {
                    memcpy(m_cVps, pHead, u32RealFrameSize);
                    m_s32VpsLen = u32RealFrameSize;
                }
                break;
            }
            case NAL_UNIT_SPS:
            case NAL_SPS:
            {
                m_bHaveSpsPps = true;
                if (u32RealFrameSize > sizeof(m_cSps))
                {
                    MP_LOG_ERR("sps too long %d\n", u32RealFrameSize);
                }
                else
                {
                    memcpy(m_cSps, pHead, u32RealFrameSize);
                    m_s32SpsLen = u32RealFrameSize;
                }
                break;
            }
            case NAL_UNIT_PPS:
            case NAL_PPS:
            {
                m_bHaveSpsPps = true;
                if (u32RealFrameSize > sizeof(m_cPps))
                {
                    MP_LOG_ERR("pps too long %d\n", u32RealFrameSize);
                }
                else
                {
                    memcpy(m_cPps, pHead, u32RealFrameSize);
                    m_s32PpsLen = u32RealFrameSize;
                }
                break;
            }
            case NAL_IDR_W_RADL:
            case NAL_IDR_SLICE:
            {
                unsigned char *p_tmp_head;
                int user_sps_size = 0;
                int user_pps_size = 0;
                unsigned char *user_sps = NULL;
                unsigned char *user_pps = NULL;
                if (m_pSdp && !m_bHaveSpsPps)
                {
                    // When the CPU saturates, frames can be dropped; sometimes
                    // the in-band SPS/PPS were lost.  Fall back to decoding
                    // the base64 "sps,pps" pair carried in the SDP.
                    unsigned tmp_sps_size;
                    unsigned tmp_pps_size;
                    char *index = strstr(m_pSdp, ",");
                    // BUGFIX: previously 'index' was used without a NULL
                    // check; an SDP sprop string lacking the ',' separator
                    // caused undefined pointer arithmetic below.
                    if (NULL == index)
                    {
                        MP_LOG_ERR("no ',' in sdp, cannot recover sps/pps: %s\n", m_pSdp);
                    }
                    else
                    {
                        char *tmp_sps = (char*)malloc(index - m_pSdp + 1);
                        if(tmp_sps)
                        {
                            char *tmp_pps = NULL;
                            // Split "sps,pps": copy the sps part into its own
                            // NUL-terminated buffer; pps starts after the comma.
                            memcpy(tmp_sps, m_pSdp, index - m_pSdp);
                            tmp_sps[index - m_pSdp] = '\0';
                            tmp_pps = index + 1;
                            // base64Decode allocates with new[]; freed below
                            // with delete[] after the frame is delivered.
                            user_sps = base64Decode(tmp_sps, tmp_sps_size);
                            if (user_sps)
                            {
                                user_sps_size = tmp_sps_size;
                                MP_LOG_ERR("Before idr no sps.m_pSdp:%s, sps:%s, size = %d(%d), %p, %x\n", 
                                    m_pSdp, tmp_sps, (int)tmp_sps_size, user_sps_size, user_sps, user_sps == NULL ? -1 : user_sps[0]);
                            }
                            user_pps = base64Decode(tmp_pps, tmp_pps_size);
                            if (user_pps)
                            {
                                user_pps_size = tmp_pps_size;
                                MP_LOG_ERR("Before idr no pps.m_pSdp %s, [%s], size = %d(%d), %p, %x\n", m_pSdp, tmp_pps, (int)tmp_pps_size, user_pps_size, user_pps, user_pps == NULL ? -1 : user_pps[0]);
                            }
                            free(tmp_sps);
                            tmp_sps = NULL;
                        }
                        else
                        {
                            MP_LOG_ERR("malloc err, tmp_sps == NULL \n");
                        }
                    }
                }

                // Millisecond PTS from the RTCP-derived presentation time.
                u64Pts = (unsigned long long)presentationTime.tv_sec * 1000 + presentationTime.tv_usec / 1000;
                p_tmp_head = pHead;

                // Glue cached parameter sets in front of the IDR frame by
                // walking p_tmp_head backwards through the reserved head area.
                // Resulting order in memory: VPS, SPS, PPS, SEI, IDR.
                if (m_s32SeiLen > 0 && ((p_tmp_head-m_s32SeiLen-m_s32PpsLen-m_s32SpsLen) > fReceiveBuffer))
                {
                    p_tmp_head = p_tmp_head - m_s32SeiLen;
                    memcpy(p_tmp_head, m_cSei, m_s32SeiLen);
                    u32RealFrameSize += m_s32SeiLen;
                    m_s32SeiLen = 0;
                }
                else
                {
                    if(m_s32SeiLen > 0)
                    {
                        MP_LOG_ERR("Len overflow(%d %d %d),drop sei \n", m_s32SeiLen, m_s32PpsLen, m_s32SpsLen);
                    }
                }

                if (m_s32PpsLen > 0)
                {
                    p_tmp_head = p_tmp_head - m_s32PpsLen;
                    memcpy(p_tmp_head, m_cPps, m_s32PpsLen);
                    u32RealFrameSize += m_s32PpsLen;
                    m_s32PpsLen = 0;
                }

                if (m_s32SpsLen > 0)
                {
                    p_tmp_head = p_tmp_head - m_s32SpsLen;
                    memcpy(p_tmp_head, m_cSps, m_s32SpsLen);
                    u32RealFrameSize += m_s32SpsLen;
                    m_s32SpsLen = 0;
                }

                if (m_s32VpsLen > 0)
                {
                    p_tmp_head = p_tmp_head - m_s32VpsLen;
                    memcpy(p_tmp_head, m_cVps, m_s32VpsLen);
                    u32RealFrameSize += m_s32VpsLen;
                    m_s32VpsLen = 0;
                }
                
                // Fallback: prepend SPS/PPS recovered from the SDP (each with
                // its own start code), bounds-checked against the buffer head.
                if (user_sps_size > 0 && p_tmp_head - user_sps_size > fReceiveBuffer)
                {
                    // Rarely taken; if it becomes a routine path the insertion
                    // order would need adjusting.
                    p_tmp_head = p_tmp_head - user_sps_size;
                    if(user_sps)
                    {
                        memcpy(p_tmp_head, user_sps, user_sps_size);
                        u32RealFrameSize += user_sps_size;
                    }
                    
                    p_tmp_head = p_tmp_head - sizeof(m_cNalHead);
                    memcpy(p_tmp_head, m_cNalHead, sizeof(m_cNalHead));
                    u32RealFrameSize += sizeof(m_cNalHead);
                }
                if (user_pps_size > 0 && p_tmp_head - user_pps_size > fReceiveBuffer)
                {
                    // Rarely taken; if it becomes a routine path the insertion
                    // order would need adjusting.
                    p_tmp_head = p_tmp_head - user_pps_size;
                    if(user_pps)
                    {
                        memcpy(p_tmp_head, user_pps, user_pps_size);
                        u32RealFrameSize += user_pps_size;
                    }
                    
                    p_tmp_head = p_tmp_head - sizeof(m_cNalHead);
                    memcpy(p_tmp_head, m_cNalHead, sizeof(m_cNalHead));
                    u32RealFrameSize += sizeof(m_cNalHead);
                }
                // Deliver the assembled I-frame to the application.
                BL_FRAME_DATA_S stFrameData;
                stFrameData.m_eEncodeType = m_encoder_type == RTSP_VIDEO_ENCODE_H264 ? RTSP_VIDEO_ENCODE_H264:RTSP_VIDEO_ENCODE_H265;
                stFrameData.m_eFrameType = BL_AVFRM_TYPE_I;
                stFrameData.m_pdata = (char  *)p_tmp_head;
                stFrameData.m_s32DataSize = u32RealFrameSize;
                stFrameData.m_u64Pts = u64Pts;
                m_pVideoCb((int)m_s32Chn, &stFrameData);
                if (user_sps)
                {
                    delete[] user_sps;
                }
                if (user_pps)
                {
                    delete[] user_pps;
                }

                m_bHaveSpsPps = false;
                break;
            }
            case NAL_SLICE:
            {
                // Non-IDR slice: deliver as a P-frame with no prefixed data.
                u64Pts = (unsigned long long)presentationTime.tv_sec * 1000 + presentationTime.tv_usec / 1000;
                BL_FRAME_DATA_S stFrameData;
                stFrameData.m_eEncodeType = m_encoder_type == RTSP_VIDEO_ENCODE_H264 ? RTSP_VIDEO_ENCODE_H264:RTSP_VIDEO_ENCODE_H265;
                stFrameData.m_eFrameType = BL_AVFRM_TYPE_P;
                stFrameData.m_pdata = (char  *)pHead;
                stFrameData.m_s32DataSize = u32RealFrameSize;
                stFrameData.m_u64Pts = u64Pts;
                m_pVideoCb((int)m_s32Chn, &stFrameData);
                break;
            }
            default:
            {
                MP_LOG_ERR("slice_type is unknow %d \n", slice_type);
                break;
            }
        }
    }
    else if(0 == strncmp(fSubsession.mediumName(), "audio", 5) && NULL != m_pAudioCb)
    {
        // Audio: forward the raw frame unchanged (no start code needed).
        UINT32 u32RealFrameSize = frameSize;
        unsigned char* pHead = fReceiveBuffer + RTSP_DATA_HEAD_LEN;
        UINT64 u64Pts = (unsigned long long)presentationTime.tv_sec * 1000 + presentationTime.tv_usec / 1000;
        BL_AUDIO_DATA_S stAudioData;
        stAudioData.m_pdata = (char  *)pHead;
        stAudioData.m_s32DataSize = u32RealFrameSize;
        stAudioData.m_u64Pts = u64Pts;
        m_pAudioCb((int)m_s32Chn, &stAudioData);
    }
    // Then continue, to request the next frame of data:
    continuePlaying();
}

Boolean DummySink::continuePlaying() {
    // Sanity check: without an upstream source there is nothing to request.
    if (fSource == NULL)
    {
        return False;
    }
    // Request the next frame.  Delivery lands past the reserved head area of
    // the receive buffer (so start codes / parameter sets can be prepended in
    // place later); afterGettingFrame() fires when the frame arrives.
    u_int8_t* dst = fReceiveBuffer + RTSP_DATA_HEAD_LEN;
    unsigned maxBytes = DUMMY_SINK_RECEIVE_BUFFER_SIZE - RTSP_DATA_HEAD_LEN;
    fSource->getNextFrame(dst, maxBytes, afterGettingFrame, this, onSourceClosure, this);
    return True;
}

// Initializes the RTSP client module: records the application callbacks,
// selects the transport (TCP interleaved vs. UDP), creates one live555
// scheduler/environment pair per worker thread, and spawns the event-loop
// threads.  Returns RTSP_OK on success, RTSP_FAIL otherwise (everything
// created so far is torn down on failure).
int rtsp_init(RTSP_CLEINT_INIT_INFO_S input)
{
    char thr_name[] = "rtsp_thr";
    char getdata_thr_name[] = "rtsp_getdata";
    int ret = RTSP_FAIL;
    
    memset(&g_stRtspClientInfo, 0, sizeof(g_stRtspClientInfo));

    // A video callback is mandatory for a successful init; the audio callback
    // is optional.
    if(input.m_pVideoCb)
    {
        g_s32Init = 1;
        g_stRtspClientInfo.m_pVideoCb = input.m_pVideoCb;
    }
    if(input.m_pAudioCb)
    {
        g_stRtspClientInfo.m_pAudioCb = input.m_pAudioCb;
    }

    pthread_mutex_init(&g_stRtspClientInfo.m_pMutex, NULL);

    pthread_mutex_lock(&g_stRtspClientInfo.m_pMutex);

    // rtsp_type == 1 requests RTP over UDP; anything else uses RTP-over-TCP.
    if(input.rtsp_type == 1)
    {
        REQUEST_STREAMING_OVER_TCP = False;
    }
    else
    {
        REQUEST_STREAMING_OVER_TCP = True;
    }

    // One scheduler + usage environment per event-loop thread.
    for(INT32 i = 0;i < RTSP_CLIENT_THREAD_MAX; i++)
    {
        if (NULL == g_stScheduler[i])
        {
            g_stScheduler[i] = BasicTaskScheduler::createNew();

            if (NULL == g_stScheduler[i])
            {
                MP_LOG_ERR("g_stScheduler create failed \n");
                goto EXIT;
            }
        }
        if (NULL == g_stEnv[i])
        {
            g_stEnv[i] = BasicUsageEnvironment::createNew(*g_stScheduler[i]);

            if (NULL == g_stEnv[i])
            {
                MP_LOG_ERR("g_stEnv create failed \n");
                goto EXIT;
            }
        }
    }

    g_eventLoopWatchVariable = 0;
    for(long i = 0;i < RTSP_CLIENT_THREAD_MAX; i++)
    {
        if (NULL == g_pRtspLoopThreadId[i])
        {
            g_pRtspLoopThreadId[i] = (pthread_t*)malloc(sizeof(pthread_t));
            if (!g_pRtspLoopThreadId[i])
            {
                // BUGFIX: this is an allocation failure, not a thread-creation
                // failure — the previous message said "pthread_create err".
                MP_LOG_ERR("malloc err\n");
                goto EXIT;
            }
            // The loop index is smuggled through the void* argument so each
            // thread knows which scheduler/environment slot it owns.
            ret = pthread_create(g_pRtspLoopThreadId[i], NULL, thread_RtspLoop, (void *)i);
            if (ret != 0)
            {
                MP_LOG_ERR("pthread_create err %d\n", ret);
                goto EXIT;
            }
        }
    }

    if(g_s32Init)
    {
        ret = RTSP_OK;
    }

EXIT:
    if (ret != RTSP_OK)
    {
        // NOTE(review): this cleanup frees thread-id storage and destroys the
        // environments without signaling g_eventLoopWatchVariable or joining
        // threads that were already created — if any thread started, it may
        // still reference the freed environment.  Confirm and fix upstream.
        for(INT32 i = 0;i < RTSP_CLIENT_THREAD_MAX; i++)
        {
            if (g_pRtspLoopThreadId[i])
            {
                free(g_pRtspLoopThreadId[i]);
                g_pRtspLoopThreadId[i] = NULL;
            }
            if (g_stEnv[i])
            {
                g_stEnv[i]->reclaim();
                g_stEnv[i] = NULL;
            }
            if (g_stScheduler[i])
            {
                delete g_stScheduler[i];
                g_stScheduler[i] = NULL;
            }
        }
        g_s32Init = 0;
        MP_LOG_ERR("Rtsp client init error\n");
    }
    else
    {
        MP_LOG_DBG("Rtsp client init ok\n");
    }
    pthread_mutex_unlock(&g_stRtspClientInfo.m_pMutex);
    
    return ret;
}

// Shuts the RTSP client module down: closes every channel, signals the event
// loops to exit, joins the worker threads, and destroys the live555
// environments and schedulers.  Returns RTSP_OK, or RTSP_FAIL if the module
// was never initialized.
INT32 rtsp_deinit()
{
    int ret = 0;
    if (g_s32Init == 0)
    {
        return RTSP_FAIL;
    }
    // Close all channels first (rtsp_close_url takes the mutex itself).
    for (int i = 0; i < RTSP_CHN_NUM_MAX; i++)
    {
        rtsp_close_url(i);
    }

    pthread_mutex_lock(&g_stRtspClientInfo.m_pMutex);

    // Tell every doEventLoop() to return, then join the worker threads.
    g_eventLoopWatchVariable = 1;

    for(INT32 i = 0;i < RTSP_CLIENT_THREAD_MAX; i++)
    {
        if (g_pRtspLoopThreadId[i])
        {
            ret = pthread_join(*g_pRtspLoopThreadId[i], NULL);
            // BUGFIX: pthread_join reports failure with a POSITIVE errno-style
            // code (it never returns a negative value), so the previous
            // "ret < 0" test could never detect a failed join.
            if (ret != 0)
            {
                MP_LOG_ERR("pthread_join error ret = %d \n", ret);
            }
            else
            {
                MP_LOG_DBG("pthread_join OK ret = %d \n", ret);
            }
            free(g_pRtspLoopThreadId[i]);
            g_pRtspLoopThreadId[i] = NULL;
        }
        if (g_stEnv[i])
        {
            // live555 environments are released via reclaim(), not delete.
            g_stEnv[i]->reclaim();
            g_stEnv[i] = NULL;
        }
        if (g_stScheduler[i])
        {
            delete g_stScheduler[i];
            g_stScheduler[i] = NULL;
        }
    }

    pthread_mutex_unlock(&g_stRtspClientInfo.m_pMutex);

    pthread_mutex_destroy(&g_stRtspClientInfo.m_pMutex);

    g_s32Init = 0;

    MP_LOG_DBG("Rtsp client deinit ok\n");

    return RTSP_OK;
}

// Opens an RTSP stream on the given channel.  Validates the URL and channel,
// creates a CRtspSession wired to the module-level callbacks, and starts the
// client.  Returns RTSP_OK, or a negative RTSP_ERRCODE_* on failure.
INT32 rtsp_open_url(int chn, const char *url)
{
    CRtspSession *new_sec_ptr = NULL;
    INT32 ret = RTSP_FAIL;
    if (g_s32Init == 0)
    {
        return RTSP_FAIL;
    }

    pthread_mutex_lock(&g_stRtspClientInfo.m_pMutex);

    if(NULL == url)
    {
        MP_LOG_ERR("rtsp client: url is null \n");
        ret = RTSP_ERRCODE_INPUT_PARAM_NULL;
        goto EXIT;
    }
    
    // BUGFIX: the scheme must be a PREFIX of the URL.  The previous
    // strstr() check accepted "rtsp://" anywhere in the string
    // (e.g. "http://x/rtsp://y" passed validation).
    if(0 != strncmp(url, "rtsp://", 7))
    {
        MP_LOG_ERR("rtsp client: url is error, url[%s] \n", url);
        ret = RTSP_ERRCODE_INPUT_PARAM_ERR;
        goto EXIT;
    }
    
    if(RtspCheckChn(chn))
    {
        MP_LOG_ERR("chn is error, chn[%d] \n", chn);
        ret = RTSP_ERRCODE_INPUT_PARAM_ERR;
        goto EXIT;
    }
    
    // Reject double-open of a channel that already has a live session.
    if (g_stRtspClientInfo.m_pstRtspSession[chn] != NULL)
    {
        MP_LOG_ERR("rtsp chn is open, chn[%d] \n", chn);
        ret = RTSP_ERRCODE_INNER_ERR;
        goto EXIT;
    }

    // NOTE(review): plain operator new throws std::bad_alloc rather than
    // returning NULL, so this check is effectively dead unless the project
    // builds with -fno-exceptions; kept for defense in depth.
    new_sec_ptr = new CRtspSession;
    if (NULL == new_sec_ptr)
    {
        MP_LOG_ERR("new CRtspSession error, chn[%d] \n", chn);
        ret = RTSP_ERRCODE_INNER_ERR;
        goto EXIT;
    }

    new_sec_ptr->m_pVideoCb = (RTSP_VIDEO_CB_HANDLE)g_stRtspClientInfo.m_pVideoCb;
    new_sec_ptr->m_pAudioCb = (RTSP_AUDIO_CB_HANDLE)g_stRtspClientInfo.m_pAudioCb;
    
    if (new_sec_ptr->StartRTSPClient(chn, "rtsp", url, 0))
    {
        MP_LOG_ERR("StartRTSPClient err  %s\n", url);
        delete new_sec_ptr;
        new_sec_ptr = NULL;
        ret = RTSP_ERRCODE_INNER_ERR;
        goto EXIT;
    }

    g_stRtspClientInfo.m_pstRtspSession[chn] = new_sec_ptr;
    
    MP_LOG_DBG("Rtsp client start ok, url:%s, chn:%d, session addr:%p\n", url, chn, g_stRtspClientInfo.m_pstRtspSession[chn]);
    ret = RTSP_OK;
    
EXIT:
    pthread_mutex_unlock(&g_stRtspClientInfo.m_pMutex);
    return ret;
}

// Closes the RTSP stream on the given channel: asks the session to stop,
// polls until it leaves the RUN state (up to ~19 * 50 ms), then destroys the
// session object.  Returns RTSP_OK, RTSP_FAIL if uninitialized, or a small
// negative code identifying the failure point.
INT32 rtsp_close_url(int chn)
{
    INT32 ret = RTSP_FAIL;
    INT32 retries = 20;
    INT32 status = 0;
    CRtspSession *session = NULL;

    if (g_s32Init == 0)
    {
        return RTSP_FAIL;
    }

    pthread_mutex_lock(&g_stRtspClientInfo.m_pMutex);

    if (RtspCheckChn(chn))
    {
        MP_LOG_ERR("rtsp_close_url err chn %d\n", chn);
        ret = -2;
        goto EXIT;
    }

    session = g_stRtspClientInfo.m_pstRtspSession[chn];
    if (NULL == session)
    {
        MP_LOG_ERR("rtsp session is null, chn[%d] \n", chn);
        ret = -3;
        goto EXIT;
    }

    session->StopRTSPClient();

    // Wait for the client to acknowledge the stop; retries hits 0 only if
    // the loop exhausts without the status ever leaving RUN.
    while (--retries)
    {
        status = RtspGetStatus(chn);
        if (status != RTSP_STATUS_RUN)
        {
            break;
        }
        mp_sleep_ms(50);
    }

    if (retries == 0)
    {
        MP_LOG_ERR("rtsp client stop FAILED chn %d, status %d\n", chn, status);
        ret = -4;
        goto EXIT;
    }

    delete session;
    session = NULL;
    g_stRtspClientInfo.m_pstRtspSession[chn] = NULL;

    MP_LOG_DBG("rtsp client stop ok chn %d, status %d\n", chn, status);
    ret = RTSP_OK;

EXIT:
    pthread_mutex_unlock(&g_stRtspClientInfo.m_pMutex);
    return ret;
}

