/*
* Copyright (c) Huawei Technologies Co., Ltd. 2017-2019. All rights reserved.
 * Description: demuxer for ts
* Author: HiMobileCam middleware develop team
* Create: 2017-12-22
*/

#include "ts_format.h"

#ifndef __HuaweiLite__
#include <sys/syscall.h>
#endif

#include "securec.h"
#include "hi_demuxer_err.h"
#include "ffdemux_log.h"

#include "ffavcparser.h"
#include "ffhevcparser.h"
#include "ffrbspparser.h"
#include "ffaacparser.h"

#ifdef __cplusplus
#if __cplusplus
extern "C" {
#endif
#endif /* __cplusplus */

#define TS_DEMUX            "tsDemux"
/* ADTS fixed header length in bytes (header without CRC) */
#define AAC_ADTS_LEN         7
/* define HI_NULL handle for codedex check */
#define HI_NULL_HANDLE    0
/* Annex-B start-code prefix lengths: 00 00 01 (3 bytes) and 00 00 00 01 (4 bytes) */
#define NAL_PREFIX_3_BYTE_LEN  3
#define NAL_PREFIX_4_BYTE_LEN  4
/* number of entries in the g_audioChannels lookup table below */
#define AUDIO_CHANNELS_NUM_MAX 8

/*
 * Subset of HEVC NAL unit types this demuxer cares about
 * (see T-REC-H.265 7.4.2.2 NAL unit type table).
 */
typedef enum {
    HEVC_PSLICE = 2,  // named "P slice" here; spec type 2 is TSA_N — TODO confirm intended use
    HEVC_IDR = 19,    // IDR slice (IDR_W_RADL)
    HEVC_VPS = 32,    // video parameter set
    HEVC_SPS = 33,    // sequence parameter set (carries picture dimensions)
    HEVC_PPS = 34,    // picture parameter set
    HEVC_SEI = 39     // prefix SEI
} HevcNalType;

/*
 * Subset of H.264/AVC NAL unit types this demuxer cares about
 * (see T-REC-H.264 7.4.1 nal_unit_type table).
 */
typedef enum {
    AVC_SEI = 6,  // supplemental enhancement information
    AVC_IDR = 5,  // coded slice of an IDR picture
    AVC_SPS = 7,  // sequence parameter set (carries picture dimensions)
    AVC_PPS = 8,  // picture parameter set
} AvcNalType;

/* ADTS sampling_frequency_index -> sample rate in Hz (ISO/IEC 14496-3);
 * only the first AAC_SAMPLE_RATE_NUM indices are defined, the rest are reserved. */
static const HI_U32 g_sampleRateTag[AAC_SAMPLE_RATE_NUM] = {
    96000, 88200, 64000, 48000, 44100, 32000,
    24000, 22050, 16000, 12000, 11025, 8000, 7350
};

/* ADTS channel_configuration -> channel count; note config 7 means 8 channels. */
static const HI_U8 g_audioChannels[AUDIO_CHANNELS_NUM_MAX] = {
    0, 1, 2, 3, 4, 5, 6, 8
};

/* Annex-B start-code prefixes used to delimit NAL units in the elementary stream. */
static const HI_U8 g_nalPrefix4Byte[NAL_PREFIX_4_BYTE_LEN] = { 0x00, 0x00, 0x00, 0x01 };
static const HI_U8 g_nalPrefix3Byte[NAL_PREFIX_3_BYTE_LEN] = { 0x00, 0x00, 0x01 };

// T-REC-H.264-201602-S!!PDF-E.pdf 7.4.1 NAL unit semantics
/*
 * Measure the first Annex-B NAL unit in buf.
 * On success returns the unit length in bytes (start code included) and sets
 * *outHeaderLen to the start-code length (3 or 4); returns 0 when buf does not
 * begin with a start code or is shorter than 4 bytes.
 */
static HI_U32 TsGetOneNalUnit(const HI_U8 *buf, HI_U32 size, HI_U8 *outHeaderLen)
{
    HI_U32 pos = 0;

    if (size < NAL_PREFIX_4_BYTE_LEN) {
        return 0;
    }

    /* the unit must begin with a start code: 00 00 00 01 or 00 00 01 */
    if (!memcmp(&buf[pos], g_nalPrefix4Byte, NAL_PREFIX_4_BYTE_LEN)) {
        *outHeaderLen = NAL_PREFIX_4_BYTE_LEN;
        pos += NAL_PREFIX_4_BYTE_LEN;
    } else if (!memcmp(&buf[pos], g_nalPrefix3Byte, NAL_PREFIX_3_BYTE_LEN)) {
        *outHeaderLen = NAL_PREFIX_3_BYTE_LEN;
        pos += NAL_PREFIX_3_BYTE_LEN;
    } else {
        return 0;
    }

    /* Scan for the next start code. A 3-byte prefix may sit as close as 3 bytes
     * from the end, so the scan runs up to size - 3; the previous bound of
     * size - 4 (plus an unreachable pos >= size check) silently truncated the
     * trailing NAL unit by 3 bytes when no further start code existed. */
    for (; pos + NAL_PREFIX_3_BYTE_LEN <= size; pos++) {
        if ((pos + NAL_PREFIX_4_BYTE_LEN <= size) &&
            !memcmp(&buf[pos], g_nalPrefix4Byte, NAL_PREFIX_4_BYTE_LEN)) {
            return pos;
        }
        if (!memcmp(&buf[pos], g_nalPrefix3Byte, NAL_PREFIX_3_BYTE_LEN)) {
            return pos;
        }
    }

    return size;  // no further start code: this must be the last NAL unit
}

/*
 * Decode picture width/height from an AVC SPS NAL unit.
 * crtData points at the start code; nalLen is the whole unit length and
 * headLen the start-code length, so the payload is nalLen - headLen bytes.
 * Returns HI_SUCCESS on success, HI_FAILURE on bad input or OOM.
 */
static HI_S32 TsParseAvcSps(const HI_U8 *crtData, HI_U32 nalLen, HI_U8 headLen, HI_U32 *outWidth, HI_U32 *outHeight)
{
    /* guard against a unit with no payload; mirrors the check in TsParseHevcSps
     * and keeps the malloc size strictly positive */
    if (nalLen <= headLen) {
        return HI_FAILURE;
    }

    HI_U8 *pu8SPS = (HI_U8 *)malloc(nalLen - headLen);
    if (pu8SPS == HI_NULL) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "malloc avc stream sps failed\n");
        return HI_FAILURE;
    }
    /* strip emulation-prevention bytes into pu8SPS before bitstream parsing
     * (GenSodpByRbsp semantics assumed from usage — see ffrbspparser) */
    GenSodpByRbsp(&crtData[headLen], nalLen - headLen, pu8SPS);

    AvcSpsInfo spsInfo = { 0 };
    FFAVCParserParseSps(&spsInfo, pu8SPS, nalLen - headLen);

    // T-REC-H.264-201602-S!!PDF-E.pdf  (G-70): dimensions in luma samples,
    // derived from macroblock counts minus the frame cropping rectangle
    *outWidth = ((spsInfo.picWidthInMbsMinus1 + 1) * 16) - spsInfo.frameCropLeftOffset * 2 -
                 spsInfo.frameCropRightOffset * 2;
    *outHeight = ((2 - (spsInfo.frameMbsOnlyFlag ? 1 : 0)) *
        (spsInfo.picHeightInMapUnitsMinus1 + 1) * 16) -
        (spsInfo.frameCropTopOffset * 2) - (spsInfo.frameCropBottomOffset * 2);

    free(pu8SPS);  // free(NULL) is impossible here; no guard needed
    return HI_SUCCESS;
}

/*
 * Decode picture width/height from an HEVC SPS NAL unit.
 * crtData points at the start code; nalLen is the whole unit length and
 * headLen the start-code length, so the payload is nalLen - headLen bytes.
 * Returns HI_SUCCESS on success, HI_FAILURE on bad input or OOM.
 *
 * Bug fix: error paths used to return HI_FALSE, which (being 0, like
 * HI_SUCCESS) made the caller's "== HI_SUCCESS" test treat a malloc failure
 * as a successfully parsed SPS. Return HI_FAILURE like TsParseAvcSps.
 */
static HI_S32 TsParseHevcSps(const HI_U8 *crtData, HI_U32 nalLen, HI_U8 headLen, HI_U32 *outWidth, HI_U32 *outHeight)
{
    if (nalLen <= headLen) {
        return HI_FAILURE;
    }
    HI_U8 *pu8SPS = (HI_U8 *)malloc(nalLen - headLen);
    if (pu8SPS == HI_NULL) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "malloc hevc stream sps failed\n");
        return HI_FAILURE;
    }

    HEVCSpsInfo spsInfo = { 0 };
    /* strip emulation-prevention bytes into pu8SPS before bitstream parsing */
    GenSodpByRbsp(&crtData[headLen], nalLen - headLen, pu8SPS);
    FFHEVCParserParseSps(&spsInfo, pu8SPS, nalLen - headLen);
    *outWidth = spsInfo.picWidthInLumaSamples;
    *outHeight = spsInfo.picHeightInLumaSamples;

    free(pu8SPS);  // free(NULL) is impossible here; no guard needed
    return HI_SUCCESS;
}

/*
 * Walk the Annex-B NAL units of one video frame, find the first SPS and
 * extract the picture width/height from it.
 * Returns HI_SUCCESS when an SPS was found and parsed, HI_FAILURE otherwise
 * (unsupported codec, no SPS in the frame, or SPS parsing failed).
 */
static HI_S32 TsParseVideoSps(enum AVCodecID vidCodecId, const HI_U8 *data,
    HI_U32 dataLen, HI_U32 *outWidth, HI_U32 *outHeight)
{
    if ((vidCodecId != AV_CODEC_ID_H264) && (vidCodecId != AV_CODEC_ID_HEVC)) {
        return HI_FAILURE;
    }

    const HI_U8 *cursor = data;
    const HI_U8 *end = data + dataLen;
    HI_U32 remaining = dataLen;
    HI_U8 prefixLen = 0;
    HI_BOOL gotSps = HI_FALSE;

    while (cursor < end) {
        HI_U32 unitLen = TsGetOneNalUnit(cursor, remaining, &prefixLen);
        if ((unitLen == 0) || (unitLen <= prefixLen)) {
            break;  // malformed or empty unit: stop scanning
        }

        if (vidCodecId == AV_CODEC_ID_H264) {
            // nal_unit_type lives in the low 5 bits of the first payload byte
            if ((cursor[prefixLen] & 0x1F) == AVC_SPS) {
                gotSps = (TsParseAvcSps(cursor, unitLen, prefixLen, outWidth, outHeight) == HI_SUCCESS);
                break;
            }
        } else {
            // T-REC-H.265-201504-I!!PDF-E   7.3.1.2 NAL unit header syntax
            if (((cursor[prefixLen] & 0x7E) >> 1) == HEVC_SPS) {
                gotSps = (TsParseHevcSps(cursor, unitLen, prefixLen, outWidth, outHeight) == HI_SUCCESS);
                break;
            }
        }

        cursor += unitLen;
        remaining -= unitLen;
    }

    if (!gotSps) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "there no sps info in frame error\n");
        return HI_FAILURE;
    }
    return HI_SUCCESS;
}

/*
 * Parse one ADTS header from data and fill outAacInfo (sync word, channel
 * count, sample rate, frame length). Returns HI_SUCCESS on success,
 * HI_FAILURE when the header cannot be parsed or carries reserved index
 * values.
 */
static HI_S32 TsParseAudioFrame(const HI_U8 *data, HI_U32 dataLen, AacMediaInfo *outAacInfo)
{
    HI_U32 len = 0;
    AacAdtsHeader adtsHeader = { 0 };

    if (FFAACParserParseAdtsHeader(data, dataLen, &adtsHeader, &len) != HI_SUCCESS) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "AAC parse audio frame error\n");
        return HI_FAILURE;
    }

    if (len == 0) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "AAC parse audio frame error\n");
        return HI_FAILURE;
    }

    /* The ADTS sampling_frequency_index is a 4-bit field (0..15) but
     * g_sampleRateTag only has AAC_SAMPLE_RATE_NUM (13) entries; reserved
     * indices 13..15 would read past the table. Validate both lookup indices
     * before use instead of trusting the bitstream. */
    if ((adtsHeader.sampRateIdx >= AAC_SAMPLE_RATE_NUM) ||
        (adtsHeader.channelConfig >= AUDIO_CHANNELS_NUM_MAX)) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "AAC adts header index out of range\n");
        return HI_FAILURE;
    }

    outAacInfo->sync = adtsHeader.sync;
    outAacInfo->channelNum = g_audioChannels[adtsHeader.channelConfig];
    outAacInfo->sampleRate = g_sampleRateTag[adtsHeader.sampRateIdx];
    outAacInfo->frameLength = adtsHeader.frameLength;
    return HI_SUCCESS;
}

/*
 * Return HI_TRUE when the packet is NOT a single clean ADTS frame: either the
 * first header is absent/unparsable, or a second ADTS sync word follows the
 * first frame (multiple frames per packet are unsupported).
 * Returns HI_FALSE when exactly one frame fills the packet.
 */
static HI_BOOL TsIsMultiAdtsAudioFrame(const HI_U8 *data, HI_U32 dataLen)
{
    const HI_U8 *crtData = data;
    HI_U32 leftLen = dataLen;
    AacMediaInfo aacInfo;

    if (memset_s(&aacInfo, sizeof(aacInfo), 0x00, sizeof(aacInfo)) != EOK) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "set aacInfo error\n");
    }

    HI_S32 ret = TsParseAudioFrame(crtData, leftLen, &aacInfo);
    if ((ret != HI_SUCCESS) || (aacInfo.sync != (HI_U32)0xfff)) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "audio frame have no sync header error\n");
        return HI_TRUE;
    }

    /* first frame spans (or exceeds) the whole packet: nothing left to scan */
    if (aacInfo.frameLength >= dataLen) {
        return HI_FALSE;
    }
    crtData += aacInfo.frameLength;
    leftLen = dataLen - aacInfo.frameLength;

    /* An ADTS sync word is 12 bits (0xFFF) spread over two bytes, so stop when
     * fewer than 2 bytes remain. The previous do-while dereferenced crtData
     * unconditionally, reading past the buffer when frameLength >= dataLen or
     * when only one byte was left. */
    while (leftLen >= 2) {
        if ((crtData[0] == 0xFF) && (((crtData[1] & 0xF0) >> 4) == 0x0F)) {
            FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR,
                "do not support another aac frame in one packet \n");
            return HI_TRUE;
        }
        crtData++;
        leftLen--;
    }

    return HI_FALSE;
}

/* Map an FFmpeg video codec id onto the HI_FORMAT video type enum;
 * anything other than H.264/H.265 maps to the BUTT sentinel. */
static HI_FORMAT_VIDEO_TYPE_E TsGetVideoType(enum AVCodecID vidCodecId)
{
    switch (vidCodecId) {
        case AV_CODEC_ID_H264:
            return HI_FORMAT_VIDEO_TYPE_H264;
        case AV_CODEC_ID_HEVC:
            return HI_FORMAT_VIDEO_TYPE_H265;
        default:
            return HI_FORMAT_VIDEO_TYPE_BUTT;
    }
}

/*
 * Scan the frame's NAL units and report whether it contains a random-access
 * point: SPS or IDR slice for H.264, SPS/PPS/IDR for HEVC.
 */
static HI_BOOL TsIsIdrFrame(enum AVCodecID vidCodecId, const HI_U8 *data, HI_U32 dataLen)
{
    const HI_U8 *cursor = data;
    const HI_U8 *end = data + dataLen;
    HI_U32 remaining = dataLen;
    HI_U8 prefixLen = 0;

    do {
        HI_U32 unitLen = TsGetOneNalUnit(cursor, remaining, &prefixLen);
        if (unitLen == 0) {
            break;
        }

        if (vidCodecId == AV_CODEC_ID_H264) {
            // nal_unit_type: low 5 bits of the first payload byte
            HI_U8 unitType = cursor[prefixLen] & 0x1F;
            if ((unitType == AVC_SPS) || (unitType == AVC_IDR)) {
                return HI_TRUE;
            }
        } else if (vidCodecId == AV_CODEC_ID_HEVC) {
            // nal_unit_type: bits 1..6 of the first payload byte (H.265 7.3.1.2)
            HI_U8 unitType = (cursor[prefixLen] & 0x7E) >> 1;
            if ((unitType == HEVC_SPS) || (unitType == HEVC_PPS) || (unitType == HEVC_IDR)) {
                return HI_TRUE;
            }
        }

        cursor += unitLen;
        remaining -= unitLen;
    } while (cursor < end);

    return HI_FALSE;
}

/*
 * Seek the demuxer to toMs milliseconds (clamped to the container duration).
 * Returns HI_SUCCESS, HI_ERR_DEMUXER_ILLEGAL_PARAM on a negative/overflowing
 * target, or HI_ERR_DEMUXER_SEEK when both seek attempts fail.
 */
HI_S32 TsSeekMS(FfDemuxFormatMember *tsDemuxer, HI_S64 toMs, __attribute__((unused)) HI_S32 flag)
{
    HI_S64 targetUs = (HI_S64)toMs * FFMPEG_MS_US_UNIT;

    /* the ms->us multiplication may overflow HI_S64 and wrap negative */
    if ((toMs < 0) || (targetUs < 0)) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR,
            "seek input time is negative or oversize: %lld ms\n", toMs);
        return HI_ERR_DEMUXER_ILLEGAL_PARAM;
    }

    if (targetUs > tsDemuxer->formatContext->duration) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_DEBUG,
            "seek input time beyond total time seektime: %lld\n", targetUs);
        targetUs = tsDemuxer->formatContext->duration;
    }

    /* container timestamps are offset by start_time; rebase the target */
    if (tsDemuxer->formatContext->start_time != AV_NOPTS_VALUE) {
        targetUs += tsDemuxer->formatContext->start_time;
    }

    /* first accept any key frame at or before the target; on failure retry
     * accepting a key frame at or after it */
    HI_S32 ret = avformat_seek_file(tsDemuxer->formatContext, -1, INT64_MIN, targetUs, targetUs, 0);
    if (ret < 0) {
        ret = avformat_seek_file(tsDemuxer->formatContext, -1, targetUs, targetUs, INT64_MAX, 0);
    }
    if (ret < 0) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_DEBUG,
                       "error while seeking %lld ,streamIndex:%d, ret:%d\n",
                       toMs, 0, ret);
        return HI_ERR_DEMUXER_SEEK;
    }

    // pkt.pts is time base on stream->timebase, need expressed in AV_TIME_BASE units
    tsDemuxer->lastReadPts = targetUs;
    return HI_SUCCESS;
}

/*
 * Read packets forward until the used video stream yields an IDR frame, then
 * rewind the avio layer to that packet's file position and flush the demuxer
 * so the next read starts at the IDR.
 * Returns HI_SUCCESS (also on plain EOF) or HI_FAILURE on a read error.
 */
HI_S32 TsReadUntilIframe(FfDemuxFormatMember *tsDemuxer)
{
    AVPacket pkt;
    HI_S32 ret = HI_SUCCESS;

    if (memset_s(&pkt, sizeof(pkt), 0x00, sizeof(pkt)) != EOK) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "set pkt error\n");
    }
    av_init_packet(&pkt);

    for (;;) {
        ret = av_read_frame(tsDemuxer->formatContext, &pkt);
        if (ret == AVERROR_EOF) {
            // ran out of packets without finding an IDR: not treated as an error
            ret = HI_SUCCESS;
            break;
        }
        if (ret != HI_SUCCESS || pkt.size <= 0) {
            FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "av_read_frame ret failed or ret size 0\n");
            ret = HI_FAILURE;
            break;
        }

        if ((pkt.stream_index == tsDemuxer->usedVideoStreamIndex) &&
            TsIsIdrFrame(tsDemuxer->vidCodecId, pkt.data, pkt.size)) {
            HI_S64 idrPos = pkt.pos;  // capture before unref invalidates pkt
            av_packet_unref(&pkt);
            if (avio_seek(tsDemuxer->formatContext->pb, idrPos, SEEK_SET) < 0) {
                FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "seek to last Idr frame error\n");
            }
            avformat_flush(tsDemuxer->formatContext);
            break;
        }
        av_packet_unref(&pkt);
    }

    return ret;
}

/*
 * Parse the selected ("used") video stream's first frame for its SPS and
 * record the resolution both in the demuxer's width/height fields and in the
 * per-stream resolution table entry for this stream index.
 */
static HI_VOID TsProbeUsedVideoByParserFrame(FfDemuxFormatMember *tsDemuxer, const AVPacket *pkt)
{
    HI_S32 streamIdx = pkt->stream_index;

    if (TsParseVideoSps(tsDemuxer->vidCodecId, pkt->data, pkt->size,
                        &tsDemuxer->width, &tsDemuxer->height) != HI_SUCCESS) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "TsParseVideoSps failed\n");
    }

    tsDemuxer->stSteamResolution[streamIdx].s32VideoStreamIndex = streamIdx;
    tsDemuxer->stSteamResolution[streamIdx].u32Height = tsDemuxer->height;
    tsDemuxer->stSteamResolution[streamIdx].u32Width = tsDemuxer->width;
    tsDemuxer->stSteamResolution[streamIdx].enVideoType = TsGetVideoType(tsDemuxer->vidCodecId);
}

/*
 * Parse a non-selected video stream's frame for its SPS and fill the
 * per-stream resolution table entry; unlike the "used" variant, this does not
 * touch the demuxer's primary width/height fields.
 */
static HI_VOID TsProbeUnusedVideoByParserFrame(FfDemuxFormatMember *tsDemuxer, const AVPacket *pkt)
{
    HI_S32 streamIdx = pkt->stream_index;
    enum AVCodecID codecId = tsDemuxer->formatContext->streams[streamIdx]->codecpar->codec_id;
    HI_U32 parsedWidth = 0;
    HI_U32 parsedHeight = 0;

    if (TsParseVideoSps(codecId, pkt->data, pkt->size, &parsedWidth, &parsedHeight) != HI_SUCCESS) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "TsParseVideoSps failed\n");
    }

    tsDemuxer->stSteamResolution[streamIdx].s32VideoStreamIndex = streamIdx;
    tsDemuxer->stSteamResolution[streamIdx].u32Height = parsedHeight;
    tsDemuxer->stSteamResolution[streamIdx].u32Width = parsedWidth;
    tsDemuxer->stSteamResolution[streamIdx].enVideoType = TsGetVideoType(codecId);
}

/*
 * Probe the file by reading packets: extract the audio sample rate/channels
 * from the first AAC packet, and the resolution of every H.264/H.265 video
 * stream from its first packet. Rewinds the avio layer to the file start and
 * flushes the demuxer before returning, so playback starts fresh.
 * Returns the last av_read_frame result on read failure, HI_FAILURE if the
 * final rewind fails, HI_SUCCESS otherwise.
 */
HI_S32 TsProbeByParseFrame(FfDemuxFormatMember *tsDemuxer)
{
    HI_BOOL isFindVideo = HI_FALSE;
    HI_BOOL isFindAudio = HI_FALSE;
    HI_S32 ret = HI_SUCCESS;
    HI_S32 parsedVidNum = 0;
    AVPacket pkt;

    if (memset_s(&pkt, sizeof(pkt), 0x00, sizeof(pkt)) != EOK) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "set  audio&video pkt failed\n");
    }

    av_init_packet(&pkt);

    /* Keep reading while anything is still unprobed: the selected video
     * stream, the audio stream, or any remaining video stream counted by
     * videoStreamNum. Each clause is skipped once its goal is met. */
    while ((tsDemuxer->usedVideoStreamIndex != HI_DEMUXER_NO_MEDIA_STREAM && !isFindVideo) ||
           (tsDemuxer->audioStreamIndex != HI_DEMUXER_NO_MEDIA_STREAM && !isFindAudio) ||
           (parsedVidNum < tsDemuxer->videoStreamNum)) {
        ret = av_read_frame(tsDemuxer->formatContext, &pkt);
        if (ret != HI_SUCCESS || pkt.size <= 0) {
            FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "parse audio&video frame failed\n");
            break;
        }

        if (!isFindAudio && (pkt.stream_index == tsDemuxer->audioStreamIndex)) {
            /* first audio packet: pull sample rate and channel count from its
             * ADTS header; parse failure leaves the zeroed defaults in place */
            AacMediaInfo aacInfo;
            isFindAudio = HI_TRUE;
            if (memset_s(&aacInfo, sizeof(AacMediaInfo), 0x00, sizeof(AacMediaInfo)) != EOK) {
                FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "set  audio&video aacInfo failed\n");
            }
            (HI_VOID)TsParseAudioFrame(pkt.data, pkt.size, &aacInfo);
            tsDemuxer->sampleRate = aacInfo.sampleRate;
            tsDemuxer->chnnum = aacInfo.channelNum;
        } else if ((pkt.stream_index == tsDemuxer->usedVideoStreamIndex) &&
            (tsDemuxer->stSteamResolution[pkt.stream_index].s32VideoStreamIndex == HI_DEMUXER_NO_MEDIA_STREAM)) {
            /* first packet of the selected video stream, not yet in the
             * resolution table */
            isFindVideo = HI_TRUE;
            TsProbeUsedVideoByParserFrame(tsDemuxer, &pkt);
            parsedVidNum++;
        } else if ((pkt.stream_index != tsDemuxer->usedVideoStreamIndex) &&
            tsDemuxer->stSteamResolution[pkt.stream_index].s32VideoStreamIndex == HI_DEMUXER_NO_MEDIA_STREAM &&
            (tsDemuxer->formatContext->streams[pkt.stream_index]->codecpar->codec_id == AV_CODEC_ID_H264 ||
            tsDemuxer->formatContext->streams[pkt.stream_index]->codecpar->codec_id == AV_CODEC_ID_HEVC)) {
            /* first packet of any other H.264/H.265 video stream */
            TsProbeUnusedVideoByParserFrame(tsDemuxer, &pkt);
            parsedVidNum++;
        }

        av_packet_unref(&pkt);
    }

    /* rewind so normal demuxing starts from the beginning of the file */
    if (avio_seek(tsDemuxer->formatContext->pb, 0, SEEK_SET) < 0) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "ts seek to file begin error\n");
        ret = HI_FAILURE;
    }
    avformat_flush(tsDemuxer->formatContext);

    return ret;
}

/*
 * Fill fmtFrame from the demuxer's current audio packet: data pointer, size
 * trimmed to the ADTS frame length, and update lastReadPts (rebased by
 * start_time). Rejects packets carrying more than one ADTS frame with
 * HI_ERR_DEMUXER_READ_PACKET; otherwise returns HI_SUCCESS.
 */
HI_S32 TsSetAudioInfo(FfDemuxFormatMember *tsDemuxer, HI_FORMAT_PACKET_S *fmtFrame)
{
    if (TsIsMultiAdtsAudioFrame(tsDemuxer->pkt.data, tsDemuxer->pkt.size)) {
        av_packet_unref(&tsDemuxer->pkt);
        return HI_ERR_DEMUXER_READ_PACKET;
    }

    fmtFrame->pu8Data = tsDemuxer->pkt.data;
    fmtFrame->u32Size = tsDemuxer->pkt.size;

    // rescale the packet pts from the stream timebase to microseconds
    HI_S64 pktPtsUs = av_rescale_q(tsDemuxer->pkt.pts, tsDemuxer->audioStream->time_base, AV_TIME_BASE_Q);
    if (pktPtsUs >= tsDemuxer->formatContext->start_time) {
        tsDemuxer->lastReadPts = pktPtsUs - tsDemuxer->formatContext->start_time;
    } else {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "audio pts is small than start_time\n");
    }

    AacMediaInfo aacInfo = { 0 };
    if ((TsParseAudioFrame(fmtFrame->pu8Data, fmtFrame->u32Size, &aacInfo) != HI_SUCCESS) ||
        (aacInfo.frameLength != fmtFrame->u32Size)) {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR, "aac frame len %d not eq to pkt len:  %d\n",
                       aacInfo.frameLength, fmtFrame->u32Size);
    }
    // report the length the ADTS header claims, not the raw packet size
    fmtFrame->u32Size = aacInfo.frameLength;
    return HI_SUCCESS;
}

/*
 * Fill fmtFrame from the demuxer's current video packet and update
 * lastReadPts (packet pts rescaled to microseconds and rebased by
 * start_time). Always returns HI_SUCCESS.
 */
HI_S32 TsSetVideoInfo(FfDemuxFormatMember *tsDemuxer, HI_FORMAT_PACKET_S *fmtFrame)
{
    // rescale the packet pts from the stream timebase to microseconds
    HI_S64 pktPtsUs = av_rescale_q(tsDemuxer->pkt.pts, tsDemuxer->videoStream->time_base, AV_TIME_BASE_Q);
    if (pktPtsUs >= tsDemuxer->formatContext->start_time) {
        tsDemuxer->lastReadPts = pktPtsUs - tsDemuxer->formatContext->start_time;
    } else {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_INFO,
                       "video pts is small than start_time readPts: %lld \n", pktPtsUs);
    }

    fmtFrame->pu8Data = tsDemuxer->pkt.data;
    fmtFrame->u32Size = tsDemuxer->pkt.size;
    return HI_SUCCESS;
}

/*
 * Select the first AAC stream in the container as the audio stream; when none
 * exists, mark the demuxer as having no audio. Always returns HI_SUCCESS
 * (absence of audio is not an error).
 */
HI_S32 TsProbeAudio(FfDemuxFormatMember *tsDemuxer)
{
    /* audio stream could not been parsed, so we just use it */
    HI_U32 idx;
    HI_U32 streamCnt = tsDemuxer->formatContext->nb_streams;

    for (idx = 0; idx < streamCnt; idx++) {
        if (tsDemuxer->formatContext->streams[idx]->codecpar->codec_id == AV_CODEC_ID_AAC) {
            FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_INFO, "audio codec type  %s\n",
                avcodec_get_name(tsDemuxer->formatContext->streams[idx]->codecpar->codec_id));
            break;
        }
    }

    if (idx < streamCnt) {
        tsDemuxer->audioStreamIndex = idx;
        tsDemuxer->audioStream = tsDemuxer->formatContext->streams[idx];
    } else {
        FFDEMUX_PRINTF(MODULE_NAME_DEMUX, HI_FFDEMUX_LOG_LEVEL_ERR,
            "Could not find AAC audio stream in input file '%s'\n", tsDemuxer->filePath);
        tsDemuxer->audioStreamIndex = HI_DEMUXER_NO_MEDIA_STREAM;
        tsDemuxer->audioStream = HI_NULL;
    }

    return HI_SUCCESS;
}

#ifdef __cplusplus
#if __cplusplus
}
#endif
#endif /* __cplusplus */
