/*!
\copyright  Copyright (c) 2020-2025 Qualcomm Technologies International, Ltd.
            All Rights Reserved.
            Qualcomm Technologies International, Ltd. Confidential and Proprietary.
\file
    \ingroup le_unicast_manager
\brief      Implementation of the audio source interface for LE music sources.
*/

#ifndef LE_UNICAST_MUSIC_SOURCE_C_
#define LE_UNICAST_MUSIC_SOURCE_C_

#if defined(INCLUDE_LE_AUDIO_UNICAST)

#include "le_unicast_manager_instance.h"
#include "le_unicast_manager_private.h"
#include "le_unicast_music_source.h"

#include "audio_lea_config.h"
#include "audio_sources.h"
#include "audio_sources_audio_interface.h"
#include "audio_router_connect.h"
#include "bt_device.h"
#include "gatt_connect.h"
#include "kymera.h"
#include "kymera_adaptation.h"
#include "kymera_adaptation_audio_protected.h"
#include "ui.h"
#include "volume_types.h"
#include "ltv_utilities.h"
#include "gatt.h"
#include "media_control_client.h"
#include "peer_signalling.h"

#ifdef INCLUDE_AUDIO_STACK
#include "generic_source.h"
#include <generic_source_audio_stack_params.h>
#include <audio_stack_parameters.h>
#include <sample_rate_key_source.h>
#include <sample_rate_key_sink.h>
#include <codec_key.h>
#endif


#include <volume_utils.h>
#include <logging.h>
#include <panic.h>
#include <stdlib.h>


static bool leUnicastMusicSource_GetAudioConnectParameters(audio_source_t source, source_defined_params_t *source_params);
static void leUnicastMusicSource_FreeAudioConnectParameters(audio_source_t source, source_defined_params_t *source_params);
static bool leUnicastMusicSource_GetAudioDisconnectParameters(audio_source_t source, source_defined_params_t *source_params);
static void leUnicastMusicSource_FreeAudioDisconnectParameters(audio_source_t source, source_defined_params_t *source_params);
static bool leUnicastMusicSource_IsAudioRouted(audio_source_t source);
static bool leUnicastMusicSource_IsAudioAvailable(audio_source_t source);
static source_status_t leUnicastMusicSource_SetState(audio_source_t source, source_state_t state);
static source_state_t leUnicastMusicSource_GetState(audio_source_t source);

#ifdef INCLUDE_AUDIO_STACK
static bool leUnicastMusicSource_GetUseCasePrepareParams(generic_source_t generic_source, source_defined_params_t *source_params);
static bool leUnicastMusicSource_GetUseCaseStartParams(generic_source_t generic_source, source_defined_params_t *source_params);
static void leUnicastMusicSource_ReleaseUseCaseParams(generic_source_t generic_source, source_defined_params_t *source_params);

/*! \brief Audio-stack interface registered for the LE unicast music sources.
    Prepare and start parameter blocks have the same layout
    (generic_source_audio_stack_params_t), so both release callbacks share a
    single handler. */
static const generic_source_audio_stack_if audio_stack_if =
{
    .GetUseCasePrepareParams = leUnicastMusicSource_GetUseCasePrepareParams,
    .ReleaseUseCasePrepareParams = leUnicastMusicSource_ReleaseUseCaseParams,
    .GetUseCaseStartParams = leUnicastMusicSource_GetUseCaseStartParams,
    .ReleaseUseCaseStartParams = leUnicastMusicSource_ReleaseUseCaseParams,
};
#endif

/*! \brief Audio source interface implementation for the LE unicast music
    sources; registered against both unicast audio sources in
    LeUnicastMusicSource_Init(). */
static const audio_source_audio_interface_t music_if =
{
    .GetConnectParameters = leUnicastMusicSource_GetAudioConnectParameters,
    .ReleaseConnectParameters = leUnicastMusicSource_FreeAudioConnectParameters,
    .GetDisconnectParameters = leUnicastMusicSource_GetAudioDisconnectParameters,
    .ReleaseDisconnectParameters = leUnicastMusicSource_FreeAudioDisconnectParameters,
    .IsAudioRouted = leUnicastMusicSource_IsAudioRouted,
    .IsAudioAvailable = leUnicastMusicSource_IsAudioAvailable,
    .SetState = leUnicastMusicSource_SetState,
    .GetState = leUnicastMusicSource_GetState,
    .Device = leUnicastManager_GetBtAudioDevice,
};

/*! \brief Copy a GATT ASCS codec identifier into the Kymera codec identifier
    used by the audio connect parameters.

    \param gatt_codec_id  Codec identifier received over ASCS.
    \return The equivalent appKymeraLeCodecId (field-for-field copy).
*/
static inline appKymeraLeCodecId leUnicastMusicSource_CopyCodecId(GattAscsCodecId gatt_codec_id)
{
    appKymeraLeCodecId kymera_codec_id;

    kymera_codec_id.codingFormat = gatt_codec_id.codingFormat;
    kymera_codec_id.companyId = gatt_codec_id.companyId;
    kymera_codec_id.vendorSpecificCodecId = gatt_codec_id.vendorSpecificCodecId;

    return kymera_codec_id;
}

/*! \brief Extract audio connect parameters from the codec and QoS information
    of the ASEs configured for the given side.

    Fills in the speaker ("media") path from the sink ASE and the microphone
    path from the source ASE. Each path that is populated also has its ASE
    state moved to le_um_ase_state_routed.

    \param inst        Unicast manager instance to read the ASEs from.
    \param side        Device side whose ASEs should be used.
    \param conn_param  [out] Parameter block to populate; media_present /
                       microphone_present are set according to what was filled.

    \return TRUE if at least one of the two paths was populated, else FALSE.
*/
static bool leUnicastMusicSource_ExtractAudioParameters(le_um_instance_t *inst,
                                                        multidevice_side_t side,
                                                        le_audio_connect_parameters_t *conn_param)
{
    bool result = FALSE;
    le_um_ase_t *sink_ase = NULL;
    /* Right sink ASE is fetched unconditionally: it is only used below to
       decide the stream type when both sinks are active. */
    le_um_ase_t *sink_ase_r = LeUnicastManager_InstanceGetRightSinkAse(inst);
    le_um_ase_t *source_ase = NULL;
    le_um_cis_t *cis_info = NULL;

    UNICAST_MANAGER_LOG("leUnicastMusicSource_ExtractAudioParameters");

    LeUnicastManager_GetAsesForGivenSide(inst, side, &sink_ase, &source_ase);

    conn_param->media_present = FALSE;
    conn_param->microphone_present = FALSE;

    /* Fill in the media Parameters */
    /* Speaker path is only populated once the sink ASE has reached streaming
       (or was already routed). */
    if (sink_ase != NULL && (sink_ase->state == le_um_ase_state_streaming || sink_ase->state == le_um_ase_state_routed))
    {
        conn_param->media.channels_to_render = LeUnicastManager_ExtractSpeakerIsoHandles(inst, sink_ase, &conn_param->media.iso_handle_list);
        conn_param->media_present = TRUE;
        conn_param->media.gaming_mode = LeUnicastManager_IsContextTypeGaming(inst->audio_context);
        conn_param->media.volume = AudioSources_CalculateOutputVolume(LeUnicastManager_GetAudioSourceForInstance(inst));
        conn_param->media.codec_frame_blocks_per_sdu = LeUnicastManager_GetCodecFrameBlocksPerSdu(sink_ase->codec_info);

        /* For aptX Lite the whole SDU is one frame; otherwise derive the
           frame length from the SDU size, blocks-per-SDU and audio location. */
        if (LeUnicastManager_isVSAptxLite(sink_ase->codec_info))
        {
            conn_param->media.frame_length = sink_ase->qos_info->maximumSduSize;
        }
        else
        {
            conn_param->media.frame_length = LeUnicastManager_GetFramelength(sink_ase->qos_info->maximumSduSize,
                                                 conn_param->media.codec_frame_blocks_per_sdu, LeUnicastManager_GetAudioLocation(sink_ase->codec_info));
        }

        conn_param->media.presentation_delay = sink_ase->qos_info->presentationDelay;
        conn_param->media.sample_rate = LeUnicastManager_GetSampleRate(sink_ase->codec_info);
        conn_param->media.frame_duration = LeUnicastManager_GetFrameDuration(sink_ase);
        conn_param->media.codec_type = leUnicastManager_GetCodecType(sink_ase->codec_info);
        conn_param->media.codec_id = leUnicastMusicSource_CopyCodecId(sink_ase->codec_info->codecId);
        conn_param->media.codec_version = sink_ase->codec_version;
        /* Stream type depends on whether the right sink ASE is also active. */
        conn_param->media.stream_type = leUnicastManager_DetermineStreamType(sink_ase,
                                                                             (sink_ase_r != NULL && leUnicastManager_IsAseActive(sink_ase_r)) ?
                                                                              sink_ase_r : NULL);

        sink_ase->state = le_um_ase_state_routed;
        conn_param->media.is_multi_peripheral_cis = inst->mirror_type == le_um_cis_mirror_type_mirror_multiperipheral;
        result = TRUE;
        UNICAST_MANAGER_LOG("Speaker path frame_length :%d, Presentation delay: %d, Sample Rate %d, Frame duration %d, channels_to_render 0x%x",
                            conn_param->media.frame_length,
                            conn_param->media.presentation_delay,
                            conn_param->media.sample_rate,
                            conn_param->media.frame_duration,
                            conn_param->media.channels_to_render);

        UNICAST_MANAGER_LOG("codec_type enum:appKymeraLeAudioCodec:%d, Codec Version %d, Frame Blocks Per SDU %d, stream type enum:appKymeraLeStreamType:%d, gaming mode %d",
                             conn_param->media.codec_type,
                             conn_param->media.codec_version,
                             conn_param->media.codec_frame_blocks_per_sdu,
                             conn_param->media.stream_type,
                             conn_param->media.gaming_mode);
    }

    /* Fill in the microphone Parameters */
    /* NOTE(review): unlike the sink path, there is no ASE state check here —
       any non-NULL source ASE is populated; presumably intentional since the
       source ASE only streams after ReadyToReceive (see below) — confirm. */
    if (source_ase != NULL)
    {
        /* NOTE(review): cis_info is dereferenced without a NULL check below;
           assumes cis_data is always valid for an enabled source ASE — confirm. */
        cis_info = source_ase->cis_data;
        conn_param->microphone_present = TRUE;
        conn_param->microphone.codec_frame_blocks_per_sdu = LeUnicastManager_GetCodecFrameBlocksPerSdu(source_ase->codec_info);
        conn_param->microphone.frame_length = LeUnicastManager_isVSAptxLite(source_ase->codec_info) ? source_ase->qos_info->maximumSduSize :
                                              source_ase->qos_info->maximumSduSize / conn_param->microphone.codec_frame_blocks_per_sdu;
        conn_param->microphone.source_iso_handle = cis_info->cis_handle;
        conn_param->microphone.presentation_delay = source_ase->qos_info->presentationDelay;
        conn_param->microphone.frame_duration = LeUnicastManager_GetFrameDuration(source_ase);
        conn_param->microphone.sample_rate = LeUnicastManager_GetSampleRate(source_ase->codec_info);
        conn_param->microphone.codec_type = leUnicastManager_GetCodecType(source_ase->codec_info);
        conn_param->microphone.codec_id = leUnicastMusicSource_CopyCodecId(source_ase->codec_info->codecId);
        conn_param->microphone.codec_version = source_ase->codec_version;
        conn_param->microphone.mic_sync_needed = LeUnicastManager_IsBothSourceAseActive(inst);
        conn_param->microphone.audio_channel_mask = LeUnicastManager_GetAudioLocation(source_ase->codec_info);
        source_ase->state = le_um_ase_state_routed;
        /* Don't move source ASE state to streaming as it needs to be done only after
         * getting ReadyToReceive unicast client
         */
        /* aptX Adaptive is not supported on the microphone path. */
        PanicFalse(conn_param->microphone.codec_type != KYMERA_LE_AUDIO_CODEC_APTX_ADAPTIVE);
        result = TRUE;
        UNICAST_MANAGER_LOG("Microphone path frame_length: %d, Presentation delay: %d, Sample Rate %d, Frame duration %d, codec_type %d, Codec Version %d, Frame Blocks Per SDU %d, audio_channel_mask 0x%x",
                            conn_param->microphone.frame_length,
                            conn_param->microphone.presentation_delay,
                            conn_param->microphone.sample_rate,
                            conn_param->microphone.frame_duration,
                            conn_param->microphone.codec_type,
                            conn_param->microphone.codec_version,
                            conn_param->microphone.codec_frame_blocks_per_sdu,
                            conn_param->microphone.audio_channel_mask);
    }

    return  result;
}

/*! \brief Get the audio connect parameters for an LE unicast music source.

    Allocates a zero-initialised le_audio_connect_parameters_t, fills it from
    the instance's ASE/QoS data and hands ownership to the caller via
    \p source_params (released by leUnicastMusicSource_FreeAudioConnectParameters).

    \param source         Audio source to fetch parameters for.
    \param source_params  [out] Receives the parameter block. Must not be NULL.

    \return TRUE if parameters were populated, FALSE otherwise.
*/
static bool leUnicastMusicSource_GetAudioConnectParameters(audio_source_t source, source_defined_params_t *source_params)
{
    le_um_instance_t *inst = LeUnicastManager_InstanceGetByAudioSource(source);

    UNICAST_MANAGER_LOG("leUnicastMusicSource_GetAudioConnectParameters");
    PanicNull(source_params);

    if (inst == NULL)
    {
        return FALSE;
    }

    le_audio_connect_parameters_t *conn_param =
        (le_audio_connect_parameters_t *)PanicUnlessMalloc(sizeof(le_audio_connect_parameters_t));
    memset(conn_param, 0, sizeof(le_audio_connect_parameters_t));

    if (!leUnicastMusicSource_ExtractAudioParameters(inst, Multidevice_GetSide(), conn_param))
    {
        /* Nothing usable was extracted - release the block again. */
        free(conn_param);
        return FALSE;
    }

    source_params->data = (void *)conn_param;
    source_params->data_length = sizeof(le_audio_connect_parameters_t);
    return TRUE;
}

/*! \brief Release the connect parameters allocated by
    leUnicastMusicSource_GetAudioConnectParameters().

    Panics if the length does not match the expected parameter structure.
    The inner length check present previously was redundant: once
    PanicFalse(data_length == sizeof(...)) passes, the length is non-zero.

    \param source         Audio source the parameters belong to.
    \param source_params  Parameter block to free. Must not be NULL.
*/
static void leUnicastMusicSource_FreeAudioConnectParameters(audio_source_t source,
                                                            source_defined_params_t *source_params)
{
    if (AudioSource_IsLeUnicastSource(source))
    {
        PanicNull(source_params);
        PanicFalse(source_params->data_length == sizeof(le_audio_connect_parameters_t));

        free(source_params->data);
        source_params->data = NULL;
        source_params->data_length = 0;
    }
}

/*! \brief Get disconnect parameters for an LE unicast music source.

    No parameters are required to disconnect, so this simply reports success.

    \return Always TRUE.
*/
static bool leUnicastMusicSource_GetAudioDisconnectParameters(audio_source_t source,
                                                              source_defined_params_t *source_params)
{
    UNUSED(source);
    UNUSED(source_params);

    return TRUE;
}

/*! \brief Release disconnect parameters - nothing was allocated, so this is a no-op. */
static void leUnicastMusicSource_FreeAudioDisconnectParameters(audio_source_t source,
                                                               source_defined_params_t *source_params)
{
    UNUSED(source);
    UNUSED(source_params);
}

/*! \brief Check whether the LE unicast music source should be treated as routed.

    If any ASE is enabled and the audio context is of type media, the source is
    reported as routed even before streaming starts: the remote device starts
    the CIS after ASE enable, and reporting routed here gets focus from the
    audio router so the session can start quickly.

    \param source  Audio source to query.
    \return TRUE when the instance exists, its context is media and any ASE is enabled.
*/
static bool leUnicastMusicSource_IsAudioRouted(audio_source_t source)
{
    bool routed = FALSE;
    le_um_instance_t *inst = LeUnicastManager_InstanceGetByAudioSource(source);

    if (inst != NULL)
    {
        routed = LeUnicastManager_IsContextOfTypeMedia(inst->audio_context) &&
                 LeUnicastManager_IsAnyAseEnabled(inst);
    }

    return routed;
}

#ifdef INCLUDE_AUDIO_STACK
/*! \brief Build the key/value parameter list used to prepare the LE unicast use case.

    Fetches the audio connect parameters for the source, then translates the
    speaker ("media") path and the optional microphone (VBC) path into
    AS_PARAM_* entries on a freshly allocated generic_source_audio_stack_params_t.
    Ownership of that block is passed to the caller via \p source_params and is
    released through leUnicastMusicSource_ReleaseUseCaseParams().

    \param generic_source  Generic source wrapper; only the audio member is used.
    \param source_params   [out] Receives the allocated parameter block. Must not be NULL.

    \return TRUE if all required ISO stream endpoints were obtained, FALSE
            otherwise. NOTE(review): on FALSE the (partially filled) block is
            still assigned to \p source_params — presumably the caller releases
            it regardless of the return value; confirm against the audio router.
*/
static bool leUnicastMusicSource_GetUseCasePrepareParams(generic_source_t generic_source, source_defined_params_t *source_params)
{
    bool result = FALSE;
    UNICAST_MANAGER_LOG("leUnicastMusicSource_GetUseCasePrepareParams");
    PanicNull(source_params);

    source_defined_params_t handset_params;
    memset(source_params, 0, sizeof(*source_params));
    audio_source_t source = generic_source.u.audio;

    if(leUnicastMusicSource_GetAudioConnectParameters(source, &handset_params))
    {
        le_audio_connect_parameters_t *conn_param = handset_params.data;
        PanicNull(conn_param);

        generic_source_audio_stack_params_t *params = PanicNull(calloc(1, sizeof(*params)));
        params->kv_list = KeyValueList_Create();

        result = TRUE;

        /* Speaker path: map the media parameters onto AS_PARAM_* keys. */
        if(conn_param->media_present)
        {
            as_param_source_channels_t source_channels = CHANNEL_MONO;
            as_param_sink_channel_t sink_channel = CHANNEL_MONO;
            as_param_source_type_t source_type = SOURCE_TYPE_LEA;

            /* Stereo devices render stereo; a mono stream on a stereo device
               uses the dedicated mono LEA source type. */
            if (Multidevice_IsDeviceStereo())
            {
                sink_channel = CHANNEL_STEREO;
                if(conn_param->media.stream_type == KYMERA_LE_STREAM_MONO)
                {
                    source_type = SOURCE_TYPE_LEA_MONO;
                }
                else
                {
                    source_channels = CHANNEL_STEREO;
                }
            }

            DEBUG_LOG_VERBOSE("leUnicastMusicSource_GetUseCasePrepareParams enum:appKymeraLeStreamType:%d source_iso_handle left 0x%X, right 0x%X", conn_param->media.stream_type, conn_param->media.iso_handle_list.handles[0].handle, conn_param->media.iso_handle_list.handles[1].handle);

            /* Select LEA DUAL source endpoint in case of Dual CIS carrying Stereo usecase
             * and when both or single channel needs to be used
             */
            if(conn_param->media.stream_type == KYMERA_LE_STREAM_DUAL_MONO ||
               conn_param->media.stream_type == KYMERA_LE_STREAM_DUAL_MONO_USE_LEFT ||
               conn_param->media.stream_type == KYMERA_LE_STREAM_DUAL_MONO_USE_RIGHT)
            {
                source_type = SOURCE_TYPE_LEA_DUAL;
                source_channels = CHANNEL_STEREO;
                /* Second (B) stream comes from the right-hand ISO handle. */
                as_param_stream_source_b_t iso_source = (as_param_stream_source_b_t)StreamIsoSource(conn_param->media.iso_handle_list.handles[1].handle);
                if (iso_source)
                {
                    KeyValueList_Add(params->kv_list, AS_PARAM_STREAM_SOURCE_B , &iso_source, sizeof(iso_source));
                }
                else
                {
                    /* Could not obtain the ISO stream endpoint - fail the prepare. */
                    result = FALSE;
                }
            }

            DEBUG_LOG_VERBOSE("leUnicastMusicSource_GetUseCasePrepareParams source is enum:as_param_source_channels_t:%d sink is enum:as_param_sink_channel_t:%d", source_channels, sink_channel);
            KeyValueList_Add(params->kv_list, AS_PARAM_SOURCE_CHANNELS, &source_channels, sizeof(as_param_source_channels_t));
            KeyValueList_Add(params->kv_list, AS_PARAM_SINK_CHANNEL, &sink_channel, sizeof(as_param_sink_channel_t));
            KeyValueList_Add(params->kv_list, AS_PARAM_SOURCE_TYPE, &source_type, sizeof(as_param_source_type_t));

            KeyValueList_Add(params->kv_list, AS_PARAM_IS_AUDIO_SOURCE_PRESENT, &conn_param->media_present, sizeof(as_param_is_audio_source_present_t));

            /* Primary (A) stream always uses the first ISO handle. */
            as_param_stream_source_a_t iso_source = (as_param_stream_source_a_t)StreamIsoSource(conn_param->media.iso_handle_list.handles[0].handle);
            if (iso_source)
            {
                KeyValueList_Add(params->kv_list, AS_PARAM_STREAM_SOURCE_A, &iso_source, sizeof(iso_source));
            }
            else
            {
                result = FALSE;
            }

            KeyValueList_Add(params->kv_list, AS_PARAM_IN_GAMING_MODE, &conn_param->media.gaming_mode, sizeof(as_param_in_gaming_mode_t));
            if (conn_param->media.gaming_mode)
            {
                /* Short kick period keeps latency low in gaming mode. */
                as_param_audio_kick_period_ms_t kick_period_ms = 2;
                KeyValueList_Add(params->kv_list, AS_PARAM_AUDIO_KICK_PERIOD_MS, &kick_period_ms, sizeof(as_param_audio_kick_period_ms_t));
            }

            /* Volume is not using discrete steps in this example, final decision is pending */
            as_param_volume_t volume = VolumeUtils_GetVolumeInDspGain(conn_param->media.volume);
            KeyValueList_Add(params->kv_list, AS_PARAM_VOLUME, &volume, sizeof(as_param_volume_t));

            KeyValueList_Add(params->kv_list, AS_PARAM_SOURCE_CODEC_FRAME_BLOCKS_PER_SDU, &conn_param->media.codec_frame_blocks_per_sdu, sizeof(as_param_source_codec_frame_blocks_per_sdu_t));

            KeyValueList_Add(params->kv_list, AS_PARAM_SOURCE_FRAME_LENGTH, &conn_param->media.frame_length, sizeof(as_param_source_frame_length_t));

            KeyValueList_Add(params->kv_list, AS_PARAM_SOURCE_PRESENTATION_DELAY, &conn_param->media.presentation_delay, sizeof(as_param_source_presentation_delay_t));

            SampleRateKey_AddSourceSampleRate(params->kv_list, conn_param->media.sample_rate);

            as_param_source_frame_duration_t frame_duration = (as_param_source_frame_duration_t) conn_param->media.frame_duration;
            KeyValueList_Add(params->kv_list, AS_PARAM_SOURCE_FRAME_DURATION, &frame_duration, sizeof(as_param_source_frame_duration_t));

            as_param_source_codec_t codec = CodecKey_AddLeAudioSourceCodec(params->kv_list, conn_param->media.codec_type);

            KeyValueList_Add(params->kv_list, AS_PARAM_SOURCE_CODEC_VERSION, &conn_param->media.codec_version, sizeof(as_param_source_codec_version_t));

            /* aptX Adaptive needs extra predecode buffering on top of the frame duration. */
            if (codec == SOURCE_CODEC_APTX_ADAPTIVE)
            {
                as_param_source_predecode_duration_t predecode_duration = conn_param->media.frame_duration + LEA_APTX_ADAPTIVE_ADDITIONAL_PREDECODE_DURATION;
                KeyValueList_Add(params->kv_list, AS_PARAM_SOURCE_PREDECODE_DURATION, &predecode_duration, sizeof(predecode_duration));
            }

            CodecKey_AddLeaChannelAndSideKeys(params->kv_list, codec, conn_param->media.stream_type);
        }

        /* Fill in the microphone Parameters for VBC */
        if (conn_param->microphone_present)
        {
            DEBUG_LOG_VERBOSE("leUnicastMusicSource_GetUseCasePrepareParams microphone is present, Setting VBC");

            KeyValueList_Add(params->kv_list, AS_PARAM_IS_MIC_PRESENT, &conn_param->microphone_present, sizeof(as_param_is_mic_present_t));

            KeyValueList_Add(params->kv_list, AS_PARAM_MIC_FRAME_DURATION, &conn_param->microphone.frame_duration, sizeof(as_param_mic_frame_duration_t));

            /* Microphone data is sent on the ISO sink of the source-ASE CIS. */
            as_param_mic_sink_a_t mic_sink = (as_param_mic_sink_a_t)StreamIsoSink(conn_param->microphone.source_iso_handle);
            if (mic_sink)
            {
                KeyValueList_Add(params->kv_list, AS_PARAM_MIC_SINK_A, &mic_sink, sizeof(mic_sink));
            }
            else
            {
                result = FALSE;
            }

            KeyValueList_Add(params->kv_list, AS_PARAM_MIC_PRESENTATION_DELAY, &conn_param->microphone.presentation_delay, sizeof(as_param_mic_presentation_delay_t));

            KeyValueList_Add(params->kv_list, AS_PARAM_MIC_CODEC_FRAME_BLOCK_PER_SDU , &conn_param->microphone.codec_frame_blocks_per_sdu, sizeof(as_param_mic_codec_frame_block_per_sdu_t));

            KeyValueList_Add(params->kv_list, AS_PARAM_MIC_FRAME_LENGTH, &conn_param->microphone.frame_length, sizeof(as_param_mic_frame_length_t));

            as_param_voice_transport_type_t voice_transport = VOICE_TRANSPORT_ISO;
            KeyValueList_Add(params->kv_list, AS_PARAM_VOICE_TRANSPORT_TYPE, &voice_transport, sizeof(voice_transport));

            /* Voice codec follows the media codec when media is present,
               otherwise the microphone's own codec. */
            as_param_voice_codec_t voice_codec;
            voice_codec = CodecKey_AddLeaVoiceCodec(params->kv_list, conn_param->media_present ? conn_param->media.codec_type :
                                                                                                 conn_param->microphone.codec_type);

            if(voice_codec == VOICE_CODEC_APTXLITE)
            {
                CodecKey_AddAptxLiteEncoderConfigKey(params->kv_list);
            }

            SampleRateKey_AddSinkSampleRate(params->kv_list, conn_param->microphone.sample_rate);

            if(appPeerSigIsConnected() && conn_param->microphone.mic_sync_needed)
            {
                /* Start Mic chain muted until both MICs are syncronized */
                bool is_mic_muted = TRUE;
                KeyValueList_Add(params->kv_list, AS_PARAM_IS_MIC_MUTED, &is_mic_muted, sizeof(is_mic_muted));
            }
        }

        source_params->data = params;
        source_params->data_length = sizeof(*params);
        /* The intermediate connect parameters are no longer needed. */
        leUnicastMusicSource_FreeAudioConnectParameters(source, &handset_params);
    }
    return result;
}

/*! \brief Provide the parameters needed to start the prepared LE unicast use case.

    No start-specific key/value entries are required, so an empty,
    zero-initialised parameter block is allocated for the caller and released
    later via leUnicastMusicSource_ReleaseUseCaseParams().

    \param generic_source  Unused (interface signature requirement).
    \param source_params   [out] Receives the allocated block. Must not be NULL.

    \return Always TRUE.
*/
static bool leUnicastMusicSource_GetUseCaseStartParams(generic_source_t generic_source, source_defined_params_t *source_params)
{
    DEBUG_LOG_VERBOSE("leUnicastMusicSource_GetUseCaseStartParams");
    UNUSED(generic_source);

    generic_source_audio_stack_params_t *params = PanicNull(calloc(1, sizeof(*params)));

    PanicNull(source_params);
    source_params->data = params;
    source_params->data_length = sizeof(*params);
    return TRUE;
}

/*! \brief Release a parameter block produced by the prepare or start param getters.

    Destroys the key/value list (assumed to tolerate a NULL list for the empty
    start params — confirm against KeyValueList_Destroy), frees the container
    and clears \p source_params so a double release is harmless.

    \param generic_source  Unused (interface signature requirement).
    \param source_params   Parameter block to release. Must not be NULL.
*/
static void leUnicastMusicSource_ReleaseUseCaseParams(generic_source_t generic_source, source_defined_params_t *source_params)
{
    UNUSED(generic_source);
    PanicNull(source_params);
    if (source_params->data)
    {
        generic_source_audio_stack_params_t *params = source_params->data;
        PanicFalse(source_params->data_length == sizeof(*params));
        KeyValueList_Destroy(&params->kv_list);
        free(params);
        memset(source_params, 0, sizeof(*source_params));
    }
}
#endif

/*! \brief Check whether LE unicast audio is available for this source.

    The availability criteria are currently identical to the routed criteria
    (media context with any ASE enabled — see
    leUnicastMusicSource_IsAudioRouted()), so delegate instead of duplicating
    the body as before; the two can only drift apart deliberately now.

    \param source  Audio source to query.
    \return TRUE when audio should be treated as available.
*/
static bool leUnicastMusicSource_IsAudioAvailable(audio_source_t source)
{
    return leUnicastMusicSource_IsAudioRouted(source);
}
/*! \brief Handle audio-router state changes for an LE unicast music source.

    Dispatches on the requested state: tears down or starts the unicast
    session as needed and records the new state on the instance.

    \param source  Audio source being transitioned.
    \param state   Target source state requested by the audio router.

    \return source_status_ready unless a session start/stop handler reports
            otherwise.
*/
static source_status_t leUnicastMusicSource_SetState(audio_source_t source, source_state_t state)
{
    source_status_t source_status = source_status_ready;
    le_um_instance_t *inst = LeUnicastManager_InstanceGetByAudioSource(source);
    generic_source_t gen_source;

    gen_source.type = source_type_audio;
    gen_source.u.audio = source;

    UNICAST_MANAGER_LOG("leUnicastMusicSource_SetState source enum:audio_source_t:%u state enum:source_state_t:%u", source, state);
    if (inst != NULL)
    {
        switch (state)
        {
            case source_state_disconnecting:
            {
                source_status = leUnicastManager_HandleSourceInDisconnectingState(inst, gen_source);
            }
            break;

            case source_state_disconnected:
            {
#ifdef ENABLE_LE_MULTIPOINT_BACKGROUND_CIS_SUPPORT
                if (inst->source_state == source_state_invalid)
                {
                    /* This source got added into audio router, but did not got focus, because of some high priority
                     * source already running. Goahead and push this for background processing.
                     */
                    source_status = leUnicastManager_StartUnicastSession(inst, FALSE, gen_source);
                }
                else
#endif
                {
                    /* Pause the remote media player if it is still playing
                       while we are routed, then end the session. */
                    if (leUnicastMusicSource_IsAudioRouted(source))
                    {
                         if (MediaClientControl_GetAudioSourceContext(inst->cid) == context_audio_is_playing)
                         {
                             AudioSources_Pause(source);
                         }
                    }

                    /* End the current unicast music session */
                    leUnicastManager_EndUnicastSession(inst);
                }
            }
            break;

            case source_state_connecting:
            {
                /*  The audio source has got focus for streaming.If all CIS's are connected and
                 *  data path is ready, return that we are ready for streaming. Else the
                 *  following conditions can prevail:
                 *  1) None of the isochronous streams are connected yet.
                 *  2) CIS's are partially connected.
                 *  3) All CIS's are connected, but data path is not created yet.
                 *
                 *  Under such conditions, start setting up the unicast session.
                 */
                if (!LeUnicastManager_IsInstanceReadyForStreaming(inst)
                   )
                {
                    source_status = leUnicastManager_StartUnicastSession(inst, TRUE, gen_source);
                }
            }
            break;

            default:
            break;
        }

        /* Remember the state even for transitions with no explicit handling. */
        inst->source_state = state;
    }

    return source_status;
}

/*! \brief Report the last source state recorded for this LE unicast music source.

    \param source  Audio source to query.
    \return The stored state, or source_state_invalid when no instance is
            associated with the source.
*/
static source_state_t leUnicastMusicSource_GetState(audio_source_t source)
{
    le_um_instance_t *inst = LeUnicastManager_InstanceGetByAudioSource(source);

    return (inst != NULL) ? inst->source_state : source_state_invalid;
}

/*! \brief Reconfigure the routed LE unicast music source.

    Disconnects the source from the audio router and reconnects it so that any
    changed configuration is picked up.

    \param inst  Unicast manager instance whose audio source is reconfigured.
*/
void LeUnicastMusicSource_Reconfig(le_um_instance_t *inst)
{
    generic_source_t source;

    source.type = source_type_audio;
    source.u.audio = LeUnicastManager_GetAudioSourceForInstance(inst);

    /* Cycle the connection so the new parameters are applied. */
    AudioRouter_DisconnectAudioSource(source);
    bool status = AudioRouter_ConnectAudioSource(source);

    UNICAST_MANAGER_LOG("LeUnicastMusicSource_Reconfig: status %d", status);
}

/*! \brief Register the LE unicast music source interfaces.

    Registers the audio interface (and, when the audio stack is included, the
    generic-source audio-stack interface) for both LE unicast audio sources.
*/
void LeUnicastMusicSource_Init(void)
{
#ifdef INCLUDE_AUDIO_STACK
    GENERIC_AUDIO_SOURCE_MAKE(audio_source_le_audio_unicast_1);
    GenericSource_RegisterAudioStackInterface(generic_audio_source_le_audio_unicast_1, &audio_stack_if);
    GENERIC_AUDIO_SOURCE_MAKE(audio_source_le_audio_unicast_2);
    GenericSource_RegisterAudioStackInterface(generic_audio_source_le_audio_unicast_2, &audio_stack_if);
#endif
    AudioSources_RegisterAudioInterface(audio_source_le_audio_unicast_1, &music_if);
    AudioSources_RegisterAudioInterface(audio_source_le_audio_unicast_2, &music_if);
}

#endif /* defined(INCLUDE_LE_AUDIO_UNICAST) */



#endif /* LE_UNICAST_MUSIC_SOURCE_C_ */
