/*!
\copyright  Copyright (c) 2023 - 2025 Qualcomm Technologies International, Ltd.
            All Rights Reserved.
            Qualcomm Technologies International, Ltd. Confidential and Proprietary.
\file
\brief      Kymera A2DP for stereo
*/

#if defined(INCLUDE_MIRRORING) && defined(ENABLE_TWM_STEREO)
#include "kymera_a2dp.h"
#include "kymera_a2dp_private.h"
#include "kymera_dsp_clock.h"
#include "kymera_buffer_utils.h"
#include "kymera_state.h"
#include "kymera_output_if.h"
#include "kymera_source_sync.h"
#include "kymera_latency_manager.h"
#include "kymera_music_processing.h"
#include "kymera_leakthrough.h"
#include "kymera_config.h"
#include "kymera_data.h"
#include "kymera_internal_msg_ids.h"
#include "kymera_setup.h"
#include "timestamp_event.h"
#include "av.h"
#include "a2dp_profile_config.h"
#include "multidevice.h"
#include "mirror_profile_protected.h"
#include "mirror_profile_audio_source.h"
#include "handset_service.h"
#if defined(INCLUDE_MUSIC_PROCESSING)
#include "user_eq_notification.h"
#endif
#include <operators.h>
#include <logging.h>
#include <aptx_ad_utils.h>
#include <vm.h>
#include "sync_start_a2dp.h"

/*! Helper defines for RTP header format. These
    are used for hash transform configuration */
#define RTP_HEADER_LENGTH (12)
#define RTP_HEADER_SEQUENCE_NO_OFFSET (2)


#ifdef KYMERA_PIO_TOGGLE
#include "pio.h"
#define KYMERA_PIO_MASK (1<<21)
#define KymeraPioSet() PioSet32Bank(0, KYMERA_PIO_MASK, KYMERA_PIO_MASK)
#define KymeraPioClr() PioSet32Bank(0, KYMERA_PIO_MASK, 0)
#else
#define KymeraPioSet()
#define KymeraPioClr()
#endif

static bool appKymeraA2dpGetPreferredChainOutput(kymera_output_chain_config *config);

/*! Callbacks registered with the output manager; only the preferred-chain
    prediction hook is implemented. */
static const output_callbacks_t appKymeraA2dpStereoMirrorCallbacks =
{
   .OutputGetPreferredChainConfig = appKymeraA2dpGetPreferredChainOutput,
};

/*! Output-manager registration for stereo playback (standalone or TWM). */
static const output_registry_entry_t output_info_stereo_mirror =
{
    .user = output_user_a2dp,
    .connection = output_connection_stereo, /* default is a stereo connection, but can change at run time */
    .callbacks = &appKymeraA2dpStereoMirrorCallbacks,
};

/*! Output-manager registration for mono playback (left/right mirroring).
    Both entries share the same callbacks. */
static const output_registry_entry_t output_info_mono_mirror =
{
    .user = output_user_a2dp_spk_mono,
    .connection = output_connection_mono,
    .callbacks = &appKymeraA2dpStereoMirrorCallbacks,
};

/*! \brief Populate the output chain configuration for the given A2DP parameters.

    \param a2dp_params  SEID and sample rate of the (possibly predicted) A2DP stream
    \param config       [out] Output chain configuration to fill in

    Selects the kick period, source-sync input/output buffer sizes and
    kick-back thresholds per codec. The output chain type is always stereo;
    a mono signal is split to feed both DAC channels.
*/
static void appKymeraA2dpPopulateOutputChainConfig(a2dp_params_getter_t a2dp_params, kymera_output_chain_config *config)
{
    unsigned kick_period = KICK_PERIOD_FAST;
    unsigned block_size = DEFAULT_CODEC_BLOCK_SIZE;
    unsigned kp_multiplier = 5;
    unsigned kp_divider = 2;
    unsigned input_terminal_delta_buffer_size = 0;
    uint32   music_processing_kick_back_threshold = 256;

    DEBUG_LOG("appKymeraA2dpPopulateOutputChainConfig");

    switch (a2dp_params.seid)
    {
        case AV_SEID_SBC_SNK:
            kick_period = KICK_PERIOD_MASTER_SBC;
            block_size = SBC_CODEC_BLOCK_SIZE;
            break;

        case AV_SEID_AAC_SNK:
            kick_period = KICK_PERIOD_MASTER_AAC;
            block_size = AAC_CODEC_BLOCK_SIZE;
            music_processing_kick_back_threshold = block_size;

            /* Increase the output buffer of source_sync to 4x the kick period.
               NOTE(review): kp_divider = 0 presumably selects whole-multiplier
               sizing - confirm in appKymeraSetSourceSyncConfigOutputBufferSize */
            kp_multiplier = 4;
            kp_divider = 0;
            /* If AEC Ref is included in the audio graph, then there are possibilities that
               the AAC audio graph could have a MIPS issue when the graph is running @ 32MHz.
               So, the input terminal buffer for the source_sync operator should have some
               extra delta to offset this issue */
            if(Kymera_OutputIsAecAlwaysUsed() && appKymera_IsSpeakerStereo(KymeraGetTaskData()))
            {
                /* The delta increase in buffer size should be calculated such that the overall
                   terminal buffer size remains smaller than 2*decoder_block_size */
                input_terminal_delta_buffer_size = 500;
            }
            break;

        case AV_SEID_APTX_SNK:
        case AV_SEID_APTXHD_SNK:
            kick_period = KICK_PERIOD_MASTER_APTX;
            block_size = APTX_CODEC_BLOCK_SIZE;
        break;

#ifdef INCLUDE_APTX_ADAPTIVE
        case AV_SEID_APTX_ADAPTIVE_SNK:

            if(appKymeraIsAptxR22Enabled())
            {
                /* slow kick period for 2.2 */
                kick_period = KICK_PERIOD_MASTER_APTX;
                block_size = APTX_ADAPTIVE_CODEC_BLOCK_SIZE;
            }
            else
            {
                /* Fast kick period */
                kick_period = KICK_PERIOD_MASTER_APTX_ADAPTIVE;
                block_size = APTX_CODEC_BLOCK_SIZE;
            }
        break;
#endif

        default :
            Panic();
            break;
    }

    /* Gaming mode may force the fast kick period regardless of codec */
    if (Kymera_FastKickPeriodInGamingMode() && Kymera_LatencyManagerIsGamingModeEnabled())
    {
        kick_period = KICK_PERIOD_FAST;
    }

    config->rate = a2dp_params.rate;
    config->kick_period = kick_period;
    config->source_sync_kick_back_threshold = block_size;
    /* Stereo mirroring with music processing uses the (smaller) music
       processing threshold instead of the codec block size */
    if(Kymera_IsMusicProcessingPresent() && appKymera_IsStereoMirroring(KymeraGetTaskData()))
        config->source_sync_kick_back_threshold = music_processing_kick_back_threshold;

    /* NOTE(review): if kick_period is neither SLOW nor FAST the min/max
       periods below remain unset even though the set_ flags are TRUE -
       confirm all codec kick periods map to one of those two values */
    if (kick_period == KICK_PERIOD_SLOW)
    {
        config->source_sync_max_period = appKymeraGetSlowKickSourceSyncPeriod(TRUE);
        config->source_sync_min_period = appKymeraGetSlowKickSourceSyncPeriod(FALSE);
    }
    else if (kick_period == KICK_PERIOD_FAST)
    {
        config->source_sync_max_period = appKymeraGetFastKickSourceSyncPeriod(TRUE);
        config->source_sync_min_period = appKymeraGetFastKickSourceSyncPeriod(FALSE);
    }
    config->set_source_sync_min_period = TRUE;
    config->set_source_sync_max_period = TRUE;
    config->set_source_sync_kick_back_threshold = TRUE;

    /* Output buffer is 2.5*KP or 4*KP (if AEC Ref is in the audio chain) */
    appKymeraSetSourceSyncConfigOutputBufferSize(config, kp_multiplier, kp_divider);
    appKymeraSetSourceSyncConfigInputBufferSize(config, (block_size + input_terminal_delta_buffer_size));
    /* Output chain is stereo(both channels of DAC are enabled) in both standalone & TWM mode
     * If the output is mono then, mono signal is split and provided to the output chain to play
     * over both the channels of DAC */
    config->chain_type = output_chain_stereo;
}

/*! \brief Ask the application for a prediction of the next A2DP parameters.

    \param rate  [out] Predicted sample rate
    \param seid  [out] Predicted stream endpoint ID
    \return TRUE if a registered callback supplied a prediction, FALSE otherwise.
*/
static bool appKymeraA2dpGetA2dpParametersPrediction(uint32 *rate, uint8 *seid)
{
    const kymera_callback_configs_t *callbacks = Kymera_GetCallbackConfigs();

    DEBUG_LOG("appKymeraA2dpGetA2dpParametersPrediction");

    if ((callbacks == NULL) || (callbacks->GetA2dpParametersPrediction == NULL))
    {
        return FALSE;
    }
    return callbacks->GetA2dpParametersPrediction(rate, seid);
}

/*! \brief Output-manager callback: provide the preferred output chain config
    based on the application's A2DP parameter prediction.

    \param config  [out] Filled in only when a prediction is available.
    \return TRUE if config was populated, FALSE if no prediction exists.
*/
static bool appKymeraA2dpGetPreferredChainOutput(kymera_output_chain_config *config)
{
    uint32 predicted_rate;
    uint8 predicted_seid;

    if (!appKymeraA2dpGetA2dpParametersPrediction(&predicted_rate, &predicted_seid))
    {
        return FALSE;
    }

    a2dp_params_getter_t a2dp_params;
    a2dp_params.rate = predicted_rate;
    a2dp_params.seid = predicted_seid;
    appKymeraA2dpPopulateOutputChainConfig(a2dp_params, config);
    return TRUE;
}

/*! \brief Create the A2DP input (decoder) chain for the given SEID.

    \param theKymera  Kymera task data; chain_input_handle is set on success.
    \param seid       Stream endpoint ID selecting the codec chain.

    The chain variant depends on the mirroring topology: left/right mono
    mirroring uses the "stereo mix" (downmix) configs, full stereo mirroring
    and standalone use stereo configs. Panics for unsupported combinations
    (e.g. aptX HD mono mirroring, aptX adaptive forwarding).
*/
static void appKymeraCreateInputChain(kymeraTaskData *theKymera, uint8 seid)
{
    const chain_config_t *config = NULL;
    DEBUG_LOG("appKymeraCreateInputChain");

    switch (seid)
    {
        case AV_SEID_SBC_SNK:
            DEBUG_LOG("Create SBC input chain");
            config = Kymera_GetChainConfigs()->chain_input_sbc_stereo_config;
            if(appKymera_IsStereoMirroringLeftOrRight(theKymera))
                config = Kymera_GetChainConfigs()->chain_input_sbc_stereo_mix_config;
        break;

        case AV_SEID_AAC_SNK:
            DEBUG_LOG("Create AAC input chain");
            config = Kymera_GetChainConfigs()->chain_input_aac_stereo_config;
            if(appKymera_IsStereoMirroringLeftOrRight(theKymera))
                config = Kymera_GetChainConfigs()->chain_input_aac_stereo_mix_config;
        break;

        case AV_SEID_APTX_SNK:
            DEBUG_LOG("Create aptX Classic input chain");
            config = Kymera_GetChainConfigs()->chain_input_aptx_stereo_config;
            if(appKymera_IsStereoMirroringLeftOrRight(theKymera))
            {
                if (appConfigEnableAptxStereoMix())
                {
                    config = Kymera_GetChainConfigs()->chain_input_aptx_stereo_mix_config;
                }
                else
                {
                    /* Legacy forwarding chain: each side decodes only its own channel */
                    config = appKymera_IsStereoMirroringLeft(theKymera) ? Kymera_GetChainConfigs()->chain_forwarding_input_aptx_left_config :
                                       Kymera_GetChainConfigs()->chain_forwarding_input_aptx_right_config;
                }
            }
            else if(appKymera_IsStereoMirroring(theKymera))
            {
                /* it could so happen that while streaming aptx classic mono, TWM speaker might be configured for party mode
                   or vice-versa. Since we are not informing about this change to AG, need to have similar demux configuration
                   on both mono and stereo chain. */
                config = Kymera_GetChainConfigs()->chain_input_aptx_split_stereo_config;
            }
        break;

        case AV_SEID_APTXHD_SNK:
            DEBUG_LOG("Create aptX HD input chain");
            config = Kymera_GetChainConfigs()->chain_input_aptxhd_stereo_config;
            if(appKymera_IsStereoMirroringLeftOrRight(theKymera))
                {
                    /* aptX HD mono mirroring is not supported */
                    Panic();
                }
        break;

#ifdef INCLUDE_APTX_ADAPTIVE
        case AV_SEID_APTX_ADAPTIVE_SNK:
             DEBUG_LOG("Create aptX Adaptive input chain");
             if(appKymera_IsStereoMirroringLeftOrRight(theKymera))
             {
                if (appConfigEnableAptxAdaptiveStereoMix())
                {
#ifdef INCLUDE_APTX_ADAPTIVE_22
                    /* R2.2 decoder has dedicated mono chains, with or without Q2Q */
                    if (theKymera->aptx_adaptive_r22_dec)
                    {
                        if (theKymera->q2q_mode)
                            config =  Kymera_GetChainConfigs()->chain_input_aptx_adaptive_r3_mono_q2q_config;
                        else
                            config =  Kymera_GetChainConfigs()->chain_input_aptx_adaptive_r3_mono_config;
                    }
                    else
#endif
                    {
                        if (theKymera->q2q_mode)
                            config =  Kymera_GetChainConfigs()->chain_input_aptx_adaptive_stereo_mix_q2q_config;
                        else
                            config =  Kymera_GetChainConfigs()->chain_input_aptx_adaptive_stereo_mix_config;
                    }

                }
                else
                {/* We do not support forwarding for aptX adaptive */
                    Panic();
                }
             }
             else
             {
#ifdef INCLUDE_APTX_ADAPTIVE_22
                 if(theKymera->aptx_adaptive_r22_dec)
                 {
                     if(theKymera->q2q_mode)
                         config = Kymera_GetChainConfigs()->chain_input_aptx_adaptive_r3_stereo_q2q_config;
                     else
                         config = Kymera_GetChainConfigs()->chain_input_aptx_adaptive_r3_stereo_config;
                 }
                 else
#endif  /* INCLUDE_APTX_ADAPTIVE_22 */
                 {
                     if (theKymera->q2q_mode)
                         config =  Kymera_GetChainConfigs()->chain_input_aptx_adaptive_stereo_q2q_config;
                     else
                         config =  Kymera_GetChainConfigs()->chain_input_aptx_adaptive_stereo_config;
                 }
             }
        break;
#endif
        default:
            Panic();
        break;
    }

    /* Create input chain */
    theKymera->chain_input_handle = PanicNull(ChainCreate(config));
}

/*! \brief Return the maximum bitrate (in bps) assumed for a codec when the
    caller has not supplied one.

    \param seid  Stream endpoint ID identifying the codec.
    \return Maximum bitrate in bits per second; panics on unknown SEID.
*/
static uint32 appKymeraGetCodecMaxBitrate(uint8 seid)
{
    uint32 rate_kbps;

    switch (seid)
    {
        case AV_SEID_APTX_ADAPTIVE_SNK:
            rate_kbps = APTX_AD_CODEC_RATE_KBPS;
            break;

        /* Share one conservative ceiling; could be tightened per codec */
        case AV_SEID_SBC_SNK:
        case AV_SEID_AAC_SNK:
        case AV_SEID_APTX_SNK:
            rate_kbps = APTX_STEREO_CODEC_RATE_KBPS;
            break;

        default:
            Panic();
            rate_kbps = 0;
            break;
    }

    return rate_kbps * 1000;
}


/*! \brief Configure the operators of the created A2DP input chain.

    \param theKymera          Kymera task data (input chain must already exist)
    \param seid               Stream endpoint ID selecting the codec
    \param rate               Sample rate in Hz
    \param max_bitrate        Maximum codec bitrate in bps (0 = use codec default)
    \param cp_header_enabled  TRUE if the RTP content-protection header is present
    \param nq2q_ttp           aptX adaptive non-Q2Q time-to-play latencies

    Configures the RTP decoder (skipped in Q2Q mode), codec-specific operators
    and the left/right mixer (stereo mirroring only), then connects the chain.
*/
static void appKymeraConfigureInputChain(kymeraTaskData *theKymera,
                                         uint8 seid, uint32 rate, uint32 max_bitrate,
                                         bool cp_header_enabled,
                                         aptx_adaptive_ttp_latencies_t nq2q_ttp)
{
    kymera_chain_handle_t chain_handle = theKymera->chain_input_handle;
    rtp_codec_type_t rtp_codec = -1;
    rtp_working_mode_t mode = rtp_decode;
    Operator op_aac_decoder;
    /* Fix: op is used by the aptX Classic mirroring path as well as the aptX
       Adaptive path, so it must be declared unconditionally. Previously it was
       declared only under INCLUDE_APTX_ADAPTIVE, which broke builds without
       that define. */
    Operator op;
    Operator op_rtp_decoder = ChainGetOperatorByRole(chain_handle, OPR_RTP_DECODER);
    uint32_t rtp_buffer_size = PRE_DECODER_BUFFER_SIZE;
    uint32_t max_aptx_bitrate = max_bitrate;
    DEBUG_LOG("appKymeraConfigureInputChain");

    if(appKymera_IsStereoMirroring(theKymera))
    {
        max_aptx_bitrate = (max_bitrate) ? max_bitrate : appKymeraGetCodecMaxBitrate(seid);
        /* Fix: size the buffer from the resolved bitrate (max_aptx_bitrate).
           Using the raw max_bitrate parameter could produce an undersized
           buffer when the caller passes 0 to mean "use the codec default"
           (matches the pattern used for aptX adaptive Q2Q below). */
        rtp_buffer_size = Kymera_GetAudioBufferSize(max_aptx_bitrate, TWS_STANDARD_LATENCY_MAX_MS);
    }

    switch (seid)
    {
        case AV_SEID_SBC_SNK:
            DEBUG_LOG("configure SBC input chain");
            rtp_codec = rtp_codec_type_sbc;
        break;

        case AV_SEID_AAC_SNK:
            DEBUG_LOG("configure AAC input chain");
            rtp_codec = rtp_codec_type_aac;
            /* The RTP decoder needs a reference to the AAC decoder operator */
            op_aac_decoder = PanicZero(ChainGetOperatorByRole(chain_handle, OPR_AAC_DECODER));
            OperatorsRtpSetAacCodec(op_rtp_decoder, op_aac_decoder);
        break;

        case AV_SEID_APTX_SNK:
            DEBUG_LOG("configure aptX Classic input chain");
            rtp_codec = rtp_codec_type_aptx;
            if(appKymera_IsStereoMirroring(theKymera))
            {
                op = PanicZero(ChainGetOperatorByRole(chain_handle, OPR_APTX_DEMUX));
                OperatorsStandardSetSampleRate(op, rate);
                op = PanicZero(ChainGetOperatorByRole(chain_handle, OPR_SWITCHED_PASSTHROUGH_CONSUMER));
                OperatorsSetSwitchedPassthruEncoding(op, spc_op_format_encoded);

                if (appConfigEnableAptxStereoMix())
                {
                    spc_mode_t sync_mode = spc_op_mode_tagsync_dual;
                    OperatorsSetSwitchedPassthruMode(op, sync_mode);
                }
            }
            else
            {
                if (!cp_header_enabled)
                {
                    /* Without content protection only TTP processing is needed */
                    mode = rtp_ttp_only;
                }
            }
        break;

        case AV_SEID_APTXHD_SNK:
            DEBUG_LOG("configure aptX HD input chain");
            rtp_codec = rtp_codec_type_aptx_hd;
        break;

#ifdef INCLUDE_APTX_ADAPTIVE
        case AV_SEID_APTX_ADAPTIVE_SNK:
            DEBUG_LOG("configure aptX adaptive input chain");
            aptx_adaptive_ttp_in_ms_t aptx_ad_ttp;
            uint32_t max_aptx_latency = APTX_ADAPTIVE_HQ_LATENCY_MS;

            if (theKymera->q2q_mode)
            {
                if(appKymera_IsSpeakerStereo(theKymera))
                {
                    if(appKymeraIsAptxR22Enabled() == TRUE)
                    {
                        max_aptx_bitrate = (max_bitrate) ? max_bitrate : (APTX_AD_LOSSLESS_CODEC_RATE_KBPS * 1000);
                        rtp_buffer_size = Kymera_GetAudioBufferSize(max_aptx_bitrate, TWS_STANDARD_LATENCY_MAX_MS);
                    }
                    else
                    {
                        max_aptx_bitrate = (rate == SAMPLE_RATE_96000) ? APTX_AD_CODEC_RATE_HS_QHS_96K_KBPS * 1000: APTX_AD_CODEC_RATE_QHS_48K_KBPS * 1000;
                        rtp_buffer_size = Kymera_GetAudioBufferSize(max_aptx_bitrate, max_aptx_latency);
                    }
                }

                /* Q2Q mode bypasses the RTP decoder: use a switched passthrough
                   consumer sized for the worst-case latency instead */
                op = PanicZero(ChainGetOperatorByRole(chain_handle, OPR_SWITCHED_PASSTHROUGH_CONSUMER));
                OperatorsSetSwitchedPassthruEncoding(op, spc_op_format_encoded);
                OperatorsStandardSetBufferSizeWithFormat(op, rtp_buffer_size, operator_data_format_encoded);
                OperatorsSetSwitchedPassthruMode(op, spc_op_mode_passthrough);
            }
            else
            {
                convertAptxAdaptiveTtpToOperatorsFormat(nq2q_ttp, &aptx_ad_ttp);
                getAdjustedAptxAdaptiveTtpLatencies(&aptx_ad_ttp);
                OperatorsRtpSetAptxAdaptiveTTPLatency(op_rtp_decoder, aptx_ad_ttp);
                rtp_codec = rtp_codec_type_aptx_ad;

                if(!appKymera_IsStereoMirroring(theKymera))
                {
                    max_aptx_bitrate = (rate == SAMPLE_RATE_96000) ? APTX_AD_CODEC_RATE_HS_NQHS_96K_KBPS * 1000 : APTX_AD_CODEC_RATE_NQHS_48K_KBPS *1000;
                    max_aptx_latency = aptx_ad_ttp.high_quality ;
                    rtp_buffer_size = Kymera_GetAudioBufferSize(max_aptx_bitrate, max_aptx_latency);
                }
            }


            op = PanicZero(ChainGetOperatorByRole(chain_handle, OPR_APTX_ADAPTIVE_DECODER));
            OperatorsStandardSetSampleRate(op, rate);

        break;
#endif

        default:
            Panic();
        break;
    }

    if(appKymera_IsStereoMirroring(theKymera))
    {
        /* Set stereo_lr_mix to FALSE to output 100% left/right from mixer */
        appKymeraSetStereoLeftRightMix(FALSE);

        DEBUG_LOG("appKymeraConfigureLeftRightMixer, rate = %lu, enable_left_right_mix = %d, is_left = %d", 
                   rate, theKymera->enable_left_right_mix, appKymera_IsStereoMirroringLeft(theKymera));
        appKymeraConfigureLeftRightMixer(chain_handle, rate, theKymera->enable_left_right_mix, appKymera_IsStereoMirroringLeft(theKymera));
    }

    if (!theKymera->q2q_mode) /* We don't use rtp decoder for Q2Q mode */
        appKymeraConfigureRtpDecoder(op_rtp_decoder, rtp_codec, mode, rate, cp_header_enabled, rtp_buffer_size);

    /* Give the application a chance to apply its own chain configuration */
    if(theKymera->chain_config_callbacks && theKymera->chain_config_callbacks->ConfigureA2dpInputChain)
    {
        kymera_a2dp_config_params_t params = {0};
        params.seid = seid;
        params.sample_rate = rate;
        params.max_bitrate = max_aptx_bitrate;
        params.nq2q_ttp = nq2q_ttp;
        theKymera->chain_config_callbacks->ConfigureA2dpInputChain(chain_handle, &params);
    }

    ChainConnect(chain_handle);
}

/*! \brief Prepare the output chain for the given SEID and sample rate.

    Left/right mono mirroring registers as the mono output user; all other
    topologies use the stereo A2DP user.
*/
static void appKymeraCreateOutputChain(uint8 seid, uint32 rate)
{
    kymera_output_chain_config chain_config = {0};
    a2dp_params_getter_t params;
    output_users_t user = output_user_a2dp;

    if (appKymera_IsStereoMirroringLeftOrRight(KymeraGetTaskData()))
    {
        user = output_user_a2dp_spk_mono;
    }

    params.seid = seid;
    params.rate = rate;
    appKymeraA2dpPopulateOutputChainConfig(params, &chain_config);

    PanicFalse(Kymera_OutputPrepare(user, &chain_config));
}

/*! \brief Connect the input (decoder) chain to the output chain, optionally
    routing through the music processing chain.

    \param theKymera  Kymera task data; input (and, if present, music
                      processing) chain handles must already be created.

    Left/right mono mirroring produces a single PCM output; otherwise both
    stereo channels are connected.
*/
static void appKymeraJoinChains(kymeraTaskData *theKymera)
{
    output_source_t output = {0};
    output_users_t output_user = appKymera_IsStereoMirroringLeftOrRight(KymeraGetTaskData()) ? output_user_a2dp_spk_mono : output_user_a2dp;

    if(appKymera_IsStereoMirroringLeftOrRight(theKymera))
        output.mono = ChainGetOutput(theKymera->chain_input_handle, EPR_SOURCE_DECODED_PCM);
    else
    {
        output.stereo.left = ChainGetOutput(theKymera->chain_input_handle, EPR_SOURCE_DECODED_PCM);
        output.stereo.right = ChainGetOutput(theKymera->chain_input_handle, EPR_SOURCE_DECODED_PCM_RIGHT);
    }

    /* If music processing exists, insert it between decoder and output by
       re-pointing the output sources at the music processing chain outputs */
    if(Kymera_IsMusicProcessingPresent())
    {
        if(appKymera_IsStereoMirroringLeftOrRight(theKymera))
        {
            /* Mono path uses only the left terminal of the music processing chain */
            PanicFalse(ChainConnectInput(theKymera->chain_music_processing_handle, output.mono, EPR_MUSIC_PROCESSING_IN_L));
            output.mono = ChainGetOutput(theKymera->chain_music_processing_handle, EPR_MUSIC_PROCESSING_OUT_L);
        }
        else
        {
            PanicFalse(ChainConnectInput(theKymera->chain_music_processing_handle, output.stereo.left, EPR_MUSIC_PROCESSING_IN_L));
            PanicFalse(ChainConnectInput(theKymera->chain_music_processing_handle, output.stereo.right, EPR_MUSIC_PROCESSING_IN_R));
            output.stereo.left = ChainGetOutput(theKymera->chain_music_processing_handle, EPR_MUSIC_PROCESSING_OUT_L);
            output.stereo.right = ChainGetOutput(theKymera->chain_music_processing_handle, EPR_MUSIC_PROCESSING_OUT_R);
        }
    }

    PanicFalse(Kymera_OutputConnect(output_user, &output));
}

/*! \brief Create and configure the complete A2DP audio graph.

    \param codec_settings  Negotiated A2DP codec settings (SEID, rate, sink...)
    \param max_bitrate     Maximum codec bitrate in bps (0 = use codec default)
    \param nq2q_ttp        aptX adaptive non-Q2Q time-to-play latencies

    Creates output, input and (optional) music processing chains and joins
    them. The DSP clock is boosted for the duration of chain creation and
    re-evaluated at the end.

    Fix: the previous UNUSED(max_bitrate) marker was misleading and has been
    removed - max_bitrate is genuinely used (it is passed to
    appKymeraConfigureInputChain below).
*/
static void kymera_A2dpCreateChain(const a2dp_codec_settings *codec_settings, uint32 max_bitrate, aptx_adaptive_ttp_latencies_t nq2q_ttp)
{
    kymeraTaskData *theKymera = KymeraGetTaskData();
    bool cp_header_enabled;
    bool split_mode_enabled = FALSE;
    uint32 rate;
    uint8 seid;
    Source media_source;

    appKymeraGetA2dpCodecSettingsCore(codec_settings, &seid, &media_source, &rate, &cp_header_enabled, NULL);

    if(seid == AV_SEID_APTX_ADAPTIVE_SNK)
    {
        bool r22_dec, r22_enc;
        appKymeraGetA2dpCodecSettingsAptxAdaptive(codec_settings, &split_mode_enabled, &r22_dec, &r22_enc);

        theKymera->aptx_adaptive_r22_dec = r22_dec;
        theKymera->aptx_adaptive_r22_enc = r22_enc;
        theKymera->split_tx_mode = FALSE;

        /* Split is used in the following cases:
         *    R2.1 96K Only
         *    R2.2 44.1K and 96K only
         * Not applicable for:
         *    R1.x
         *    R2.1 44.1
         */
        if(split_mode_enabled)
        {
            if((theKymera->aptx_adaptive_r22_dec && (rate==SAMPLE_RATE_96000 || rate==SAMPLE_RATE_44100))||
               (theKymera->aptx_adaptive_r22_dec==FALSE && rate==SAMPLE_RATE_96000))
            {
                theKymera->split_tx_mode = TRUE;
            }
        }
    }
    else
    {
        /* Non-adaptive codecs never use R2.2 or split TX */
        theKymera->aptx_adaptive_r22_dec = FALSE;
        theKymera->aptx_adaptive_r22_enc = FALSE;
        theKymera->split_tx_mode = FALSE;
    }

    PanicZero(media_source); /* Force panic at this point as source should never be zero */

    /* Boost the DSP clock while the graph is being built */
    appKymeraBoostDspClockToMax();

    theKymera->cp_header_enabled = cp_header_enabled;
    theKymera->sink = codec_settings->sink;

    KymeraPioSet(); /* Cleared when the chain is started */
    appKymeraCreateOutputChain(seid, rate);
    appKymeraCreateInputChain(theKymera, seid);
    appKymeraConfigureInputChain(theKymera, seid,
                                 rate, max_bitrate, cp_header_enabled,
                                 nq2q_ttp);
    Kymera_CreateMusicProcessingChain();
    Kymera_ConfigureMusicProcessing(rate);
    if (seid == AV_SEID_APTX_ADAPTIVE_SNK)
    {
        Kymera_ConfigureMusicProcessing_AdditionalforAptxAdaptive();
    }
    appKymeraJoinChains(theKymera);
    theKymera->media_source = media_source;
    /* Re-evaluate the DSP clock now that the graph topology is known */
    appKymeraConfigureDspPowerMode();
}

/*! \brief Start the previously created A2DP graph at the given volume.

    The media source must have been captured during chain creation. Graph
    start is deferred to the sync-start module so playback begins in sync
    with the mirrored device.
*/
static void kymera_A2dpStartChain(int16 volume_in_db)
{
    Source media = KymeraGetTaskData()->media_source;

    PanicZero(media); /* A media source must exist by this point */

    appKymeraConfigureDspPowerMode();
    KymeraOutput_SetMainVolume(volume_in_db);

    /* Detach from any previous consumer before handing to sync-start */
    StreamDisconnect(media, 0);
    SyncStartA2dp_StartGraphWhenInSync(media);
}

/*! \brief Create and start the A2DP graph in one step.

    \return Always TRUE; panics if a chain already exists.
*/
bool Kymera_A2dpStart(const a2dp_codec_settings *codec_settings, uint32 max_bitrate, int16 volume_in_db,
                                     aptx_adaptive_ttp_latencies_t nq2q_ttp)
{
    /* There must not already be an input chain */
    PanicNotNull(KymeraGetTaskData()->chain_input_handle);

    kymera_A2dpCreateChain(codec_settings, max_bitrate, nq2q_ttp);
    kymera_A2dpStartChain(volume_in_db);
    Kymera_LeakthroughSetAecUseCase(aec_usecase_create_leakthrough_chain);
    return TRUE;
}

/*! \brief Stop the running A2DP input chain and detach its media source.

    \param source  The A2DP media source to disconnect and dispose.
*/
static void kymera_A2dpStopChain(Source source)
{
    kymeraTaskData *theKymera = KymeraGetTaskData();

    ChainStop(theKymera->chain_input_handle);

    /* Detach the source, then route any further data to the disposer */
    StreamDisconnect(source, 0);
    StreamConnectDispose(source);

    Kymera_StopMusicProcessingChain();

    /* Release any aptX adaptive TTP SSRC data captured for this stream
       (free(NULL) is a no-op, so no guard is needed) */
    free(theKymera->aptx_adaptive_ttp_ssrc);
    theKymera->aptx_adaptive_ttp_ssrc = NULL;
}

static void kymera_A2dpDestroyChain(void)
{
    kymeraTaskData *theKymera = KymeraGetTaskData();
    output_users_t output_user = appKymera_IsStereoMirroringLeftOrRight(theKymera) ? output_user_a2dp_spk_mono : output_user_a2dp;

    Kymera_OutputDisconnect(output_user);
    Kymera_DestroyMusicProcessingChain();
    ChainDestroy(theKymera->chain_input_handle);
    theKymera->chain_input_handle = NULL;
    theKymera->media_source = 0;
}

/*! \brief Stop and destroy the A2DP graph.

    \param source  The A2DP media source being stopped; panics if no input
                   chain exists.
*/
void Kymera_A2dpCommonStop(Source source)
{
    DEBUG_LOG("Kymera_A2dpCommonStop, source(%p)", source);

    /* An input chain must exist to be stopped */
    PanicNull(KymeraGetTaskData()->chain_input_handle);

    Kymera_LeakthroughSetAecUseCase(aec_usecase_default);
    kymera_A2dpStopChain(source);
    kymera_A2dpDestroyChain();
}

/*! \brief Handle the "prepare" stage of A2DP start.

    \param params  A2DP start parameters (codec settings, bitrate, latencies,
                   Q2Q mode).

    For a non-TWS sink SEID this builds the full graph and moves the state
    machine idle -> preparing -> prepared. For a source SEID it starts
    forwarding (only valid while streaming). Panics on any other SEID.
*/
void Kymera_A2dpHandlePrepareStage(const audio_a2dp_start_params_t *params)
{
    kymeraTaskData *theKymera = KymeraGetTaskData();
    uint8 seid = params->codec_settings.seid;
    uint32 rate = params->codec_settings.rate;
    uint8 q2q = params->q2q_mode;

    DEBUG_LOG("Kymera_A2dpHandlePrepareStage: enum:kymera_a2dp_state_t:%d, enum:appKymeraState:%d, seid %u, rate %u, q2q %u",
              appKymeraA2dpGetState(), appKymeraGetState(), seid, rate, q2q);

    if (appA2dpIsSeidNonTwsSink(seid))
    {
        /* Only stop Leakthrough chain with non-TWS message */
        Kymera_LeakthroughStopChainIfRunning();
        /* Must be idle with no existing input chain before preparing */
        PanicFalse(appKymeraA2dpGetState() == kymera_a2dp_idle);
        PanicNotNull(theKymera->chain_input_handle);
        theKymera->a2dp_seid = seid;
        theKymera->q2q_mode = q2q;
        appKymeraA2dpSetState(kymera_a2dp_preparing);
        kymera_A2dpCreateChain(&params->codec_settings, params->max_bitrate, params->nq2q_ttp);
        Kymera_LeakthroughSetAecUseCase(aec_usecase_create_leakthrough_chain);
        appKymeraA2dpSetState(kymera_a2dp_prepared);
        Kymera_LatencyManagerA2dpPrepare(params);
    }
    else if (appA2dpIsSeidSource(seid))
    {
        if (appKymeraA2dpGetState() == kymera_a2dp_streaming)
        {
            Kymera_A2dpStartForwarding(&params->codec_settings);
            appKymeraA2dpSetState(kymera_a2dp_forwarding);
        }
        else
        {
            /* Ignore attempts to start forwarding in the wrong state */
            DEBUG_LOG("Kymera_A2dpHandlePrepareStage: Ignoring start forwarding");
        }
    }
    else
    {
        /* Unsupported SEID, control should never reach here */
        Panic();
    }
}

/*! \brief Handle the "start" stage of A2DP start.

    \param seid          Stream endpoint ID.
    \param volume_in_db  Initial playback volume.

    For a non-TWS sink SEID the prepared graph is started (prepared ->
    starting -> streaming). Source SEIDs are a no-op because forwarding
    completes at the "prepare" stage. Any other SEID panics.
*/
void Kymera_A2dpHandleStartStage(uint8 seid, int16 volume_in_db)
{
    kymeraTaskData *theKymera = KymeraGetTaskData();

    DEBUG_LOG("Kymera_A2dpHandleStartStage: enum:kymera_a2dp_state_t:%d, enum:appKymeraState:%d, seid %u",
              appKymeraA2dpGetState(), appKymeraGetState(), seid);

    if (appA2dpIsSeidNonTwsSink(seid))
    {
        /* A prepared chain must exist before it can be started */
        PanicFalse(appKymeraA2dpGetState() == kymera_a2dp_prepared);
        PanicNull(theKymera->chain_input_handle);

        appKymeraA2dpSetState(kymera_a2dp_starting);
        kymera_A2dpStartChain(volume_in_db);
        appKymeraA2dpSetState(kymera_a2dp_streaming);
        Kymera_LatencyManagerA2dpStart(volume_in_db);
        return;
    }

    if (appA2dpIsSeidSource(seid))
    {
        /* Forwarding is completed at the "prepare" stage */
        return;
    }

    /* Unsupported SEID, control should never reach here */
    Panic();
}

/*! \brief Handle an internal A2DP stop request.

    \param msg  Stop message carrying the SEID and the media source.

    For sink SEIDs the switch intentionally falls through so that each state
    performs its own teardown plus everything the "lower" states need:
    forwarding -> stop forwarding, streaming -> stop chain,
    prepared -> destroy chain and return to idle. For source SEIDs only
    forwarding is stopped. Any other SEID panics.
*/
void Kymera_A2dpHandleInternalStop(const KYMERA_INTERNAL_A2DP_STOP_T *msg)
{
    kymeraTaskData *theKymera = KymeraGetTaskData();
    uint8 seid = msg->seid;

    DEBUG_LOG("Kymera_A2dpHandleInternalStop: enum:kymera_a2dp_state_t:%d, enum:appKymeraState:%d, seid %u",
              appKymeraA2dpGetState(), appKymeraGetState(), seid);

    if (appA2dpIsSeidNonTwsSink(seid) || appA2dpIsSeidTwsSink(seid))
    {
        switch (appKymeraA2dpGetState())
        {
            case kymera_a2dp_forwarding:
                /* Pass invalid source, since the source from this msg is _not_
                   the forwarding source. Tidy up the actual forwarding source
                   when the KYMERA_INTERNAL_A2DP_STOP is received with source seid
                   below */
                Kymera_A2dpStopForwarding(0);
                // Fall-through

            case kymera_a2dp_streaming:
                kymera_A2dpStopChain(msg->source);
                // Fall-through

            case kymera_a2dp_prepared:
                Kymera_LeakthroughSetAecUseCase(aec_usecase_default);
                /* Keep framework enabled until after DSP clock update */
                OperatorsFrameworkEnable();
                kymera_A2dpDestroyChain();
                theKymera->a2dp_seid = AV_SEID_INVALID;
                appKymeraA2dpSetState(kymera_a2dp_idle);
                /* Update DSP clock */
                appKymeraConfigureDspPowerMode();
                Kymera_LatencyManagerA2dpStop();
                Kymera_LeakthroughResumeChainIfSuspended();
                /* Corresponds to the enable used for the DSP clock update above */
                OperatorsFrameworkDisable();
            break;

            default:
                // Report, but ignore attempts to stop in invalid states
                /* In short lived MDM prim cases, the stream data need to be discarded
                   as DSP is not started by this time */
                DEBUG_LOG("Kymera_A2dpHandleInternalStop: Invalid state");
                if(msg->source) 
                {
                   DEBUG_LOG("Kymera_A2dpHandleInternalStop, disposing msg->source %p, theKymera->media_source %p", msg->source, theKymera->media_source);
                   StreamDisconnect(msg->source, 0);
                   StreamConnectDispose(msg->source);
                }
            break;
        }
    }
    else if (appA2dpIsSeidSource(seid))
    {
        if (appKymeraA2dpGetState() == kymera_a2dp_forwarding)
        {
            Kymera_A2dpStopForwarding(msg->source);
            appKymeraA2dpSetState(kymera_a2dp_streaming);
        }
        else
        {
            /* Clean up the forwarding source - see comment above */
            StreamDisconnect(msg->source, 0);
            StreamConnectDispose(msg->source);
        }
    }
    else
    {
        /* Unsupported SEID, control should never reach here */
        Panic();
    }

#ifdef INCLUDE_MUSIC_PROCESSING
    /* EQ is no longer available once A2DP stops */
    UserEq_NotificationEqIsAvailable(FALSE);
#endif
}


/*! \brief Handle an internal volume change request.

    \param volume_in_db  New main volume; ignored unless A2DP is streaming.
*/
void Kymera_A2dpHandleInternalSetVolume(int16 volume_in_db)
{
    DEBUG_LOG("Kymera_A2dpHandleInternalSetVolume, vol %d", volume_in_db);

    if (!Kymera_A2dpIsStreaming())
    {
        return;
    }

    KymeraOutput_SetMainVolume(volume_in_db);
    Kymera_LatencyManagerHandleA2dpVolumeChange(volume_in_db);
}

/*! \brief Enable or disable the left/right mix in the input chain mixer.

    \param stereo_lr_mix  TRUE to mix both channels, FALSE for 100% L or R.

    The mixer is reconfigured only when the setting actually changes and
    A2DP is currently streaming; the new value is always stored.
*/
void appKymeraSetStereoLeftRightMix(bool stereo_lr_mix)
{
    kymeraTaskData *theKymera = KymeraGetTaskData();

    DEBUG_LOG("appKymeraSetStereoLeftRightMix, %d", stereo_lr_mix);

    if (theKymera->enable_left_right_mix == stereo_lr_mix)
    {
        /* Setting unchanged: nothing to reconfigure */
        return;
    }

    theKymera->enable_left_right_mix = stereo_lr_mix;

    if (Kymera_A2dpIsStreaming())
    {
        appKymeraSetLeftRightMixerMode(theKymera->chain_input_handle, stereo_lr_mix,
                                       appKymera_IsStereoMirroringLeft(theKymera));
    }
}

/* This function is called when audio synchronisation messages should be
   transmitted to or received from the other earbud */
void Kymera_A2dpStartForwarding(const a2dp_codec_settings *codec_settings)
{
    kymeraTaskData *theKymera = KymeraGetTaskData();
    SyncStartA2dp_SetSyncOperatorId(ChainGetOperatorByRole(theKymera->chain_input_handle, OPR_RTP_DECODER));
    SyncStartA2dp_StartMirroring(codec_settings->sink);
}

/*! \brief Stop exchanging audio synchronisation messages with the other
    earbud.

    \param source  The forwarding source being stopped (may be 0 - see
                   Kymera_A2dpHandleInternalStop).
*/
void Kymera_A2dpStopForwarding(Source source)
{
    DEBUG_LOG("Kymera_A2dpStopForwarding");
    SyncStartA2dp_StopMirroring(source);
}

/*! \brief Change the TWM audio type (mono/stereo, mirroring/standalone).

    \param audio_type           New audio type to apply.
    \param is_toggle_party_mode TRUE when the change is a mono<->stereo
                                ("party mode") toggle while streaming, in which
                                case audio is re-synchronised after restart.

    If audio is currently routed the chain is stopped, the type updated, and
    the chain restarted (the type update must happen after stop because the
    output chain configuration depends on it).
*/
void Kymera_A2dpHandleSetAudioType(appKymeraAudioType audio_type, bool is_toggle_party_mode)
{
    bool isAudioRouted = Kymera_A2dpIsRouted();
    kymeraTaskData *theKymera = KymeraGetTaskData();

    /* if a2dp is streaming, then first stop it */
    if(isAudioRouted)
    {
        DEBUG_LOG("Kymera_A2dpHandleSetAudioType : Audio was being routed");

        KYMERA_INTERNAL_A2DP_STOP_T stop_param;
        /* NOTE(review): a2dp_start_params is dereferenced unconditionally -
           presumably guaranteed non-NULL whenever audio is routed; confirm
           with the latency manager */
        KYMERA_INTERNAL_A2DP_START_T start_params = *KymeraGetLatencyData()->a2dp_start_params;
        stop_param.seid = theKymera->a2dp_seid;
        stop_param.source = theKymera->media_source;
        /* first stop the a2dp chain so that it can be re-configured */
        Kymera_A2dpHandleInternalStop(&stop_param);
        /* Need to update type only after stopping the chain. This has dependency on output chain*/
        theKymera->audio_type = audio_type;
        switch(theKymera->audio_type)
        {
            case KYMERA_AUDIO_MIRROR_STEREO:
            case KYMERA_AUDIO_MIRROR_MONO_LEFT:
            case KYMERA_AUDIO_MIRROR_MONO_RIGHT:
            case KYMERA_AUDIO_STANDALONE_STEREO:
                {
                    /* do we have any active AG streaming to start a2dp chains? */
                    if(AudioSources_IsAudioRouted(audio_source_a2dp_1) || AudioSources_IsAudioRouted(audio_source_a2dp_2))
                    {
                        Kymera_A2dpHandleInternalStart(&start_params);
                        /* audio needs to be synchronized if there is change from mono to stereo or vice-versa when
                         * audio was already streaming. This will unmute secondary speaker to play audio accordingly */
                        if(is_toggle_party_mode)
                        {
                            MirrorProfile_StartA2dpAudioSynchronisation();
                        }
                    }
                }
                break;
            default:
                Panic();
        }
    }
    else
    {
        /* No audio is routed, just update the type */
        theKymera->audio_type = audio_type;
    }
}


void Kymera_A2dpInit(void)
{
    kymeraTaskData *theKymera = KymeraGetTaskData();
    /* At startup TWM speaker works as standalone stereo */
    theKymera->audio_type = KYMERA_AUDIO_STANDALONE_STEREO;

    /* Need to register both users with output manager, because in case of TWM speaker
       there is use-case to dynamically switch b/w mono and stereo playback */
    Kymera_OutputRegister(&output_info_stereo_mirror);
    Kymera_OutputRegister(&output_info_mono_mirror);
}

#endif /* defined(INCLUDE_MIRRORING) && defined(ENABLE_TWM_STEREO) */
