﻿
#ifdef WEBRTC_ANDROID
#include <android/log.h>
// Android logcat tag and logging helpers (info / error level).
#define API_LOG_TAG "mywebrtc"
#define ALOGI(...) \
  __android_log_print(ANDROID_LOG_INFO, API_LOG_TAG, __VA_ARGS__)
#define ALOGE(...) \
  __android_log_print(ANDROID_LOG_ERROR, API_LOG_TAG, __VA_ARGS__)
#endif

// Trace helpers: log the current function name and line number.
// NOTE(review): off Android, ALOGI/ALOGE are undefined, so any use of
// APOS()/AERR() outside WEBRTC_ANDROID relies on the platform providing
// equivalent macros — confirm for non-Android builds.
#define APOS() ALOGI("%s %d", __FUNCTION__, __LINE__)
#define AERR() ALOGE("%s %d", __FUNCTION__, __LINE__)

#include "ua_webrtc.h"

//#include "UALog.h"

// WEBRTC_SetLocalRecv(rtptrans, localport): bind a channel transport to a
// local RTP port. The disabled (#if 0) variant probed up to 50 even-numbered
// ports starting at `localport`, read back the actually-bound port into
// `localport`, and set ToS value 46 on the UDP transport; the active variant
// simply binds the requested port and ignores the result.
#if 0
#define WEBRTC_SetLocalRecv(rtptrans, localport)                   \
  {                                                                \
    for (int i = 0; i < 100; i += 2) {                             \
      if (0 == (rtptrans)->SetLocalReceiver(localport + i))        \
        break;                                                     \
    }                                                              \
    uint16_t rtpport, rtcpport;                                    \
    (rtptrans)->getUdpTransport()->SourcePorts(rtpport, rtcpport); \
    (rtptrans)->getUdpTransport()->SetToS(46, true);               \
    localport = rtpport;                                           \
  }
#else
#define WEBRTC_SetLocalRecv(rtptrans, localport) \
  { (rtptrans)->SetLocalReceiver(localport); }
#endif

// Release a WebRTC sub-interface pointer and null it out (safe on NULL).
#define WEBRTC_API_RELEASE(ptr) \
  {                             \
    if (ptr) {                  \
      (ptr)->Release();         \
      ptr = NULL;               \
    }                           \
  }
// Delete a heap object and null the pointer (safe on NULL).
#define WEBRTC_SafeDelete(p) \
  {                          \
    if (p) {                 \
      delete p;              \
      p = NULL;              \
    }                        \
  }

#ifdef WEBRTC_ANDROID
// Whether the Android video objects have been registered with the engine
// (see webrtc_api_setvidobj / webrtc_api_getvidobj).
bool g_vidobj_setted = false;
// Java VM pointer cached on the first call to webrtc_api_setvidobj().
void* g_myjvm = NULL;
#elif defined(WEBRTC_IOS)
// Defined elsewhere; presumably selects the IP address family on iOS —
// TODO(review): confirm against the defining translation unit.
extern int g_ua_iptype;
#endif

// Caches the Java VM handle for later use. The actual registration of the
// Android video objects with the engine is currently disabled, so the call
// always reports success. A no-op on non-Android builds.
int webrtc_api_setvidobj(void* javaVM)  // set permission
{
#ifdef WEBRTC_ANDROID
  ALOGI("webrtc_api_setvidobj g_vidobj_setted = %d", g_vidobj_setted);

  // Remember only the first VM handle we are given.
  if (g_myjvm == NULL)
    g_myjvm = javaVM;

  if (g_vidobj_setted)
    return 0;

  /* Registration is currently disabled:
  if(webrtc::VideoEngine::SetAndroidObjects((JavaVM*)g_myjvm) >= 0)
  {
      g_vidobj_setted = true;
      return 0;
  }
  else
      return -1; */
  return 0;
#else
  return 0;
#endif
}

// Reports whether the Android video objects have been registered.
// Platforms that need no registration always report true.
bool webrtc_api_getvidobj() {
#ifdef WEBRTC_ANDROID
  return g_vidobj_setted;
#else
  return true;
#endif
}

// Hands the Java VM and application context to the voice engine so it can
// use the platform audio path (OpenSL ES, per the original note).
// `env` is accepted but not forwarded.
void webrtc_api_setobj(void* javaVM, void* env, void* context) {
  webrtc::VoiceEngine::SetAndroidObjects(javaVM, context);  // use opensles
}

// Clears the Java VM/context previously registered with the voice engine.
void webrtc_api_clearobj() {
  webrtc::VoiceEngine::SetAndroidObjects(NULL, NULL);
}

namespace webrtc {

#if 0
WEBRTCAPI *WEBRTCAPI::create()
{
    return new  WEBRTCIMPL();
}

void WEBRTCAPI::destroy(WEBRTCAPI *module)
{
    if (module) delete module;
}
#endif

// Constructor: nulls every engine/interface pointer and sets the default
// render/recording flags. Interface pointers are acquired later in
// webrtc_api_init(); `cb` is kept for webrtc_api_trace().
WEBRTCIMPL::WEBRTCIMPL(TraceCallback* cb) {
  // Engine handles.
  voe = NULL;
  vie = NULL;

  // Voice-engine sub-interfaces.
  audio_base = NULL;
  audio_network = NULL;
  audio_rtprtcp = NULL;
  audio_codec = NULL;
  audio_ap = NULL;
  audio_hardware = NULL;
  audio_dtmf = NULL;
  audio_vcon = NULL;
  audio_neteqstats = NULL;
  // BUGFIX: audio_file was the only interface pointer left uninitialized;
  // webrtc_api_exit() and webrtc_api_record() read it, so an early exit
  // before init could dereference/release a garbage pointer.
  audio_file = NULL;

  // Video-engine sub-interfaces.
  video_base = NULL;
  video_capture = NULL;
  video_render = NULL;
  video_network = NULL;
  video_rtprtcp = NULL;
  video_codec = NULL;
  video_image = NULL;

#ifdef WEBRTC_ANDROID
  // rotate_ = RotateCapturedFrame_270;
  mirror_local_render = false;
  mirror_remote_render = false;
#else
  // rotate_ = RotateCapturedFrame_0;
  mirror_local_render = true;
  mirror_remote_render = true;
#endif

  // mirror_camera = false;

  m_AutoVideoSize = 0;

  // cameraW = 640;
  // cameraH = 480;

  cbPrint = cb;

  m_recording = false;

  is_audio_start_received = false;
  is_audio_start_sended = false;
  is_video_start_received = false;
  is_video_start_sended = false;
}

// Trivial destructor: interface teardown happens in webrtc_api_exit().
WEBRTCIMPL::~WEBRTCIMPL(void) {}

int WEBRTCIMPL::webrtc_api_init(void* javaVM, void* env, void* context) {
  if (voe || vie)
    return -1;

#if defined(WEBRTC_IOS)
  voe = VoiceEngine::Create();
#else
  /*bool FLAGS_use_acm_version_2 = true;
  Config *config = new Config();
  config->Set<AudioCodingModuleFactory>(FLAGS_use_acm_version_2 ?
                                        new NewAudioCodingModuleFactory() : new
  AudioCodingModuleFactory()); voe = VoiceEngine::Create(*config);*/
  vie = VideoEngine::Create();
#endif
  vie = VideoEngine::Create();

  if (!voe || !vie) {
    ALOGE("create voe or vie error!");
    return -1;
  }

  webrtc_api_trace(false);

  if (voe != NULL) {
    audio_base = VoEBase::GetInterface(voe);
    if (audio_base)
      audio_base->Init();
    else
      return -1;

    audio_ap = VoEAudioProcessing::GetInterface(voe);
    audio_hardware = VoEHardware::GetInterface(voe);
    audio_codec = VoECodec::GetInterface(voe);
    audio_network = VoENetwork::GetInterface(voe);
    audio_dtmf = VoEDtmf::GetInterface(voe);
    audio_vcon = VoEVolumeControl::GetInterface(voe);
    audio_rtprtcp = VoERTP_RTCP::GetInterface(voe);
    audio_neteqstats = VoENetEqStats::GetInterface(voe);
    audio_file = VoEFile::GetInterface(voe);
  }

  if (vie != NULL) {
    video_base = ViEBase::GetInterface(vie);
    if (video_base)
      video_base->Init();
    else
      return -2;

    video_render = ViERender::GetInterface(vie);
    video_codec = ViECodec::GetInterface(vie);
    video_image = ViEImageProcess::GetInterface(vie);
    video_rtprtcp = ViERTP_RTCP::GetInterface(vie);
    video_network = ViENetwork::GetInterface(vie);
    video_capture = ViECapture::GetInterface(vie);
  }

  return 0;
}

// Releases every sub-interface and destroys both engines.
// Release order: video interfaces first, then the audio device reset, audio
// interfaces, VoEBase::Terminate(), and finally the engines themselves.
// Returns 0 on success, -1 if the engines were never created.
int WEBRTCIMPL::webrtc_api_exit() {
  if (!voe || !vie)
    return -1;

  WEBRTC_API_RELEASE(video_image);
  WEBRTC_API_RELEASE(video_rtprtcp);
  WEBRTC_API_RELEASE(video_network);
  WEBRTC_API_RELEASE(video_codec);
  WEBRTC_API_RELEASE(video_render);
  WEBRTC_API_RELEASE(video_capture);
  WEBRTC_API_RELEASE(video_base);

  // Restore the platform audio device before dropping the hardware interface.
  if (audio_hardware)
    audio_hardware->ResetAudioDevice();

  WEBRTC_API_RELEASE(audio_file);
  WEBRTC_API_RELEASE(audio_neteqstats);
  WEBRTC_API_RELEASE(audio_hardware);
  WEBRTC_API_RELEASE(audio_codec);
  WEBRTC_API_RELEASE(audio_network);
  WEBRTC_API_RELEASE(audio_ap);
  WEBRTC_API_RELEASE(audio_dtmf);
  WEBRTC_API_RELEASE(audio_rtprtcp);
  WEBRTC_API_RELEASE(audio_vcon);

  // Terminate the voice engine core before releasing its base interface.
  if (audio_base)
    audio_base->Terminate();
  WEBRTC_API_RELEASE(audio_base);

  webrtc_api_trace(false);

  // Per the original note, Delete() nulls the pointer it is given, so voe
  // and vie are reset here and webrtc_api_init() can run again.
  VoiceEngine::Delete(voe);  //  voe is null inside
  VideoEngine::Delete(vie);  //  vie is null inside

  return 0;
}

// Enables or disables engine tracing.
// flag == true installs cbPrint as the trace callback on both engines and
// limits traces to error/critical level.
// Returns 0 on success, -1 if the engines are not created yet.
int WEBRTCIMPL::webrtc_api_trace(bool flag) {
  if (!voe || !vie)
    return -1;

  if (flag) {
    vie->SetTraceCallback(cbPrint);
    vie->SetTraceFilter(kTraceError | kTraceCritical);
    voe->SetTraceCallback(cbPrint);
    voe->SetTraceFilter(kTraceError | kTraceCritical);
  } else {
    // NOTE(review): the disable path only clears the trace *file*; it does
    // not remove the callback or reset the filter — confirm intentional.
    voe->SetTraceFile(NULL);
    vie->SetTraceFile(NULL);
  }
  return 0;
}

/*
 * Creates an audio or video channel.
 * media_type: M_AUDIO or M_VIDEO.
 * chn: out-parameter receiving the new channel id (-1 on failure).
 * Returns 0 on success, -1 on failure (missing interface, unknown media
 * type, or engine-level failure).
 */
int WEBRTCIMPL::webrtc_api_createchannel(int media_type, int& chn) {
  // BUGFIX: start from an invalid id so a missing interface or an unknown
  // media type is reported as failure instead of the `chn < 0` check below
  // reading whatever stale value the caller passed in.
  chn = -1;

  if (media_type == M_AUDIO) {
    if (audio_base)
      chn = audio_base->CreateChannel();
    // ALOGI("CREATE  ~~~~~");
  } else if (media_type == M_VIDEO) {
    if (video_base)
      video_base->CreateChannel(chn);
  }

  if (chn < 0)
    return -1;

  return 0;
}

// Deletes an audio or video channel and invalidates the caller's handle.
// A handle of -1 means "already released" and is ignored. Always returns 0.
int WEBRTCIMPL::webrtc_api_deletechannel(int media_type, int& chn) {
  // Nothing to do for an already-released channel handle.
  if (chn == -1)
    return 0;

  if (media_type == M_AUDIO) {
    if (audio_base)
      audio_base->DeleteChannel(chn);
    chn = -1;
  } else if (media_type == M_VIDEO) {
    if (video_base)
      video_base->DeleteChannel(chn);
    chn = -1;
  }

  return 0;
}

int WEBRTCIMPL::webrtc_api_createtrans(int chn,
                                       int media_type,
                                       void*& trans,
                                       int& localport) {
  test::VoiceChannelTransport* audio_rtpTransport;
  test::VideoChannelTransport* video_rtpTransport;

  if (media_type == M_AUDIO) {
    audio_rtpTransport = new test::VoiceChannelTransport(audio_network, chn);
    WEBRTC_SetLocalRecv(audio_rtpTransport, localport);
    trans = audio_rtpTransport;
    ALOGI("audio localport = %d trans = %p", localport, trans);
  }

  if (media_type == M_VIDEO) {
    video_rtpTransport = new test::VideoChannelTransport(video_network, chn);
    WEBRTC_SetLocalRecv(video_rtpTransport, localport);
    trans = video_rtpTransport;
    ALOGI("video localport = %d trans = %p", localport, trans);
  }

  return 0;
}

int WEBRTCIMPL::webrtc_api_deletetrans(int media_type,
                                       void* trans,
                                       int& localport) {
  if (media_type == M_AUDIO) {
    test::VoiceChannelTransport* audio_rtpTransport =
        (test::VoiceChannelTransport*)trans;
    WEBRTC_SafeDelete(audio_rtpTransport);
    localport = 20000;
  }

  if (media_type == M_VIDEO) {
    test::VideoChannelTransport* video_rtpTransport =
        (test::VideoChannelTransport*)trans;
    WEBRTC_SafeDelete(video_rtpTransport);
    localport = 21000;
  }

  return 0;
}

/*
 * Starts, stops, or restarts rendering of video stream `id` into the native
 * window `window`.
 * act_type: M_START, M_STOP, or M_RESTART (stop + remove, then re-add with
 * the new window).
 * Returns the result of the last render call, or -1 on a NULL window /
 * missing render interface.
 * Fix: removed the unused local `mirror` — it was only referenced by the
 * commented-out MirrorRenderStream call.
 */
int WEBRTCIMPL::webrtc_api_render(int act_type, int id, void* window) {
  if (!video_render) {
    printf("webrtc_api_render error : video_render not init\n");
    return -1;
  }

  int ret = -1;

  if (act_type == M_START) {
    if (window == NULL) {
      AERR();
      return -1;
    }

    // Full-surface renderer: z-order 0, normalized rect (0,0)-(1,1).
    ret = video_render->AddRenderer(id, window, 0, 0.0, 0.0, 1.0, 1.0);
    // Mirroring is currently disabled; when re-enabled, local streams
    // (id > 256) should use mirror_local_render, others mirror_remote_render:
    // ret = video_render->MirrorRenderStream(id, true, false, mirror);
    ret = video_render->StartRender(id);
  } else if (act_type == M_STOP) {
    ret = video_render->StopRender(id);
    ret = video_render->RemoveRenderer(id);
  } else if (act_type == M_RESTART) {
    if (window == NULL) {
      ALOGI("退出webrtc_api_view  action = %d ret = %d", act_type, ret);
      return -1;
    }

    // Tear the old renderer down before attaching the new window.
    ret = video_render->StopRender(id);
    ret = video_render->RemoveRenderer(id);

    ret = video_render->AddRenderer(id, window, 0, 0.0, 0.0, 1.0, 1.0);
    ret = video_render->StartRender(id);
  }

  return ret;
}

// Camera lifecycle control.
// act_type selects the operation; `pInfo` is interpreted per operation:
//   M_START          pInfo -> PicInfo* (width/height/fps)
//   M_STOP           pInfo unused
//   CAMERA_ALLOC     pInfo -> int* device index; captureId is an out-param
//   CAMERA_RELEASE   pInfo unused
//   CAMERA_CONNECT   pInfo -> int* video channel id
//   CAMERA_DISCONNECT pInfo -> int* video channel id
// Returns the engine result of the last call, -1 on missing interface or
// unhandled act_type.
int WEBRTCIMPL::webrtc_api_camera(int act_type, int& captureId, void* pInfo) {
  if (!video_capture) {
    printf("webrtc_api_camera error : video_capture not init\n");
    return -1;
  }

  int ret = -1;
  // ALOGI("enter webrtc_api_camera  action = %d", act_type);
  /*if(act_type != CAMERA_ALLOC)
  {
      if(camerapermit()== false)
      {
         ALOGI("camera  not open, return directly");
              return -1;
      }
  }*/

  // static bool bStart = false, bConnect = false, bAlloc = false;
  if (act_type == M_START) {
    PicInfo* pPic = (PicInfo*)pInfo;
    // if(bStart)  return 0;
    // ALOGI("zzzz w = %d h = %d rotate_=%d\n", cameraW, cameraH, rotate_);

    // Capture in raw I420 at the caller-requested resolution and frame rate.
    CaptureCapability capture_capability;
    capture_capability.width = pPic->w;
    capture_capability.height = pPic->h;
    capture_capability.maxFPS = pPic->fps;
    capture_capability.rawType = kVideoI420;
    capture_capability.codecType = kVideoCodecI420;
    // ret = video_capture->SetRotateCapturedFrames(captureId,
    // (RotateCapturedFrame)pPic->rotate); ret =
    // video_capture->MirrorIncomingStream(captureId, true, false,
    // pPic->mirror_camera);

    ret = video_capture->StartCapture(captureId, capture_capability);
    // bStart = true;
  } else if (act_type == M_STOP) {
    // if(!bStart)  return 0;
    ret = video_capture->StopCapture(captureId); /* repeated stop should be
                                                    harmless */
                                                 // bStart = false;
  } else if (act_type == CAMERA_ALLOC) {
    // Look up the device at the requested index, then allocate it; the
    // engine assigns captureId.
    int* pIndex = (int*)pInfo;
    char deviceName[64];
    char deviceUniqueName[640];
    memset(deviceName, 0, sizeof(deviceName));
    memset(deviceUniqueName, 0, sizeof(deviceUniqueName));
    ret = video_capture->GetCaptureDevice(*pIndex, deviceName,
                                          sizeof(deviceName), deviceUniqueName,
                                          sizeof(deviceUniqueName));
    // ALOGI("GetCaptureDevice ret = %d", ret);
    ret = video_capture->AllocateCaptureDevice(
        deviceUniqueName, sizeof(deviceUniqueName), captureId);
    // bAlloc = true;
    // ALOGI("AllocateCaptureDevice ret = %d captureId = %d", ret, captureId);
  } else if (act_type == CAMERA_RELEASE) {
    // if(!bAlloc)  return 0;
    ret = video_capture->ReleaseCaptureDevice(captureId);
    // bAlloc = false;
  } else if (act_type == CAMERA_CONNECT) {
    if (captureId < 0)
      return -1;

    int* pChn = (int*)pInfo;
    // int 	video_channelid = *pIndex;

    ret = video_capture->ConnectCaptureDevice(captureId, *pChn);
    // bConnect = true;
    // ALOGI("==== ConnectCaptureDevice  vchn=%d ret=%d",  *pChn ,ret);
  } else if (act_type == CAMERA_DISCONNECT) {
    // if(captureId < 0)
    //		return -1;

    // if(!bConnect)  return 0;
    int* pChn = (int*)pInfo;
    // int video_channelid = *pIndex;

    ret = video_capture->DisconnectCaptureDevice(*pChn);
    // bConnect = false;
    // ALOGI("==== DisconnectCaptureDevice  vchn = %d ret=%d",  *pChn ,ret);
  }

  // ALOGI("exit webrtc_api_camera  action = %d  ret = %d", act_type, ret );
  return ret;
}

/*
module: codec, rtprtcp, or other
direction: 0: receive, 1: send
info: remote info, eg. ip, port, codec
NOTE: capture is not in these module,as so many operation
         not exist audio receive codec
*/

int WEBRTCIMPL::webrtc_api_settrans(void* rtpTransport,
                                    int module,
                                    TransInfo* info) {
  if (rtpTransport == NULL) {
    ALOGI("rtpTransport = NULL");
    return -1;
  }

  test::VoiceChannelTransport* audio_rtpTransport;
  test::VideoChannelTransport* video_rtpTransport;

  if (module == M_AUDIO) {
    TransInfo* rmtAud = info;
    audio_rtpTransport = (test::VoiceChannelTransport*)rtpTransport;
    // LOGI("audio remote ip = %s  remote port = %d", rmtAud->ip, rmtAud->port);
    audio_rtpTransport->SetSendDestination(rmtAud->ip, rmtAud->port);
  } else if (module == M_VIDEO) {
    TransInfo* rmtVid = info;
    video_rtpTransport = (test::VideoChannelTransport*)rtpTransport;
    video_rtpTransport->SetSendDestination(rmtVid->ip, rmtVid->port);
    // LOGI("video remote ip = %s  remote port = %d", rmtVid->ip, rmtVid->port);
  }

  return 0;
}

/*
 * Configures the send/receive codec for a channel.
 * type: M_AUDIO (info -> int* payload type) or M_VIDEO
 *       (M_ENC: info -> CodecInfo*; M_DEC: info -> int* payload type).
 * direction: M_ENC (send side) or M_DEC (receive side).
 * Returns 0 on success, -1 when the codec interface is missing or (audio
 * encode) the requested payload type is unknown.
 */
int WEBRTCIMPL::webrtc_api_setcodec(int channelid,
                                    int type,
                                    int direction,
                                    void* info) {
  if (type == M_AUDIO) {
    if (!audio_codec) {
      printf("%s line %d error : audio_codec not init\n", myfn, myln);
      return -1;
    }

    int* pplType = (int*)info;

    if (direction == M_ENC) {
      int cn = audio_codec->NumOfCodecs();
      CodecInst inst;
      bool found = false;
      for (int i = 0; i < cn; i++) {
        audio_codec->GetCodec(i, inst);
        if (inst.pltype == *pplType) {
          found = true;
          break;
        }
      }

      // BUGFIX: previously an unknown payload type silently fell through and
      // applied whatever codec was enumerated last (or an *uninitialized*
      // CodecInst when the codec list was empty).
      if (!found)
        return -1;

      if (inst.pltype == 103) {
        inst.rate = 16000;  // force 16 kbps for payload type 103
      }

      audio_codec->SetSendCodec(channelid, inst);
      audio_codec->SetRecPayloadType(channelid, inst);
    } else if (direction == M_DEC) {
      /* No decode-side audio configuration (original left this empty and
         asked "why is there nothing here?"). */
    }
  } else if (type == M_VIDEO) {
    if (!video_codec) {
      printf("%s line %d error : video_codec not init\n", myfn, myln);
      return -1;
    }

    CodecInfo* rmtVid = (CodecInfo*)info;
    if (direction == M_ENC) {
      ALOGI(
          "%s line %d dst: "
          "plname=%s,payloadtype=%d,reqW=%d,reqH=%d,reqMaxkbps=%d",
          __FUNCTION__, __LINE__, rmtVid->name, rmtVid->pltype, rmtVid->w,
          rmtVid->h, rmtVid->maxkbps);

      // Find the engine's H264 template and overwrite it with the remote
      // peer's parameters.
      VideoCodec vi;
      for (int i = 0; i < video_codec->NumberOfCodecs(); i++) {
        video_codec->GetCodec(i, vi);
        if (strcmp(vi.plName, "H264") == 0) {
          vi.plType = rmtVid->pltype;
          vi.width = rmtVid->w;
          vi.height = rmtVid->h;
          vi.maxFramerate = rmtVid->fps;

          vi.maxBitrate = rmtVid->maxkbps;
          vi.minBitrate = rmtVid->minkbps;
          vi.startBitrate = rmtVid->startkbps;
          // vi.handle_decWh_fun = rmtVid->handle_decWh_fun;
          // vi.m_AutoVideoSize = m_AutoVideoSize;   // zth add

          ALOGI(
              "chn=%d send codec inst.plname=%s,  inst.pltype=%d w=%d h=%d "
              "fps=%d maxbr=%d minbr=%d startbr=%d",
              channelid, vi.plName, vi.plType, vi.width, vi.height,
              vi.maxFramerate, vi.maxBitrate, vi.minBitrate, vi.startBitrate);

          video_codec->SetSendCodec(channelid, vi);
          break;
        }
      }
    } else if (direction == M_DEC)  //  receive
    {
      /* ELCC: update the decoder's codec id. */
      int* pplType = (int*)info;

      VideoCodec vi;
      for (int i = 0; i < video_codec->NumberOfCodecs(); i++) {
        video_codec->GetCodec(i, vi);
        if (strcmp(vi.plName, "H264") == 0) {
          vi.plType = *pplType;
          video_codec->SetReceiveCodec(channelid, vi);
          // LOGI("recv codec inst.plname=%s, inst.pltype=%d", vi.plName,
          // vi.plType);
          break;
        }
      }
    }
  }
  return 0;
}

// Grab-bag configuration entry point, dispatched on `module`:
//   AUD_PROC     audio processing (AGC/NS/AEC); `direction` selects a preset:
//                0 = default tuning, 1-4 = AGC modes, 5 = AGC target level
//                (info -> numeric string), 6-13 = EC/AECM modes,
//                14 = delay offset (info -> numeric string).
//   VID_RTPRTCP  RTP/RTCP extensions; direction M_SEND configures send-side
//                header extensions, anything else configures receive side.
//   AUD_HARDW    selects recording device 0.
//   AUD_RTPRTCP  assigns a random local SSRC.
//   VID_IMAGE    enables color enhancement.
//   VID_NETW     sets the MTU to 1200.
//   AUD_DTMF     sets the telephone-event payload type to 101.
// Returns 0 or the first failing engine call's result (VID_RTPRTCP returns
// only the *last* call's result).
// NOTE(review): the interface pointers (audio_ap, video_rtprtcp, ...) are
// dereferenced without NULL checks — callers must init first.
int WEBRTCIMPL::webrtc_api_set(int channelid,
                               int module,
                               int direction,
                               void* info) {
  int ret = 0;

  if (module == AUD_PROC) {
    ALOGI("VideoActivity AUD_PROC direction(%d)", direction);

    if (0 == direction) {
#if 0
            int ret = audio_ap->SetAgcStatus(false, kAgcAdaptiveAnalog);
#else
      // Default preset: adaptive-digital AGC with a 3 dBOv target level.
      ret = audio_ap->SetAgcStatus(true, kAgcAdaptiveDigital);
      if (ret < 0)
        return ret;

      AgcConfig stConfig;
      memset(&stConfig, 0, sizeof(stConfig));
      ret = audio_ap->GetAgcConfig(stConfig);
      if (ret < 0)
        return ret;
      stConfig.targetLeveldBOv = 3;
      ret = audio_ap->SetAgcConfig(stConfig);
      if (ret < 0)
        return ret;
#endif
      // Strong noise suppression + mobile echo control (AECM) + HPF.
      ret = audio_ap->SetNsStatus(true, kNsVeryHighSuppression);
      if (ret < 0)
        return ret;
      ret = audio_ap->SetEcStatus(true, kEcAecm);
      if (ret < 0)
        return ret;
#if 0
            audio_ap->SetAecmMode(kAecmQuietEarpieceOrHeadset, true);
#else
      ret = audio_ap->SetAecmMode(kAecmSpeakerphone, true);
      if (ret < 0)
        return ret;
#endif
      ret = audio_ap->EnableHighPassFilter(true);
      if (ret < 0)
        return ret;

      bool enabled;
      AgcModes mode;
      ret = audio_ap->GetAgcStatus(enabled, mode);
      if (ret < 0)
        return ret;

      ALOGI(
          "VideoActivity SetAgcStatus direction(%d) ret(%d) GetAgcStatus (%d, "
          "%d)",
          direction, ret, enabled, mode);
    } else if (1 <= direction && direction <= 4) {
      // NOTE(review): this `ret` shadows the outer one (harmless here, but
      // easy to trip over when editing).
      int ret = 0;
      if (1 == direction) {
        // AGC off, with the target level still configured for later use.
        ret = audio_ap->SetAgcStatus(false, kAgcAdaptiveAnalog);
        if (ret < 0)
          return ret;

        AgcConfig stConfig;
        memset(&stConfig, 0, sizeof(stConfig));
        ret = audio_ap->GetAgcConfig(stConfig);
        if (ret < 0)
          return ret;
        stConfig.targetLeveldBOv = 3;
        ret = audio_ap->SetAgcConfig(stConfig);
        if (ret < 0)
          return ret;
      } else if (2 == direction)
        ret = audio_ap->SetAgcStatus(true, kAgcAdaptiveAnalog);
      else if (3 == direction)
        ret = audio_ap->SetAgcStatus(true, kAgcAdaptiveDigital);
      else if (4 == direction)
        ret = audio_ap->SetAgcStatus(true, kAgcFixedDigital);

      if (ret < 0)
        return ret;

      bool enabled;
      AgcModes mode;
      ret = audio_ap->GetAgcStatus(enabled, mode);
      if (ret < 0)
        return ret;

      ALOGI(
          "VideoActivity SetAgcStatus direction(%d) ret(%d) GetAgcStatus (%d, "
          "%d)",
          direction, ret, enabled, mode);
    } else if (5 == direction) {
      // Set the AGC target level from a numeric string in `info`.
      AgcConfig stConfig;
      memset(&stConfig, 0, sizeof(stConfig));

      ALOGI("VideoActivity targetLeveldBOv (%d)", atoi((char*)info));

      ret = audio_ap->GetAgcConfig(stConfig);
      if (ret < 0)
        return ret;
      stConfig.targetLeveldBOv = (unsigned short)atoi((char*)info);
      ret = audio_ap->SetAgcConfig(stConfig);
      if (ret < 0)
        return ret;
    } else if (6 <= direction && direction <= 13) {
      // Echo-control presets. NOTE(review): shadowed `ret` again.
      int ret = 0;

      if (6 == direction)
        ret = audio_ap->SetEcStatus(false, kEcDefault);
      else if (7 == direction)
        ret = audio_ap->SetEcStatus(true, kEcDefault);
      else if (8 == direction)
        ret = audio_ap->SetEcStatus(true, kEcAec);
      else if (9 == direction) {
        audio_ap->SetEcStatus(true, kEcAecm);
        ret = audio_ap->SetAecmMode(kAecmQuietEarpieceOrHeadset, true);
      } else if (10 == direction) {
        audio_ap->SetEcStatus(true, kEcAecm);
        ret = audio_ap->SetAecmMode(kAecmEarpiece, true);
      } else if (11 == direction) {
        audio_ap->SetEcStatus(true, kEcAecm);
        ret = audio_ap->SetAecmMode(kAecmLoudEarpiece, true);
      } else if (12 == direction) {
        audio_ap->SetEcStatus(true, kEcAecm);
        ret = audio_ap->SetAecmMode(kAecmSpeakerphone, true);
      } else if (13 == direction) {
        audio_ap->SetEcStatus(true, kEcAecm);
        ret = audio_ap->SetAecmMode(kAecmLoudSpeakerphone, true);
      }

      ALOGI(
          "VideoActivity direction (%d) SetEcStatus ret(%d) DelayOffsetMs(%d)",
          direction, ret, audio_ap->DelayOffsetMs());

      if (ret < 0)
        return ret;
    } else if (14 == direction) {
      // Set the echo-canceller delay offset from a numeric string in `info`.
      audio_ap->SetDelayOffsetMs(atoi((char*)info));
      ALOGI("VideoActivity SetDelayOffsetMs (%d) DelayOffsetMs(%d)",
            atoi((char*)info), audio_ap->DelayOffsetMs());
    }
  } else if (module == VID_RTPRTCP) {
    int error = -1;
    if (direction == M_SEND) {
      // Send-side RTP header extensions: abs-send-time (id 3) and
      // transmission offset (id 2).
      error = video_rtprtcp->SetSendAbsoluteSendTimeStatus(channelid, true, 3);
      error = video_rtprtcp->SetSendTimestampOffsetStatus(channelid, true, 2);
    } else {
      // Receive side: compound RTCP, PLI key-frame requests, REMB, NACK,
      // plus the matching receive-side header extensions.
      error = video_rtprtcp->SetRTCPStatus(channelid,
                                           webrtc::kRtcpCompound_RFC4585);
      error = video_rtprtcp->SetKeyFrameRequestMethod(
          channelid, webrtc::kViEKeyFrameRequestPliRtcp);
      error = video_rtprtcp->SetRembStatus(channelid, true, true);
      // ptrViERtpRtcp->SetHybridNACKFECStatus(video_channelid, true,
      // red_payload_type, fec_payload_type);
      error = video_rtprtcp->SetNACKStatus(channelid, true);
      // ELCC also supports this at present.
      error =
          video_rtprtcp->SetReceiveAbsoluteSendTimeStatus(channelid, true, 3);
      error =
          video_rtprtcp->SetReceiveTimestampOffsetStatus(channelid, true, 2);
    }

    return error;
  } else if (module == AUD_HARDW) {
    /*webrtc_audio_hardware->GetNumOfRecordingDevices(nRec);

    LOG_INFO_LD(UALOG, "%s Get num of recordingdevice:%d", __FUNCTION__, nRec);
    for(int idx = 0; idx < nRec; idx++)
    {
        webrtc_audio_hardware->GetRecordingDeviceName(idx , devName , guidName);
        LOG_INFO_LD(UALOG, "%s %d devName=%s guidName=%s", __FUNCTION__, idx,
    devName, guidName);
    }*/
    ret = audio_hardware->SetRecordingDevice(0);
    if (ret < 0)
      return ret;
  } else if (module == AUD_RTPRTCP) {
    // Build a 32-bit random SSRC even where RAND_MAX is only 15 bits.
    unsigned localSSRC = (RAND_MAX > 0x7fff)
                             ? (unsigned)rand()
                             : (((unsigned)rand() << 16) + (unsigned)rand());

    audio_rtprtcp->SetLocalSSRC(channelid, localSSRC);
  } else if (module == VID_IMAGE) {
    video_image->EnableColorEnhancement(channelid, true);
  } else if (module == VID_NETW) {
    video_network->SetMTU(channelid, 1200);
  } else if (module == AUD_DTMF) {
    ret = audio_dtmf->SetSendTelephoneEventPayloadType(channelid, 101);
    if (ret < 0)
      return ret;
  }

  /* Kept for future use:
  int nRec;
  char devName[128] = {0};
  char guidName[128] = {0};
  audio_hardware->GetNumOfRecordingDevices(nRec);
  //std::cout << "Get num of recordingdevice:" << nRec << std::endl;
  LOG_INFO_LD(UALOG, "%s Get num of recordingdevice:%d", __FUNCTION__, nRec);
  for(int idx = 0; idx < nRec; idx++)
  {
      audio_hardware->GetRecordingDeviceName(idx , devName , guidName);
      LOG_INFO_LD(UALOG, "%s %d devName=%s guidName=%s", __FUNCTION__, idx,
  devName, guidName);
  }*/

  return 0;
}

/* 媒体流的操作 */
/*
 * Starts sending and/or receiving on a media channel.
 * media_direction: M_SEND, M_RECV, or any other value for both directions.
 * Returns the result of the last engine call, -1 if nothing was started.
 * The relative order of the engine calls is significant and preserved.
 */
int WEBRTCIMPL::webrtc_api_startmedia(int channelid,
                                      int media_direction,
                                      int media_type,
                                      void* info) {
  int ret = -1;

  if (media_type == M_AUDIO) {
    if (!audio_base) {
      printf("%s line %d error : audio_base not init\n", myfn, myln);
      return -1;
    }

    if (media_direction == M_RECV) {
      ret = audio_base->StartPlayout(channelid);
      ret = audio_base->StartReceive(channelid);
      is_audio_start_received = true;
    } else if (media_direction == M_SEND) {
      ret = audio_base->StartSend(channelid);
      is_audio_start_sended = true;
    } else {
      // Bidirectional: receive path first, then send.
      ret = audio_base->StartReceive(channelid);
      ret = audio_base->StartPlayout(channelid);
      ret = audio_base->StartSend(channelid);
      is_audio_start_sended = true;
      is_audio_start_received = true;
    }
  }

  if (media_type == M_VIDEO) {
    if (!video_base) {
      printf("%s line %d error : video_base not init\n", myfn, myln);
      return -1;
    }

    if (media_direction == M_RECV) {
      // Only this call is required for the receive side.
      ret = video_base->StartReceive(channelid);
      is_video_start_received = true;
    } else if (media_direction == M_SEND) {
      // Only this call is required for the send side.
      ret = video_base->StartSend(channelid);
      is_video_start_sended = true;
    } else {
      ret = video_base->StartReceive(channelid);
      ret = video_base->StartSend(channelid);
      is_video_start_sended = true;
      is_video_start_received = true;
    }
  }

  return ret;
}

/*
 * Stops sending and/or receiving on a media channel.
 * media_direction: M_SEND, M_RECV, or any other value for both directions.
 * Returns the result of the last engine call, -1 if nothing was stopped.
 */
int WEBRTCIMPL::webrtc_api_stopmedia(int channelid,
                                     int media_direction,
                                     int media_type,
                                     void* info) {
  int ret = -1;

  if (media_type == M_AUDIO) {
    if (!audio_base) {
      printf("%s line %d error : audio_base not init\n", myfn, myln);
      return -1;
    }

    if (media_direction == M_RECV) {
      // StopPlayout / StopReceive may run in either order.
      ret = audio_base->StopPlayout(channelid);
      ret = audio_base->StopReceive(channelid);
      is_audio_start_received = false;
    } else if (media_direction == M_SEND) {
      ret = audio_base->StopSend(channelid);
      is_audio_start_sended = false;
    } else {
      // Bidirectional: stop sending first, then the receive path.
      ret = audio_base->StopSend(channelid);
      ret = audio_base->StopPlayout(channelid);
      ret = audio_base->StopReceive(channelid);
      is_audio_start_sended = false;
      is_audio_start_received = false;
    }
  }

  if (media_type == M_VIDEO) {
    if (!video_base) {
      printf("%s line %d error : video_base not init\n", myfn, myln);
      return -1;
    }

    if (media_direction == M_RECV) {
      ret = video_base->StopReceive(channelid);
      is_video_start_received = false;
    } else if (media_direction == M_SEND) {
      ret = video_base->StopSend(channelid);
      is_video_start_sended = false;
    } else {
      ret = video_base->StopSend(channelid);
      ret = video_base->StopReceive(channelid);
      is_video_start_sended = false;
      is_video_start_received = false;
    }
  }

  return ret;
}

// Mutes or unmutes the microphone input (channel -1 — presumably all
// channels; verify against the VoEVolumeControl docs).
// NOTE(review): `enabled == true` MUTES the input (SetInputMute semantics),
// so the parameter name reads inverted — confirm against callers.
int WEBRTCIMPL::webrtc_api_enablemic(bool enabled) {
  // vcon->SetChannelOutputVolumeScaling(audio_channelid, 0.3);
  int ret = audio_vcon->SetInputMute(-1, enabled);
  return ret;
}

// Sends DTMF digit `num`. Currently disabled: the engine call is commented
// out and the function always reports failure (-1).
int WEBRTCIMPL::webrtc_api_dtmf(int num) {
  // audio_dtmf->SendTelephoneEvent(audio_channelid, num);
  int ret = -1;
  return ret;
}

#ifdef WEBRTC_IOS
// Creates a VideoRenderIosView with the given frame and an opaque background
// color (r/g/b expressed in 0-255).
// NOTE(review): r/g/b are ints, so `r / 255` is integer division — only 0 or
// 255 produce distinct results; confirm whether `/ 255.0f` was intended.
UIView* WEBRTCIMPL::createIosView(int x,
                                  int y,
                                  int width,
                                  int height,
                                  int r,
                                  int g,
                                  int b) {
  VideoRenderIosView* pVideoView = [[VideoRenderIosView alloc]
      initWithFrame:CGRectMake(x, y, width, height)];
  pVideoView.backgroundColor =
      [UIColor colorWithRed:r / 255 green:g / 255 blue:b / 255 alpha:1.0f];
  return pVideoView;  // zth modify
}
#endif

// Starts (M_START) or stops (M_STOP) recording the playout of `channel` to
// the file named by `info`. `wavType` becomes the channel count of the
// recording codec. Redundant start/stop requests are ignored (return 0);
// otherwise returns the engine result, updating m_recording on success.
int WEBRTCIMPL::webrtc_api_record(int act_type,
                                  int channel,
                                  int wavType,
                                  void* info) {
  char* fileNameUTF8 = (char*)info;
  const bool targetRecording = (M_START == act_type);

  // Already in the requested state — nothing to do.
  if (m_recording == targetRecording) {
    // LOG_INFO_LD(UALOG, "%s line %d %s", __FUNCTION__, __LINE__, "WARNING:
    // repeated open or close record");
    return 0;
  }

  int ret = -1;
  if (M_START == act_type) {
    // 16 kHz linear PCM template; channel count comes from wavType.
    CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
    dummyCodec.channels = wavType;
    ret = audio_file->StartRecordingPlayout(channel, (const char*)fileNameUTF8,
                                            &dummyCodec);
  } else if (M_STOP == act_type) {
    ret = audio_file->StopRecordingPlayout(channel);
  }

  if (ret == 0)
    m_recording = targetRecording;

  return ret;
}

// Records the requested capture rotation in degrees. The actual engine call
// is currently disabled, so only the cached value changes. Always returns 0.
int WEBRTCIMPL::SetRotateCapture(int captureId, int degrees) {
  rotate_ = degrees;
  // video_capture->SetRotateCapturedFrames(captureId,
  // (RotateCapturedFrame)degrees);
  return 0;
}

// Fills `pstDataUnit` with NetEQ network and decoding statistics for audio
// channel `achn`. The RTCP call-statistics block is fetched but its mapping
// into DataUnit is currently disabled. Always returns 0.
int WEBRTCIMPL::GetAudioRtcpStatistics(int achn, DataUnit* pstDataUnit) {
  CallStatistics stats;
  audio_rtprtcp->GetRTCPStatistics(achn, stats);

  /*pstDataUnit->currData[M_AUDIO][M_RECV].bytes = stats.bytesReceived;
  pstDataUnit->currData[M_AUDIO][M_RECV].packets = stats.packetsReceived;
  pstDataUnit->currData[M_AUDIO][M_SEND].bytes = stats.bytesSent;
  pstDataUnit->currData[M_AUDIO][M_SEND].packets = stats.packetsSent;

  pstDataUnit->outLossrate[M_AUDIO] = (stats.fractionLost * 100) >> 8;
  pstDataUnit->jitter[M_AUDIO] = stats.jitterSamples;
  pstDataUnit->delay[M_AUDIO] = stats.rttMs;*/

  // NetEQ jitter-buffer / network statistics, written into the caller's unit.
  audio_neteqstats->GetNetworkStatistics(achn, pstDataUnit->netw_stats);

  // Per-channel audio decoding call statistics.
  audio_neteqstats->GetDecodingCallStatistics(achn, &(pstDataUnit->adec_stats));

  return 0;
}

// Queries RTP data counters and receive-channel RTCP statistics for video
// channel `vchn`. NOTE(review): all results are currently discarded — the
// code that copied them into `pstDataUnit` is commented out, so this
// effectively only exercises the engine calls. Always returns 0.
int WEBRTCIMPL::GetVideoRtcpStatistics(int vchn, DataUnit* pstDataUnit) {
  StreamDataCounters DataSent;
  StreamDataCounters DataReceived;
  video_rtprtcp->GetRtpStatistics(vchn, DataSent, DataReceived);

  /*pstDataUnit->currData[M_VIDEO][M_RECV].bytes =
  DataReceived.transmitted.TotalBytes();
  pstDataUnit->currData[M_VIDEO][M_RECV].packets =
  DataReceived.transmitted.packets; pstDataUnit->currData[M_VIDEO][M_SEND].bytes
  = DataSent.transmitted.TotalBytes();
  pstDataUnit->currData[M_VIDEO][M_SEND].packets =
  DataSent.transmitted.packets;*/

  RtcpStatistics basic_stats;
  int64_t rtt_ms;
  video_rtprtcp->GetReceiveChannelRtcpStatistics(vchn, basic_stats, rtt_ms);

  /*pstDataUnit->outLossrate[M_VIDEO] = (basic_stats.fraction_lost * 100) >> 8;
  pstDataUnit->jitter[M_VIDEO] = basic_stats.jitter;
  pstDataUnit->delay[M_VIDEO] = rtt_ms;

      pstDataUnit->currframecnt[M_RECV] = 15;
      pstDataUnit->currframecnt[M_SEND] = 15;
  */

  return 0;
}

// Generic string-valued getter. Not implemented; always fails with -1.
int WEBRTCIMPL::getVal(char* cmd, char* pVal) {
  return -1;
}

// Generic int-valued getter. Not implemented; always fails with -1.
int WEBRTCIMPL::getVal(char* cmd, int* pVal) {
  return -1;
}

// Generic string-valued setter. Not implemented; always fails with -1.
int WEBRTCIMPL::setVal(char* cmd, char* Val) {
  return -1;
}

// Sets a named integer-valued option (case-insensitive match on `cmd`).
// Supported: "mirrorLocalRender", "mirrorRemoteRender", "AutoVideoSize".
// Returns 0 on success, -1 for an unknown option name.
int WEBRTCIMPL::setVal(char* cmd, int val) {
  int ret = 0;
  /* if(0 == strcasecmp(cmd, "cameraW"))
   {
       //APOS(); ALOGI("val sss = %d", val);
       cameraW = val;
   }
       else if(0 == strcasecmp(cmd, "cameraH"))
   {
       //APOS(); ALOGI("val = %d", val);
       cameraH = val;
   }

   else if(0 == strcasecmp(cmd, "mirrorCamera"))
   {
       mirror_camera = (bool)val;
   }
   else if(0 == strcasecmp(cmd, "rotate"))
   {
       rotate_ = val;
   }
   else */
  if (0 == strcasecmp(cmd, "mirrorLocalRender")) {
    mirror_local_render = (bool)val;
  } else if (0 == strcasecmp(cmd, "mirrorRemoteRender")) {
    mirror_remote_render = (bool)val;
  }

  else if (0 == strcasecmp(cmd, "AutoVideoSize")) {
    // NOTE(review): the bool cast collapses any non-zero val to 1 even
    // though the constructor initializes m_AutoVideoSize to 0 as an int —
    // confirm the member is intended as a flag.
    m_AutoVideoSize = (bool)val;
  } else {
    ret = -1;
    ALOGI("SDK ERROR: setVal param not found!\n");
  }

  return ret;
}

}  // namespace webrtc

// uint16_t rtpPort,rtcport;
// bool echo_cancellation=true;
// webrtc::EcModes ec_mode = webrtc::kEcConference;
// webrtc::AecmModes aecm_mode = webrtc::kAecmSpeakerphone;
// webrtc::AgcModes agc_mode = webrtc::kAgcAdaptiveAnalog;
// webrtc::NsModes ns_mode = webrtc::kNsHighSuppression;
// bool aecm_comfort_noise = false;
// char devName[128] = { 0 };
// char guidName[128] = { 0 };
// int nRec;
/*AudioOptions options;
options.echo_cancellation.Set(true);
options.auto_gain_control.Set(true);
options.noise_suppression.Set(true);
options.highpass_filter.Set(true);
options.stereo_swapping.Set(false);
options.typing_detection.Set(true);
options.conference_mode.Set(false);
options.adjust_agc_delta.Set(0);
options.experimental_agc.Set(false);
options.experimental_aec.Set(false);
options.experimental_ns.Set(false);
options.aec_dump.Set(false);
options.experimental_acm.Set(false);
*/
