
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <string.h>

//#define MT_DEBUG
#include "miot.h"

static bool s_bstart = false;

static GMainLoop *vs_loop;
static GstElement *vs_pipeline;
static GstElement *ao_pipeline;
static GstElement *ai_pipeline;

static GstFlowReturn 
video_new_sample(GstElement *elt, gpointer pdata)
{
    GstSample *sample;
    GstBuffer *buf;
    GstMapInfo map;
    guint8 *data;
    gsize size;
    GstClockTime pts;
    guint timestamp;
    guint offset, frametype;
    GstFlowReturn ret = GST_FLOW_OK;
    struct FrameInfo *fi = (struct FrameInfo *)pdata;

    /* Capture the wall-clock time of the first sample; it is added to each
     * buffer's running-time PTS to produce an absolute ms timestamp. */
    if(fi->bts == 0) {
        GTimeVal now;
        g_get_current_time(&now);
        fi->bts = GST_TIMEVAL_TO_TIME(now);
    }

    sample = gst_app_sink_pull_sample(GST_APP_SINK(elt));
    if(sample == NULL)          /* appsink is flushing or at EOS */
        return GST_FLOW_EOS;
    buf = gst_sample_get_buffer(sample);

    /* Bug fix: the buffer is owned by the sample, so the sample must stay
     * referenced until gst_buffer_unmap(); previously it was unreffed
     * before mapping, risking a use-after-free of the mapped data. */
    if(!gst_buffer_map(buf, &map, GST_MAP_READ)) {
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }
    data = map.data;
    size = map.size;

    pts = GST_BUFFER_PTS(buf);
    pts += fi->bts;
    timestamp = GST_TIME_AS_MSECONDS(pts);

    mt_dbg("###vpu buffer size %" G_GSIZE_FORMAT, size);
    mt_dbg("timestamp: %u\n", timestamp);

    /* Need at least the 4-byte start code plus one NAL header byte
     * before the indexing below is safe. */
    if(size < 5)
        goto buffer_error;

    /* Locate the H.264 Annex-B start code: 00 00 01 or 00 00 00 01. */
    if((data[0] == 0) && (data[1] == 0)) {
        if(data[2] == 1)
            offset = 2;
        else if((data[2] == 0) && (data[3] == 1))
            offset = 3;
        else
            goto buffer_error;
    } else {
        goto buffer_error;
    }

    /* NAL unit type is the low 5 bits of the byte after the start code:
     * 5 = IDR, 7 = SPS, 8 = PPS -> treated as key frame (0);
     * 1 = non-IDR slice -> P frame (kept as 1); anything else is dropped. */
    frametype = data[++offset] & 0x1F;
    if((frametype == 5) || (frametype == 7) || (frametype == 8))
        frametype = 0;
    else if(frametype != 1)
        goto buffer_error;

#ifdef MT_DEBUG
    {
        gint i;
        for(i = 0; i < 20 && i < size; i++)
            mt_dbg("%02X ", data[i]);
        mt_dbg("\n");
    }
#endif

    /* Maintain GOP/frame counters: a key frame opens a new GOP. */
    fi->ntotal++;
    if(frametype == 0) {
        fi->ngop++;
        fi->nframe = 0;
    } else {
        fi->nframe++;
    }

    tx_set_video_data(data, size, frametype, timestamp, 
            fi->ngop, fi->nframe, fi->ntotal, fi->quant);
    mt_dbg("ngop: %d, nframe: %d, ntotal: %d\n", fi->ngop, fi->nframe, fi->ntotal);

buffer_error:
    gst_buffer_unmap(buf, &map);
    gst_sample_unref(sample);   /* release last; see map comment above */

    return ret;
}

static gboolean 
video_stream_bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
    /* Bus watch for the video pipeline: shut the pipeline down on
     * end-of-stream or on a reported error. */
    GstElement *pipe = (GstElement *)data;

    switch(GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_ERROR:
        {
            GError *err = NULL;
            gchar *dbg_info = NULL;

            gst_message_parse_error(msg, &err, &dbg_info);
            g_free(dbg_info);                       /* debug text unused */
            g_printerr("stream: %s\n", err->message);
            g_error_free(err);
            gst_element_set_state(pipe, GST_STATE_NULL);
            break;
        }
        case GST_MESSAGE_EOS:
            gst_element_set_state(pipe, GST_STATE_NULL);
            break;
        default:
            break;
    }

    return TRUE;    /* keep the watch installed */
}

/*
 * Build the video capture pipeline:
 *   imxv4l2src -> vpuenc (H.264 byte-stream) -> queue2 -> appsink
 * Encoded frames are delivered to video_new_sample() via "new-sample".
 * Returns 0 on success, -1 if the elements could not be linked.
 */
static int video_stream_init(int argc, char *argv[])
{
   GstElement *vs_src, *vs_enc, *vs_sink, *vs_queue;
   GstCaps *vs_caps;
   GstBus *bus;
   struct FrameInfo *fi;

   /* Bug fix: pass the real argc/argv so GStreamer can consume its own
    * command-line options (previously NULL, NULL discarded them). */
   gst_init(&argc, &argv);

   vs_pipeline = gst_pipeline_new("vs-pipeline");
   g_assert(vs_pipeline != NULL);

   vs_src = gst_element_factory_make("imxv4l2src", "video-source");
   g_assert(vs_src != NULL);

   vs_enc = gst_element_factory_make("vpuenc", "video-pay");
   g_assert(vs_enc != NULL);

   vs_queue = gst_element_factory_make("queue2", "video-queue");
   g_assert(vs_queue != NULL);

   vs_sink = gst_element_factory_make("appsink", "video-sink");
   g_assert(vs_sink != NULL);

   vs_caps = gst_caps_new_simple("video/x-h264", 
           "stream-format", G_TYPE_STRING, "byte-stream", 
           NULL);
   g_assert(vs_caps != NULL);

   bus = gst_pipeline_get_bus(GST_PIPELINE(vs_pipeline));
   gst_bus_add_watch(bus, video_stream_bus_call, vs_pipeline);
   gst_object_unref(bus);

   g_object_set(G_OBJECT(vs_enc), "gop-size", 30, NULL);
   g_object_set(G_OBJECT(vs_sink), "emit-signals", TRUE, NULL);
   g_object_set(G_OBJECT(vs_sink), "caps", vs_caps, NULL);

   /* Per-stream counters shared with the appsink callback; -1 so the
    * first key frame brings ngop/ntotal to 0. */
   fi = g_malloc0(sizeof(struct FrameInfo));
   fi->quant = 34;
   fi->ngop = -1;
   fi->nframe = -1;
   fi->ntotal = -1;
   fi->bts = 0;
   g_object_set_data(G_OBJECT(vs_pipeline), "VideoFrameInfo", fi);

   g_signal_connect(vs_sink, "new-sample", G_CALLBACK(video_new_sample), fi);

   gst_bin_add_many(GST_BIN(vs_pipeline), vs_src, vs_enc, vs_queue, vs_sink, NULL);

   /* Bug fix: the link result was previously ignored. */
   if(!gst_element_link_many(vs_src, vs_enc, vs_queue, vs_sink, NULL)) {
       g_printerr("video_stream_init: failed to link pipeline elements\n");
       gst_caps_unref(vs_caps);
       return -1;
   }

   gst_caps_unref(vs_caps);

   return 0;
}

static GstFlowReturn
audio_out_new_sample(GstElement *elt, gpointer pdata)
{
    GstSample *sample;
    GstBuffer *buf;
    GstMapInfo map;
    guint8 *data;
    gsize size;
    struct AudioInfo *aoi = (struct AudioInfo *)pdata;
    GstFlowReturn ret = GST_FLOW_OK;

    sample = gst_app_sink_pull_sample(GST_APP_SINK(elt));
    if(sample == NULL)          /* appsink is flushing or at EOS */
        return GST_FLOW_EOS;
    buf = gst_sample_get_buffer(sample);

    /* Bug fix: the buffer belongs to the sample, so the sample must stay
     * referenced until gst_buffer_unmap(); previously it was unreffed
     * before mapping, risking a use-after-free of the mapped data. */
    if(!gst_buffer_map(buf, &map, GST_MAP_READ)) {
        gst_sample_unref(sample);
        return GST_FLOW_ERROR;
    }
    data = map.data;
    size = map.size;

    mt_dbg("####audio buffer size %" G_GSIZE_FORMAT "\n", size);
#ifdef MT_DEBUG
    {
        gint i;
        for(i = 0; i < 20 && i < size; i++)
            mt_dbg("%02X ", data[i]);
        mt_dbg("\n");
    }
#endif

    /* Only complete AMR frames are accumulated; anything else is dropped. */
    if(size != AMR_BUF_SIZE)
        goto ao_sample_out;

    /* NOTE(review): this write goes through aoi->pos without holding
     * dqmutex even though pos is updated under the lock below — confirm
     * no other thread touches the active buffer concurrently. */
    memmove(aoi->pos, data, size);
    if(aoi->cnt < AMR_PKG_MAX - 1 ) {
        /* Still filling the active buffer: advance the write cursor. */
        g_mutex_lock(&aoi->dqmutex);
        aoi->pos += size;
        aoi->cnt++;
        g_mutex_unlock(&aoi->dqmutex);
    } else {
        /* Buffer full: swap the double buffers, then hand the full one
         * to the AV SDK and clear it for reuse. */
        guint8 *tmp;
        g_mutex_lock(&aoi->dqmutex);
        if(aoi->dq == AUDIOC_DBUF) {
            tmp = aoi->dbuf;
            aoi->pos = aoi->qbuf;
            aoi->dq = AUDIOC_QBUF;
        } else {
            tmp = aoi->qbuf;
            aoi->pos = aoi->dbuf;
            aoi->dq = AUDIOC_DBUF;
        } 
        aoi->cnt = 0;
        g_mutex_unlock(&aoi->dqmutex);
        tx_set_audio_data(&aoi->param, tmp, aoi->maxbuf);
        memset(tmp, 0, aoi->maxbuf);
    }

ao_sample_out:
    gst_buffer_unmap(buf, &map);
    gst_sample_unref(sample);   /* release last; see map comment above */

    return ret;
}

/*
 * Build the microphone capture pipeline:
 *   alsasrc -> audioconvert -> audioresample -> amrnbenc -> queue2 -> appsink
 * producing AMR-NB mono @ 8 kHz; encoded frames reach audio_out_new_sample().
 */
static int audio_out_stream_init(int argc, char *argv[])
{
    GstElement *src, *conv, *resample, *enc, *queue, *sink;
    GstCaps *caps;
    struct AudioInfo *info;

    ao_pipeline = gst_pipeline_new("ao-pipeline");
    g_assert(ao_pipeline != NULL);

    src = gst_element_factory_make("alsasrc", "audio-out-source");
    g_assert(src != NULL);

    conv = gst_element_factory_make("audioconvert", "audio-out-convert");
    g_assert(conv != NULL);

    resample = gst_element_factory_make("audioresample", "audio-out-resample");
    g_assert(resample != NULL);

    enc = gst_element_factory_make("amrnbenc", "audio-out-pay");
    g_assert(enc != NULL);

    queue = gst_element_factory_make("queue2", "audio-out-queue");
    g_assert(queue != NULL);

    sink = gst_element_factory_make("appsink", "audio-out-sink");
    g_assert(sink != NULL);

    caps = gst_caps_new_simple("audio/AMR",
            "channels", G_TYPE_INT, 1,
            "rate", G_TYPE_INT, 8000,
            NULL);
    g_assert(caps != NULL);

    g_object_set(G_OBJECT(src), "device", "hw:2", NULL);
    g_object_set(G_OBJECT(sink), "emit-signals", TRUE, NULL);
    g_object_set(G_OBJECT(sink), "caps", caps, NULL);

    /* Double-buffered AMR frame accumulator shared with the appsink
     * callback; one tx_set_audio_data() call per AMR_PKG_MAX frames. */
    info = g_malloc0(sizeof(struct AudioInfo));

    info->dir = AUDIO_SEND_TO_QQ;

    info->param.head_length = sizeof(info->param);
    info->param.audio_format = 1;
    info->param.encode_param = 7;
    info->param.frame_per_pkg = AMR_PKG_MAX;
    info->param.sampling_info = GET_SIMPLING_INFO(1, 8,16);

    info->maxbuf = AMR_PKG_MAX * AMR_BUF_SIZE;
    info->dbuf = (guint8 *)g_malloc0(info->maxbuf + 1);
    info->qbuf = (guint8 *)g_malloc0(info->maxbuf + 1);
    info->dq = AUDIOC_DBUF;
    info->pos = info->dbuf;
    g_mutex_init(&info->dqmutex);
    info->cnt = 0;

    g_signal_connect(sink, "new-sample",
            G_CALLBACK(audio_out_new_sample), info);

    g_object_set_data(G_OBJECT(ao_pipeline), "AudioOutInfo", info);

    gst_bin_add_many(GST_BIN(ao_pipeline), src, conv, resample,
            enc, queue, sink, NULL);

    gst_element_link_many(src, conv, resample,
            enc, queue, sink, NULL);

    gst_caps_unref(caps);

    return 0;
}

/*
 * appsrc "need-data" callback for audio received from the QQ peer.
 * Playback feeding is not implemented yet; only log the request.
 * (Unused locals removed — they only produced compiler warnings.)
 */
static void audio_in_need_data(GstElement* elt, guint length, gpointer pdata)
{
    (void)elt;
    (void)pdata;   /* struct AudioInfo * — will be needed once data is fed */

    /* Bug fix: length is guint, so use %u rather than %d. */
    g_print("##need length: %u\n", length);
} 

/*
 * Build the playback pipeline for audio received from the QQ peer:
 *   appsrc -> queue2 -> beepdec -> audioconvert -> audioresample -> alsasink
 * appsrc pulls data via the "need-data" signal (audio_in_need_data).
 */
static int audio_in_stream_init(int argc, char *argv[])
{
    GstElement *ai_src, *ai_queue, *ai_dec, *ai_conv, *ai_res, *ai_sink;
    GstCaps *ai_caps;
    struct AudioInfo *aii;

    ai_pipeline = gst_pipeline_new("ai-pipeline");
    g_assert(ai_pipeline != NULL);

    ai_src = gst_element_factory_make("appsrc", "audio-in-source");
    g_assert(ai_src != NULL);

    ai_queue = gst_element_factory_make("queue2", "audio-in-queue");
    g_assert(ai_queue != NULL);

    ai_dec = gst_element_factory_make("beepdec", "audio-in-depay");
    g_assert(ai_dec != NULL);

    ai_conv = gst_element_factory_make("audioconvert", "audio-in-convert");
    g_assert(ai_conv != NULL);

    ai_res = gst_element_factory_make("audioresample", "audio-in-resample");
    g_assert(ai_res != NULL);

    ai_sink = gst_element_factory_make("alsasink", "audio-in-sink");
    g_assert(ai_sink != NULL);

    ai_caps = gst_caps_new_simple("audio/AMR",
            "channels", G_TYPE_INT, 1,
            "rate", G_TYPE_INT, 8000,
            NULL);
    g_assert(ai_caps != NULL);
    
    g_object_set(G_OBJECT(ai_src), "emit-signals", TRUE, NULL);
    g_object_set(G_OBJECT(ai_src), "caps", ai_caps, NULL);

   /* Same double-buffer bookkeeping as the capture side, but for the
    * receive direction. */
   aii = g_malloc0(sizeof(struct AudioInfo));

   aii->dir = AUDIO_RECV_FROM_QQ;

   aii->param.head_length = sizeof(aii->param);
   aii->param.audio_format = 1;
   aii->param.encode_param = 7;
   aii->param.frame_per_pkg = AMR_PKG_MAX;
   aii->param.sampling_info = GET_SIMPLING_INFO(1, 8,16);

   aii->maxbuf = AMR_PKG_MAX * AMR_BUF_SIZE;
   aii->dbuf = (guint8 *)g_malloc0( aii->maxbuf + 1);
   aii->qbuf = (guint8 *)g_malloc0( aii->maxbuf + 1);
   aii->dq = AUDIOC_DBUF;
   aii->pos = aii->dbuf;
   g_mutex_init(&aii->dqmutex);
   aii->cnt = AMR_PKG_MAX;

   g_signal_connect(ai_src, "need-data", 
            G_CALLBACK(audio_in_need_data), aii);

   g_object_set_data(G_OBJECT(ai_pipeline), "AudioInInfo", aii);

   gst_bin_add_many(GST_BIN(ai_pipeline), ai_src, ai_queue, ai_dec,
           ai_conv, ai_res, ai_sink, NULL);

   gst_element_link_many(ai_src, ai_queue, ai_dec, 
           ai_conv, ai_res, ai_sink, NULL);

   /* Bug fix: GstCaps is a GstMiniObject, not a GObject, so it must be
    * released with gst_caps_unref() — gst_object_unref() was incorrect
    * (the other two *_stream_init functions already do this right). */
   gst_caps_unref(ai_caps);
    return 0;
}

/*
 * Callback: audio data received from the QQ peer.
 * Currently only dumps the encode parameters and payload length for
 * inspection; the data itself is not played back here.
 */
void test_recv_audiodata(tx_audio_encode_param *param, unsigned char *pcEncData, int nEncDataLen)
{
    g_print("###param:\n");
    g_print("   format: %X param: %X fame_pkg: %X\n", 
            param->audio_format, param->encode_param, param->frame_per_pkg);
    g_print(" sampling_info: %X\n", param->sampling_info);
    g_print(" datalen: %X\n", nEncDataLen);
}

/**
 * Callback: start video capture.
 * The video link to the peer is established, so frames may be fed to the
 * AV SDK via tx_set_video_data(). Here that simply means setting the
 * GStreamer capture pipeline to PLAYING.
 */
bool test_start_camera() {
    mt_dbg("###### test_start_camera ###################################### \n");

    s_bstart = true;
    gst_element_set_state(vs_pipeline, GST_STATE_PLAYING);

    return true;
}

/**
 * 停止采集视频的回调
 * 通知摄像头视频链路已经断开，可以不用再继续采集视频数据。
 */
bool test_stop_camera() {
    struct FrameInfo *fi;

	mt_dbg("###### test_stop_camera ###################################### \n");
	s_bstart = false;
    gst_element_set_state(vs_pipeline, GST_STATE_NULL);

    fi = g_object_get_data(G_OBJECT(vs_pipeline), "VideoFrameInfo");
    fi->ngop = -1;
    fi->nframe = -1;
    fi->ntotal = -1;
    fi->bts = 0;

    return true;
}

/**
 * Callback: bitrate suggestion from the AV SDK.
 * Bitrate determines how much data is produced per second and is directly
 * tied to bandwidth use. Based on network conditions and QoS, the SDK
 * suggests a bitrate; the application can tune camera parameters (frame
 * rate, resolution, quantization, ...) to hit it. Not acted on here.
 */
bool test_set_bitrate(int bit_rate) {
    mt_dbg("###### test_set_bitrate  ##################################### %d \n", bit_rate);
    return true;
}

/**
 * Callback: the AV SDK detected that an I-frame was lost (so further
 * P-frames are useless) and asks the encoder to start a new GOP with a
 * fresh I-frame. Not acted on here.
 */
bool test_restart_gop() {
    mt_dbg("###### test_restart_gop ###################################### \n");

    return true;
}

/**
 * Callback: start audio capture.
 * The audio link is established, so captured audio may be fed to the AV
 * SDK via tx_set_audio_data(); start the microphone pipeline.
 */
bool test_start_mic() {
    g_print("###### test_start_mic ###################################### \n");

    gst_element_set_state(ao_pipeline, GST_STATE_PLAYING);

    return true;
}

/**
 * Callback: stop audio capture.
 * The audio link has been torn down; stop the microphone pipeline.
 */
bool test_stop_mic() {
    g_print("###### test_stop_mic ######################################\n");

    gst_element_set_state(ao_pipeline, GST_STATE_NULL);

    return true;
}

/********************************************************************/

// Tracks whether the audio/video service has already been started
static bool g_start_av_service = false;

/**
 * Login-completion notification. errcode 0 means success; other values
 * are listed in the SDK's global error-code table.
 */
void on_login_complete(int errcode) {
    mt_dbg("on_login_complete | code[%d]\n", errcode);
}

/**
 * Online-status change notification: status 11 means online, 21 offline;
 * `old` is the previous state, `new` the current one.
 *
 * On the first successful transition to online, the AV service is started
 * with the camera/microphone callbacks above. NOTE: this runs on an
 * SDK-internal thread (see the thread-safety remarks in initDevice).
 */
void on_online_status(int old, int new) {
    mt_dbg("online status: %s\n", 11 == new ? "true" : "false");

    // Came online — start the audio/video service once.
    if(11 == new && !g_start_av_service) {
        // When the peer (mobile QQ) requests video, the SDK will invoke
        // on_start_camera; wire up all AV callbacks here.
        tx_av_callback avcallback = {0};
        avcallback.on_start_camera = test_start_camera;
        avcallback.on_stop_camera  = test_stop_camera;
        avcallback.on_set_bitrate  = test_set_bitrate;
        avcallback.on_start_mic    = test_start_mic;
        avcallback.on_stop_mic     = test_stop_mic;
        avcallback.on_recv_audiodata = test_recv_audiodata;

        int ret = tx_start_av_service(&avcallback);
        if (err_null == ret) {
            g_print(" >>> tx_start_av_service successed\n");
            /* Bug fix: only mark the service as started on success, so a
             * later online event can retry after a failure (previously
             * the flag was set unconditionally). */
            g_start_av_service = true;
        }
        else {
            g_print(" >>> tx_start_av_service failed [%d]\n", ret);
        }
    }
}

/**
 * Helper: read a whole file into a caller-supplied buffer.
 * Used here to load the license and GUID, so they can be updated without
 * code changes. Returns false on bad arguments, open/read failure, an
 * empty file, or a file larger than nInSize; on success *pSizeUsed holds
 * the number of usable bytes (a single trailing 0x0a newline is stripped
 * and replaced with a NUL).
 */
bool readBufferFromFile(char *pPath, char *pBuffer, int nInSize, int *pSizeUsed) {
	if (!pPath || !pBuffer || !pSizeUsed) {
		return false;
	}

	int uLen = 0;
	FILE * file = fopen(pPath, "rb");
	if (!file) {
	    return false;
	}

	/* Bug fix: the whence argument must be stdio's SEEK_END — the previous
	 * G_SEEK_END is a GLib GSeekType enumerator, not a fseek() constant. */
	fseek(file, 0L, SEEK_END);
	uLen = (int)ftell(file);
	fseek(file, 0L, SEEK_SET);

	if (uLen <= 0 || nInSize < uLen) {
		g_print("invalide file or buffer size is too small...\n");
		fclose(file);   /* bug fix: the stream leaked on this path */
		return false;
	}

	/* Bug fix: verify the read actually returned the whole file. */
	size_t nRead = fread(pBuffer, 1, (size_t)uLen, file);
	fclose(file);
	if (nRead != (size_t)uLen) {
		return false;
	}
	*pSizeUsed = (int)nRead;

	// bugfix: 0x0a is a lineend char, no use.
	if (pBuffer[uLen-1] == 0x0a)
	{
		*pSizeUsed = uLen - 1;
		pBuffer[uLen - 1] = '\0';
	}

	mt_dbg("len:%d, ulen:%d\n",uLen, *pSizeUsed);
	return true;
}

/**
 * SDK log-output callback. Intentionally a no-op in this build; restore
 * the g_print() call below while debugging to see SDK internals.
 */
void log_func(int level, const char* module, int line, const char* message)
{
//    g_print("%s\n", message);
    return;
}

/**
 * SDK initialization. For example:
 *  (1) fill in the basic device information;
 *  (2) choose which events to listen for — listening simply means
 *      installing callbacks for the various message types.
 *  E.g. to receive CC-message notifications, define a function
 *  my_on_receive_ccmsg, assign it to the matching pointer in a
 *  tx_msg_notify object, and initialize:
 *
 *      tx_msg_notify msgNotify = {0};
 *      msgNotify.on_receive_ccmsg = my_on_receive_ccmsg;
 *      tx_init_msg(&msgNotify);
 *
 *  Then when an SDK-internal thread receives a CC message from the peer
 *  (relayed by the server), it synchronously calls
 *  msgNotify.on_receive_ccmsg.
 */
bool initDevice(int argc, char *argv[]) {
    // Load the device license used to authenticate with the server.
    char license[256] = {0};
    int nLicenseSize = 0;
    if (!readBufferFromFile("./licence.sign.file.txt", license, sizeof(license), &nLicenseSize)) {
        g_print("[error]get license from file failed...\n");
        return false;
    }

    // Load the device GUID (serial number).
    char guid[32] = {0};
    int nGUIDSize = 0;
    if(!readBufferFromFile("./GUID_file.txt", guid, sizeof(guid), &nGUIDSize)) {
        g_print("[error]get guid from file failed...\n");
        return false;
    }

    // Load the server public key.
    char svrPubkey[256] = {0};
    int nPubkeySize = 0;
    if (!readBufferFromFile("./1700002022.pem", svrPubkey, sizeof(svrPubkey), &nPubkeySize))
    {
        g_print("[error]get svrPubkey from file failed...\n");
        return false;   /* bug fix: was `return NULL` from a bool function */
    }

    // Basic device information.
    tx_device_info info = {0};
    info.os_platform            = "Linux";

    info.device_name            = "Muxvision audio test";
    info.device_serial_number   = guid;
    info.device_license         = license;
    info.product_version        = 1;
    info.network_type			= network_type_wifi;

    info.product_id             = 1700002022;
    info.server_pub_key         = svrPubkey;

    // Notifications for login, online status, messages, etc.
    // NOTE: these callbacks are invoked from an SDK-internal thread, so
    // their bodies must be thread-safe — e.g. when on_login_complete
    // touches a global, consider whether our own threads touch it too.
    tx_device_notify notify      = {0};
    notify.on_login_complete     = on_login_complete;
    notify.on_online_status      = on_online_status;
    notify.on_binder_list_change = NULL;

    // SDK working directories (config, logs, temp files):
    // system_path: required runtime configuration is written here
    //   (capacity: min 10K, recommended 100K)
    // app_path: logs and crash dumps (capacity: min 300K, recommended 1M)
    // temp_path: scratch files; temp_path_capicity is effectively unused.
    tx_init_path init_path = {0};
    init_path.system_path = "./";
    init_path.system_path_capicity = 100 * 1024;
    init_path.app_path = "/var/run";
    init_path.app_path_capicity = 1024 * 1024;
    init_path.temp_path = "/var/run";
    init_path.temp_path_capicity = 10 * 1024;

    // Install the log callback during development; disable for release.
#ifdef MT_DEBUG
    tx_set_log_func(log_func);
#endif

    // Build the GStreamer capture pipelines before the SDK starts
    // requesting data.
    video_stream_init(argc, argv);
    audio_out_stream_init(argc, argv);

    // Initialize the SDK: on success an internal thread runs until exit.
    int ret = tx_init_device(&info, &notify, &init_path);
    if (err_null == ret) {
        g_print(" >>> tx_init_device success\n");
    }
    else {
        g_print(" >>> tx_init_device failed [%d]\n", ret);
        return false;
    }

    return true;
}


/****************************************************************
 * Entry point.
 *
 * After device initialization the process parks in a GLib main loop;
 * that loop is what dispatches the GStreamer bus watches and the
 * appsink/appsrc signal callbacks, and it keeps the demo alive.
 *****************************************************************/
int main(int argc, char* argv[]) {
    GMainLoop *loop;

    if (!initDevice(argc, argv)) {
        return -1;
    }

    loop = g_main_loop_new(NULL, FALSE);
    g_assert(loop != NULL);
    g_main_loop_run(loop);    /* does not return in normal operation */

    return 0;
}


