





#include"mll_Clustering.h"



#include<stdlib.h>
#include<math.h>
#include "gl_.h"  //用于绘图验证


/// <summary>
/// Allocate the K-means buffers according to K, N and D:
/// N sample vectors of dimension D, K mean (center) vectors of
/// dimension D, two N-sized assignment buffers (double-buffered so the
/// previous assignment survives one iteration) and a K-sized
/// per-cluster member counter.
/// </summary>
void K_Means_allocate(K_Means* kmeans)
{
    kmeans->sample_vector = (vec*)malloc(kmeans->N * sizeof(vec));
    for (int i = 0; i < kmeans->N; i++) {
        vec_allocate(&(kmeans->sample_vector[i]), kmeans->D);
    }

    /* BUGFIX: there are K cluster centers, not N. The previous code
       allocated and vec_allocate'd N entries here while
       K_Means_deallocate releases only K of them, leaking N-K vectors
       on every allocate/deallocate cycle. */
    kmeans->mean_vector = (vec*)malloc(kmeans->K * sizeof(vec));
    for (int i = 0; i < kmeans->K; i++) {
        vec_allocate(&(kmeans->mean_vector[i]), kmeans->D);
    }
    kmeans->sample_model_1 = (int*)malloc(kmeans->N * sizeof(int));
    kmeans->sample_model_2 = (int*)malloc(kmeans->N * sizeof(int));
    kmeans->means_count = (int*)malloc(kmeans->K * sizeof(int));
}

/// <summary>
/// Release every buffer owned by the K-means instance and reset the
/// pointers to NULL, so a repeated call (or a call before the first
/// allocation, provided the pointers were NULL-initialized) is harmless.
/// </summary>
void K_Means_deallocate(K_Means* kmeans)
{
    vec* samples = kmeans->sample_vector;
    if (samples != NULL) {
        for (int n = kmeans->N - 1; n >= 0; n--) {
            vec_deallocate(&samples[n]);
        }
        free(samples);
        kmeans->sample_vector = NULL;
    }

    vec* centers = kmeans->mean_vector;
    if (centers != NULL) {
        for (int k = kmeans->K - 1; k >= 0; k--) {
            vec_deallocate(&centers[k]);
        }
        free(centers);
        kmeans->mean_vector = NULL;
    }

    /* free(NULL) is a no-op, so the plain int buffers need no guard. */
    free(kmeans->means_count);
    kmeans->means_count = NULL;
    free(kmeans->sample_model_1);
    kmeans->sample_model_1 = NULL;
    free(kmeans->sample_model_2);
    kmeans->sample_model_2 = NULL;
}

/// <summary>
/// K-means clustering (Lloyd's algorithm).
/// Iterates assignment and update steps until fewer than 1% of the
/// samples change cluster, or the iteration budget is exhausted.
/// </summary>
/// <param name="k">number of cluster centers</param>
/// <param name="sample">sample matrix; each row vector is one sample</param>
/// <param name="max_iterate_count">upper bound on iterations; never exceeded</param>
/// <returns>number of iterations actually performed</returns>
int K_Means_clustering(K_Means* kmeans, int k, mat sample, int max_iterate_count)
{
    K_Means_deallocate(kmeans);
    kmeans->K = k;
    kmeans->N = sample.rows;
    kmeans->D = sample.cols;

    //More clusters than samples is meaningless; bail out before allocating.
    if (kmeans->K > kmeans->N) {
        return 0;
    }

    K_Means_allocate(kmeans);
    //Copy the sample matrix rows into the sample vectors.
    for (int i = 0; i < kmeans->N; i++) {
        for (int j = 0; j < sample.cols; j++) {
            kmeans->sample_vector[i].data[j] = sample.data[i][j];
        }
    }

    //Initialize the cluster centers: pick K pseudo-random samples with a
    //simple multiplicative congruential generator (fixed seed, so runs are
    //reproducible). Duplicate picks are possible; the empty-cluster guard
    //in the update step keeps that harmless.
    int multiplier = 16807;
    unsigned long int  rand_index = 31415926;
    for (int mi = 0; mi < kmeans->K; mi++) {
        rand_index = (rand_index * multiplier) % kmeans->N;
        vec_copyfrom(kmeans->mean_vector[mi], kmeans->sample_vector[rand_index]);
    }

    //Start with every sample assigned to cluster 0.
    kmeans->sample_model = kmeans->sample_model_1;
    for (int i = 0; i < kmeans->N; i++) {
        kmeans->sample_model[i] = 0;
    }

    int sample_model_change_count = kmeans->N;  //samples that changed cluster in the last iteration.
    int iterate_count = 0;
    while (sample_model_change_count > kmeans->N * 0.01f) {
        //BUGFIX: was '>', which allowed max_iterate_count + 1 iterations.
        if (iterate_count >= max_iterate_count) break;
        iterate_count++;

        //Flip the double-buffered assignment array first, so the previous
        //assignment survives for the change count at the end of the loop.
        if (kmeans->sample_model == kmeans->sample_model_1) {
            kmeans->sample_model = kmeans->sample_model_2;
        }
        else
        {
            kmeans->sample_model = kmeans->sample_model_1;
        }

        //Assignment step: each sample goes to its nearest center.
        for (int i = 0; i < kmeans->K; i++) {  //reset the member counters.
            kmeans->means_count[i] = 0;
        }
        for (int i = 0; i < kmeans->N; i++) {
            real_t min = vec_manhattan_2(kmeans->sample_vector[i], kmeans->mean_vector[0]);
            int best = 0;  //index of the nearest center (was 'k', shadowing the parameter).
            for (int ki = 1; ki < kmeans->K; ki++) {
                real_t r = vec_manhattan_2(kmeans->sample_vector[i], kmeans->mean_vector[ki]);
                if (r < min) {
                    min = r;
                    best = ki;
                }
            }
            kmeans->sample_model[i] = best;
            kmeans->means_count[best]++;
        }

        //Update step: each non-empty cluster's center becomes the mean of
        //its members. BUGFIX: empty clusters are left untouched — the old
        //code zeroed every center and then divided by means_count, which is
        //a division by zero for a cluster that received no samples.
        for (int i = 0; i < kmeans->K; i++) {
            if (kmeans->means_count[i] > 0) vec_init(kmeans->mean_vector[i], 0);
        }
        for (int i = 0; i < kmeans->N; i++) {
            int c = kmeans->sample_model[i];
            vec_add(kmeans->mean_vector[c], kmeans->sample_vector[i], kmeans->mean_vector[c]);
        }
        for (int i = 0; i < kmeans->K; i++) {
            if (kmeans->means_count[i] > 0) {
                vec_mul(kmeans->mean_vector[i], 1.0f / kmeans->means_count[i], kmeans->mean_vector[i]);
            }
        }

        //Count the samples that switched cluster by comparing the two
        //assignment buffers. BUGFIX: each sample is compared exactly once,
        //so the old 'change_count /= 2' (justified by a claim of double
        //counting) silently doubled the convergence threshold; removed.
        sample_model_change_count = 0;
        for (int i = 0; i < kmeans->N; i++) {
            if (kmeans->sample_model_1[i] != kmeans->sample_model_2[i]) sample_model_change_count++;
        }
    }
    return iterate_count;
}
//

    /// <summary>
    /// Visual test of K-means clustering. Exercises the helper types:
    /// Vector, Gauss, Matrix, TwoFunctionPicture. Draws three 2-D
    /// Gaussian sample clouds, clusters them with a growing iteration
    /// budget, and colors the points by cluster so the result can be
    /// checked by eye.
    /// </summary>

void test_k_means()
{
    // NOTE(review): v and cov are allocated but never used below, and none
    // of the local allocations are released — tolerable in a one-shot
    // visual test, but a leak if this is ever called repeatedly.
    vec v;
    gauss_vec g;
    mat a, at, cov;

    vec_allocate(&v, 2);
    gauss_vec_init(&g, 2);
    mat_allocate(&a, 2, 2);
    mat_allocate(&at, 2, 2);
    mat_allocate(&cov, 2, 2);

    real_t theta;
    const int t1 = 100;  //samples 0-100: first distribution
    const int t2 = 300;  //samples 100-300: second distribution
#define len 600 
      //  const int len = 600; //samples 300-600: third distribution
    real_t x[len];
    real_t y[len];
    COLOR_RGB color[len];

    //First distribution: covariance = A * A^T, which is symmetric positive
    //semi-definite by construction. (theta is assigned but unused here.)
    {
        theta = PI_div_3;
        a.data[0][0] = 1, a.data[0][1] = 2;
        a.data[1][0] = -3, a.data[1][1] = 2;
        mat_transpose_to(a, at);
        mat_mul(a, at, g.covariance);
        mat_print(g.covariance);
        gauss_vec_renew_cov(&g);
        printfs(" \r\n init_covariance ");
        g.mean_vector.data[0] = 0;
        g.mean_vector.data[1] = 0;
    }
    for (int i = 0; i < t1; i++) {
        gauss_vec_randomize(g);
        x[i] = g.random.data[0];
        y[i] = g.random.data[1];
        color[i] = RGB_red;
    }

    //Second distribution: covariance built from a rotation-and-scale matrix.
    {
        theta = -PI_div_4;
        a.data[0][0] = 1 * cosf(theta), a.data[0][1] = 1 * sinf(theta);
        a.data[1][0] = -2 * sinf(theta), a.data[1][1] = 2 * cosf(theta);
        mat_transpose_to(a, at);
        mat_mul(a, at, g.covariance);

        mat_print(a);
        mat_print(at);
        mat_print(g.covariance);
        gauss_vec_renew_cov(&g);
        printfs(" \r\n init_covariance ");
        g.mean_vector.data[0] = 3;
        g.mean_vector.data[1] = 4;
    }
    for (int i = t1; i < t2; i++) {
        gauss_vec_randomize(g);
        x[i] = g.random.data[0];
        y[i] = g.random.data[1];
        color[i] = RGB_green;
    }

    //Third distribution. (theta is assigned but unused; the rotation
    //variant of A is kept below, commented out.)
    {
        theta = PI_div_3 / 2;
        a.data[0][0] = 2.8, a.data[0][1] = 1.4;
        a.data[1][0] = 1.8, a.data[1][1] = 1.7;
        //a.data[0][0] = 1 * cosf(theta), a.data[0][1] = 1 * sinf(theta);
        //a.data[1][0] = -1 * sinf(theta), a.data[1][1] = 1 * cosf(theta);
        mat_transpose_to(a, at);
        mat_mul(a, at, g.covariance);
        gauss_vec_renew_cov(&g);
        mat_print(g.covariance);
        printfs(" \r\n init_covariance ");
        g.mean_vector.data[0] = -5;
        g.mean_vector.data[1] = 4;
    }
    for (int i = t2; i < len; i++) {
        gauss_vec_randomize(g);
        x[i] = g.random.data[0];
        y[i] = g.random.data[1];
        color[i] = RGB_blue;
    }

    printfs(" \r\n begin  clustering  ");
    //Dummy sizes (1,1,1) so K_Means_clustering's internal
    //K_Means_deallocate starts from valid allocated pointers.
    K_Means k;
    k.K = 1;
    k.N = 1;
    k.D = 1;
    K_Means_allocate(&k);

    //Pack the generated points into the sample matrix, one row per sample.
    mat mdata;
    mat_allocate(&mdata, len, 2);
    for (int i = 0; i < len; i++) {
        mdata.data[i][0] = x[i];
        mdata.data[i][1] = y[i];
    }

    TwoFunctionPicture tp;
    TwoFunctionPicture_init(&tp, 0, 0, 700, 450);
    char title[] = "Curve Chart";          //chart title
    char xtitle[] = "xtitle time";          //x-axis title
    char y1title[] = "y1title cm";          //y-axis title
    char y2title[] = "y2title km";          //y-axis title
    TwoFunctionPicture_draw_title(&tp, title, xtitle, y1title, y2title);

    //Draw the raw samples colored by their true distribution.
    TwoFunctionPicture_draw_func(&tp,
        x, y, len,
        NULL, NULL, 0,
        0, 0, 0, color, NULL);

    GD_wait_key_input();	// press any key to continue

    //Re-cluster with a growing iteration budget and redraw each time.
    for (int iterate = 1; iterate < 50; iterate++) {
        int t = K_Means_clustering(&k, 3, mdata, iterate);
        printfs(" \r\n  K_Means_clustering == %d  ", t);
        COLOR_RGB kcolor[3] = { RGB_cyan,RGB_lightmagenta,RGB_yellow };  //one color per cluster center
        for (int i = 0; i < k.N; i++) {
            color[i] = kcolor[k.sample_model[i]];     //color each sample by its assigned cluster.
        }

        TwoFunctionPicture_draw_func(&tp,
            x, y, len,
            NULL, NULL, 0,
            0, 0, 0, color, NULL);
        GD_wait_key_input();	// press any key to continue
    }



}
//




//高斯混合模型 Gauss Mixture Model


/// <summary>
/// Allocate the GMM buffers according to K, N and D:
/// N sample vectors of dimension D, K Gaussian components, two N x K
/// responsibility matrices (double-buffered), K mixing weights and an
/// N-sized per-sample model index array.
/// </summary>
void GMM_allocate(GMM* gmm)
{
    const int N = gmm->N;
    const int K = gmm->K;
    const int D = gmm->D;

    gmm->sample_vector = (vec*)malloc(N * sizeof(vec));
    for (int n = 0; n < N; n++) {
        vec_allocate(&(gmm->sample_vector[n]), D);
    }

    gmm->gauss = (gauss_vec*)malloc(K * sizeof(gauss_vec));
    for (int k = 0; k < K; k++) {
        gauss_vec_init(&(gmm->gauss[k]), D);
    }

    mat_allocate(&gmm->gamma_1, N, K);
    mat_allocate(&gmm->gamma_2, N, K);
    gmm->pi = (real_t*)malloc(K * sizeof(real_t));
    gmm->sample_model = (int*)malloc(N * sizeof(int));
}
/// <summary>
/// Release every buffer owned by the GMM instance and reset the
/// pointers to NULL so a repeated call is harmless.
/// </summary>
void GMM_deallocate(GMM* gmm)
{
    if (gmm->sample_vector != NULL) {
        for (int i = 0; i < gmm->N; i++) {
            vec_deallocate(&gmm->sample_vector[i]);
        }
        free(gmm->sample_vector);
        gmm->sample_vector = NULL;
    }
    if (gmm->gauss != NULL) {
        for (int i = 0; i < gmm->K; i++) {
            gauss_vec_release(&gmm->gauss[i]);
        }
        free(gmm->gauss);
        gmm->gauss = NULL;
    }
    mat_deallocate(&gmm->gamma_1);
    mat_deallocate(&gmm->gamma_2);

    /* BUGFIX: pi and sample_model were freed without being reset to
       NULL, so a second deallocate (GMM_calculate starts with one)
       would double-free them. free(NULL) is a no-op, so the old NULL
       guards are also unnecessary. */
    free(gmm->pi);
    gmm->pi = NULL;
    free(gmm->sample_model);
    gmm->sample_model = NULL;
}


/// <summary>
/// Initialize the GMM parameters from a K-means clustering of the data:
/// mixing weights from the cluster sizes, means from the cluster
/// centers and covariances from the within-cluster scatter.
/// m (D x D) and v (size D) are caller-provided scratch buffers.
/// </summary>
/// <param name="sample">sample data</param>
void GMM_init_parameter(GMM* gmm, mat sample, mat m, vec v)
{
    K_Means kmeans;
    //Dummy sizes so K_Means_clustering's internal deallocate/allocate
    //cycle starts from valid pointers.
    kmeans.K = 1;
    kmeans.N = 1;
    kmeans.D = 1;
    K_Means_allocate(&kmeans);


    K_Means_clustering(&kmeans, gmm->K, sample, 20);

    //Mixing weights: fraction of the samples in each cluster.
    for (int k = 0; k < gmm->K; k++) {
        gmm->pi[k] = (real_t)kmeans.means_count[k] / gmm->N;
    }
    //Means: the K-means cluster centers.
    for (int k = 0; k < gmm->K; k++) {
        vec_copyfrom(gmm->gauss[k].mean_vector, kmeans.mean_vector[k]);
    }
    real_t** m_data = m.data;
    real_t* v_data = v.data;
    //Covariances: accumulate the outer product (x - mean)(x - mean)^T of
    //each sample into its cluster's covariance, then average per cluster.
    for (int k = 0; k < gmm->K; k++) {
        mat_init(gmm->gauss[k].covariance, 0);
    }
    for (int n = 0; n < gmm->N; n++) {
        vec_sub(gmm->sample_vector[n], kmeans.mean_vector[kmeans.sample_model[n]], v);
        for (int i = 0; i < m.rows; i++) {
            for (int j = 0; j < m.cols; j++) {
                m_data[i][j] = v_data[i] * v_data[j];
            }
        }
        mat_add(gmm->gauss[kmeans.sample_model[n]].covariance,
            m,
            gmm->gauss[kmeans.sample_model[n]].covariance);
    }

    for (int k = 0; k < gmm->K; k++) {
        //BUGFIX: guard against an empty cluster; dividing by a zero
        //means_count produced inf/NaN entries in the covariance.
        if (kmeans.means_count[k] > 0) {
            mat_mul_real(gmm->gauss[k].covariance,
                1.0f / kmeans.means_count[k],
                gmm->gauss[k].covariance);
        }
        gauss_vec_renew_cov(&gmm->gauss[k]);
    }

    //BUGFIX: the local K_Means buffers were never released before,
    //leaking memory on every call.
    K_Means_deallocate(&kmeans);
}

/// <summary>
/// E step: re-estimate the responsibilities gamma[n][k], the posterior
/// probability that sample n was generated by component k.
/// </summary>
void GMM_Estep(GMM* gmm)
{
    //Flip the responsibility double-buffer so GMM_is_converge can compare
    //the previous and current gamma.
    if (gmm->gamma == &gmm->gamma_1)gmm->gamma = &gmm->gamma_2;
    else gmm->gamma = &gmm->gamma_1;

    for (int n = 0; n < gmm->N; n++) {
        //PERF FIX: the normalizing sum does not depend on k, but the old
        //code recomputed it (and every component likelihood) inside the k
        //loop — K*K probability evaluations per sample instead of K.
        //Compute each weighted likelihood once, then normalize.
        real_t sum = 0;
        for (int k = 0; k < gmm->K; k++) {
            gmm->gamma->data[n][k] = gmm->pi[k] * gauss_vec_probability(gmm->gauss[k], gmm->sample_vector[n]);
            sum += gmm->gamma->data[n][k];
        }
        for (int k = 0; k < gmm->K; k++) {
            gmm->gamma->data[n][k] /= sum;
        }
    }
}

/// <summary>
/// M step: re-estimate the mixture parameters (mixing weights, means
/// and covariances of each Gaussian component) from the current
/// responsibilities gamma.
/// m is a D*D matrix and v has size D; both are caller-provided scratch
/// buffers so the iteration does not allocate memory repeatedly.
/// </summary>
void GMM_Mstep(GMM* gmm, mat m, vec v)
{
    //pi[k] first holds the effective number of samples of component k
    //(the sum of its responsibilities); it is only normalized into a
    //weight at the very end, because the unnormalized value is needed as
    //a divisor for the mean and covariance updates below.
    for (int k = 0; k < gmm->K; k++) {
        gmm->pi[k] = 0;
        for (int n = 0; n < gmm->N; n++) {
            gmm->pi[k] += gmm->gamma->data[n][k];   //normalization deferred to the end.
        }
    }
    //Vector v(D);
    //Matrix m(D, D);
    //Update the means: responsibility-weighted average of the samples.
    for (int k = 0; k < gmm->K; k++) {
        vec_init(gmm->gauss[k].mean_vector, 0);
        for (int n = 0; n < gmm->N; n++) {
            vec_mul(gmm->sample_vector[n], gmm->gamma->data[n][k], v);
            vec_add(gmm->gauss[k].mean_vector, v, gmm->gauss[k].mean_vector);
        }
        vec_mul(gmm->gauss[k].mean_vector, 1.0f / gmm->pi[k], gmm->gauss[k].mean_vector);
    }
    real_t** m_data = m.data;
    real_t* v_data = v.data;
    //Update the covariances: responsibility-weighted outer products of
    //the centered samples.
    //NOTE(review): if pi[k] is ever 0 (a component receiving no
    //responsibility), the divisions here and above produce inf/NaN —
    //confirm the E step guarantees pi[k] > 0.
    for (int k = 0; k < gmm->K; k++) {
        mat_init(gmm->gauss[k].covariance, 0);
        for (int n = 0; n < gmm->N; n++) {

            vec_sub(gmm->sample_vector[n], gmm->gauss[k].mean_vector, v);
            real_t temp = gmm->gamma->data[n][k] / gmm->pi[k];   //scale factor hoisted: one division per sample.
            for (int i = 0; i < m.rows; i++) {
                for (int j = 0; j < m.cols; j++) {
                    m_data[i][j] = v_data[i] * v_data[j] * temp;
                }
            }
            mat_add(gmm->gauss[k].covariance, m, gmm->gauss[k].covariance);
        }
        gauss_vec_renew_cov(&gmm->gauss[k]);
    }

    for (int k = 0; k < gmm->K; k++) {
        gmm->pi[k] = gmm->pi[k] / gmm->N;   //normalize by the total sample count.
    }

}

/// <summary>
/// Convergence test based on the responsibility matrix gamma, whose
/// elements sum to N. If the total absolute change between the two
/// gamma buffers is below N * 0.01 — i.e. the average per-sample
/// probability shift across the K components is under 1% — the
/// iteration is considered converged.
/// </summary>
bool GMM_is_converge(GMM* gmm)
{
    real_t** prev = gmm->gamma_1.data;
    real_t** curr = gmm->gamma_2.data;
    const int rows = gmm->gamma->rows;
    const int cols = gmm->gamma->cols;

    real_t total_change = 0;
    for (int r = 0; r < rows; r++) {
        for (int c = 0; c < cols; c++) {
            total_change += ABS(prev[r][c] - curr[r][c]);
        }
    }
    return total_change < gmm->N * 0.01f;
}


/// <summary>
/// Fit a Gaussian mixture model to the sample data with EM, then assign
/// each sample to its most probable component (sample_model).
/// </summary>
/// <param name="k">number of mixture components</param>
/// <param name="sample">sample matrix; each row vector is one sample</param>
/// <param name="max_iterate_count">upper bound on EM iterations; never exceeded</param>
/// <returns>number of iterations actually performed</returns>
int GMM_calculate(GMM* gmm, int k, mat sample, int max_iterate_count)
{

    GMM_deallocate(gmm);
    gmm->K = k;
    gmm->N = sample.rows;
    gmm->D = sample.cols;
    GMM_allocate(gmm);

    //Copy the sample matrix rows into the sample vectors.
    for (int i = 0; i < gmm->N; i++) {
        for (int j = 0; j < sample.cols; j++) {
            gmm->sample_vector[i].data[j] = sample.data[i][j];
        }
    }
    //Initialize the responsibility buffers. gamma_1 and gamma_2 must start
    //out different, otherwise GMM_is_converge() reports convergence before
    //the first EM step and the loop below never runs.
    gmm->gamma = &gmm->gamma_1;
    mat_init(gmm->gamma_1, 0);
    mat_init(gmm->gamma_2, 1);

    //Scratch buffers reused by every M step, so the iteration does not
    //allocate memory repeatedly.
    vec v;
    mat m;
    vec_allocate(&v, gmm->D);
    mat_allocate(&m, gmm->D, gmm->D);
    GMM_init_parameter(gmm, sample, m, v);

    int iterate_count = 0;
    while (GMM_is_converge(gmm) == false)
    {
        //BUGFIX: was '>', which allowed max_iterate_count + 1 iterations
        //and a return value exceeding the documented bound.
        if (iterate_count >= max_iterate_count) break;
        iterate_count++;
        GMM_Estep(gmm);
        GMM_Mstep(gmm, m, v);
    }
    vec_deallocate(&v);
    mat_deallocate(&m);

    //Converged: assign each sample to the component with the highest
    //responsibility.
    for (int n = 0; n < gmm->N; n++) {
        int index = 0;
        real_t best = gmm->gamma->data[n][0];
        for (int ki = 1; ki < gmm->K; ki++) {  //'ki' — the old inner 'k' shadowed the parameter.
            if (gmm->gamma->data[n][ki] > best) {
                best = gmm->gamma->data[n][ki];
                index = ki;
            }
        }
        gmm->sample_model[n] = index;
    }

    return iterate_count;
}


/// <summary>
/// Visual test of the Gaussian mixture model. Draws three 2-D Gaussian
/// sample clouds, fits a 3-component GMM with a growing iteration
/// budget, and colors the points by their most probable component so
/// the result can be checked by eye.
/// </summary>
void test_gmm()
{
    // NOTE(review): v and cov are allocated but never used below, and none
    // of the local allocations are released — tolerable in a one-shot
    // visual test, but a leak if this is ever called repeatedly.
    vec v;
    gauss_vec g;
    mat a, at, cov;

    vec_allocate(&v, 2);
    gauss_vec_init(&g, 2);
    mat_allocate(&a, 2, 2);
    mat_allocate(&at, 2, 2);
    mat_allocate(&cov, 2, 2);

    real_t theta;
    const int t1 = 100;  //samples 0-100: first distribution
    const int t2 = 300;  //samples 100-300: second distribution
#define len 600 
        //  const int len = 600; //samples 300-600: third distribution
    real_t x[len];
    real_t y[len];
    COLOR_RGB color[len];

    //First distribution: covariance = A * A^T, which is symmetric positive
    //semi-definite by construction. (theta is assigned but unused here.)
    {
        theta = PI_div_3;
        a.data[0][0] = 1, a.data[0][1] = 2;
        a.data[1][0] = -3, a.data[1][1] = 2;
        mat_transpose_to(a, at);
        mat_mul(a, at, g.covariance);
        mat_print(g.covariance);
        gauss_vec_renew_cov(&g);
        printfs(" \r\n init_covariance ");
        g.mean_vector.data[0] = 0;
        g.mean_vector.data[1] = 0;
    }
    for (int i = 0; i < t1; i++) {
        gauss_vec_randomize(g);
        x[i] = g.random.data[0];
        y[i] = g.random.data[1];
        color[i] = RGB_red;
    }

    //Second distribution: covariance built from a rotation-and-scale matrix.
    {
        theta = -PI_div_4;
        a.data[0][0] = 1 * cosf(theta), a.data[0][1] = 1 * sinf(theta);
        a.data[1][0] = -2 * sinf(theta), a.data[1][1] = 2 * cosf(theta);
        mat_transpose_to(a, at);
        mat_mul(a, at, g.covariance);

        mat_print(a);
        mat_print(at);
        mat_print(g.covariance);
        gauss_vec_renew_cov(&g);
        printfs(" \r\n init_covariance ");
        g.mean_vector.data[0] = 3;
        g.mean_vector.data[1] = 4;
    }
    for (int i = t1; i < t2; i++) {
        gauss_vec_randomize(g);
        x[i] = g.random.data[0];
        y[i] = g.random.data[1];
        color[i] = RGB_green;
    }

    //Third distribution. (theta is assigned but unused; the rotation
    //variant of A is kept below, commented out.)
    {
        theta = PI_div_3 / 2;
        a.data[0][0] = 2.8, a.data[0][1] = 1.4;
        a.data[1][0] = 1.8, a.data[1][1] = 1.7;
        //a.data[0][0] = 1 * cosf(theta), a.data[0][1] = 1 * sinf(theta);
        //a.data[1][0] = -1 * sinf(theta), a.data[1][1] = 1 * cosf(theta);
        mat_transpose_to(a, at);
        mat_mul(a, at, g.covariance);
        gauss_vec_renew_cov(&g);
        mat_print(g.covariance);
        printfs(" \r\n init_covariance ");
        g.mean_vector.data[0] = -5;
        g.mean_vector.data[1] = 4;
    }
    for (int i = t2; i < len; i++) {
        gauss_vec_randomize(g);
        x[i] = g.random.data[0];
        y[i] = g.random.data[1];
        color[i] = RGB_blue;
    }

    printfs(" \r\n begin  clustering  ");

    //Dummy sizes (1,1,1) so GMM_calculate's internal GMM_deallocate
    //starts from valid allocated pointers.
    GMM gmm;
    gmm.K = 1;
    gmm.N = 1;
    gmm.D = 1;
    GMM_allocate(&gmm);

    //Pack the generated points into the sample matrix, one row per sample.
    mat mdata;
    mat_allocate(&mdata, len, 2);
    for (int i = 0; i < len; i++) {
        mdata.data[i][0] = x[i];
        mdata.data[i][1] = y[i];
    }

    TwoFunctionPicture tp;
    TwoFunctionPicture_init(&tp, 0, 0, 700, 450);
    char title[] = "Curve Chart";          //chart title
    char xtitle[] = "xtitle time";          //x-axis title
    char y1title[] = "y1title cm";          //y-axis title
    char y2title[] = "y2title km";          //y-axis title
    TwoFunctionPicture_draw_title(&tp, title, xtitle, y1title, y2title);

    //Draw the raw samples colored by their true distribution.
    TwoFunctionPicture_draw_func(&tp,
        x, y, len,
        NULL, NULL, 0,
        0, 0, 0, color, NULL);
    GD_wait_key_input();	// press any key to continue
    //Re-fit with a growing iteration budget and redraw each time.
    for (int iterate = 1; iterate < 50; iterate++) {
        int t = GMM_calculate(&gmm, 3, mdata, iterate);
        printfs(" \r\n  gmm.calculate  == %d  ", t);
        //One color per component. NOTE(review): RGB_blue here coincides
        //with the third cloud's original color above — consider a distinct
        //color to tell "cluster 2" apart from "unclustered third cloud".
        COLOR_RGB kcolor[3] = { RGB_cyan,RGB_lightmagenta,RGB_blue };
        for (int i = 0; i < gmm.N; i++) {
            color[i] = kcolor[gmm.sample_model[i]];     //color each sample by its most probable component.
        }
        TwoFunctionPicture_draw_func(&tp,
            x, y, len,
            NULL, NULL, 0,
            0, 0, 0, color, NULL);
        GD_wait_key_input();	// press any key to continue

    }




}
//










