﻿/*
* 版权所有 东海仙人岛 2025年3月 
* B站：https://space.bilibili.com/627167269
* 功能需求：
* 手写数组输入功能，可视化展示机器学习神经网络的结果，模拟神经元链接，动态展示识别过程的变化
* 设计方案：
* 界面显示，采用cvui; 权重文件的读取，采用opencv 4.10.0
* 网络结构，输入层 28*28，隐藏层 64 ，输出层 10
* 激活函数
* 使用说明：
* 权重文件，采用MNIST训练的数据集
*左键书写，右键擦除，esc按键退出.左侧的复选框可以显示或隐藏神经元连线。
*2025-3-8 更新记录：由于之前用32位float数，导致负无穷小和无穷大的数判断出现问题，修改为64位数参与计算。
* 2025-4-22 改为卷积神经网络
*/
#include <iostream>
#include <torch/torch.h>
#include <opencv2/opencv.hpp>
#include <vector>
#include <string>
#include <cstring>
#include <fstream>
#include <opencv2/core/utils/logger.hpp>
#include <opencv2/highgui/highgui.hpp>
#include "CThread.h"
#include <filesystem>
#include <ctime>  // 添加头文件，用于记录时间
#include <torch/data/datasets/tensor.h>  // 注意：TensorDataset 定义在这里
#include <net.h>

using namespace std;
using namespace cv;
using namespace cv::ml;


// One (and only one) of your C++ files must define CVUI_IMPLEMENTATION
// before the inclusion of cvui.h to ensure its implementation is compiled.
#define CVUI_IMPLEMENTATION
#include "cvui.h"
#include "cv_puttextzh.h"

#define WINDOW_NAME "数字手写识别可视化 "
// One cell of a layer visualization: its absolute screen position plus the
// value (ink level or neuron activation) that is drawn at that position.
struct draw_s {
	int x;   // absolute x coordinate on the frame
	int y;   // absolute y coordinate on the frame
	float v; // cell value (ink level 0/255 on the board, activation elsewhere)
};
// Pen colour palette components (cvui packs colours as 0xAARRGGBB)
uchar doubleColorValue_R = 233;
uchar doubleColorValue_G = 56;
uchar doubleColorValue_B = 56;
uchar doubleColorValue_A = 128;
// Background colour components (fixed at opaque white)
unsigned int  doubleColorValue_bg_R = 255;
unsigned int  doubleColorValue_bg_G = 255;
unsigned int  doubleColorValue_bg_B = 255;
unsigned int  doubleColorValue_bg_A = 255;
unsigned int  doubleColorValue_A_arry[28 * 28] = { 0 };
unsigned int  bdColor;              // packed pen border colour
unsigned int  filColor;             // packed pen fill colour
unsigned int  filColor_bg;          // packed background fill colour
unsigned int  filColor_arry[28*28]; // per-cell fill colours of the board
// The 28x28 handwriting board cells
draw_s g_inputLayerDraw[28*28];
cv::Rect g_inRect = { 400,480,7,7 }; // board origin (x,y) and per-cell size (w,h)
// The 784 input cells unrolled into the display strip
draw_s g_inputLayerDraw784[28 * 28];
draw_s g_inputLayerDraw784_w[28 * 28];
// Hidden-layer positions and values, 64 results
draw_s g_hidLayerDraw[64];
// Output-layer positions and values, 10 results
draw_s g_outputLayerDraw[10];
int g_mouse_x;  // last mouse x position
int g_mouse_y;  // last mouse y position
cv::String g_ResultString = " ";  // prediction text shown in the UI
int g_gradw = 30;  // output slot width
int g_gradh = 50;  // output slot height
cv::Ptr<cv::ml::ANN_MLP> ann;  // legacy OpenCV MLP handle (not referenced in this file's visible code)
uint8_t g_mouseKeyleftRight = 0;  // pen value: 255 = left button (write), 0 = right button (erase)
// Scratch output buffer
cv::AutoBuffer<double> _buf(1568 + 10);
// Result/output matrix
Mat outputs;

// Per-layer weight matrices are read back as CV_64F
//Mat weightMat = ann->getWeights(0);
//***** The first-layer weights need special handling.

cv::String trainEPOCH = "10";
cv::String labelhw = "";
const std::string weight_file = "../weights/mnist_cnn_weights.pt"; // path to the weight file

const int batch_size = 1;
const double learning_rate = 0.01;
const int momentum = 0.5;  // NOTE(review): int truncates 0.5 to 0 — confirm the type is intended
//const int EPOCH = 2;

// Inference device; main() may switch this to CUDA when available
torch::Device device(torch::kCPU); // default to CPU

// Tensors capturing the convolution outputs: 1x64x7x7 and 1x32x14x14
torch::Tensor g_ts_conv77;
torch::Tensor g_ts_conv1414;
// Conv outputs copied into plain 3-D arrays for drawing
float conv_array[64][7][7];
float conv_array2[32][14][14];

auto inference_model = std::make_shared<Net>();  // the CNN declared in net.h

// Convert an OpenCV BGR image into a normalized 1x1x28x28 torch tensor.
// Pixels are scaled to [0, 1] and then standardized with the MNIST mean
// (0.1307) and std (0.3081) before the tensor is moved to `device`.
torch::Tensor matToTensor(const cv::Mat& image) {
	cv::Mat grayImg;
	cv::cvtColor(image, grayImg, cv::COLOR_BGR2GRAY); // collapse BGR to one channel
	cv::Mat small28;
	cv::resize(grayImg, small28, cv::Size(28, 28)); // the network expects 28x28 input
	small28.convertTo(small28, CV_32F, 1.0 / 255); // map [0, 255] -> [0, 1]

	// from_blob borrows the Mat's memory, so clone() before the Mat dies.
	torch::Tensor result = torch::from_blob(small28.data, { 1, 1, 28, 28 }, torch::kFloat32).clone();
	// Standardize with the MNIST training statistics.
	result = result.sub(0.1307).div(0.3081);
	// Transfer only after the CPU-side normalization is done.
	result = result.to(device);
	return result;
}
// Draw connection lines from each sufficiently active output slot
// (v > 0.1) to each sufficiently active hidden cell (v > 0.3).
void linkOut2hid(cv::Mat& frame)
{
	const cv::Scalar linkColor(231, 148, 31);
	const int numOut = sizeof(g_outputLayerDraw) / sizeof(g_outputLayerDraw[0]);
	const int numHid = sizeof(g_hidLayerDraw) / sizeof(g_hidLayerDraw[0]);
	for (int o = 0; o < numOut; o++)
	{
		// Skip inactive outputs up front; the inner loop result is the same.
		if (!(g_outputLayerDraw[o].v > 0.1))
			continue;
		// Line starts at the bottom-centre of the output slot.
		const cv::Point2i src(g_outputLayerDraw[o].x + g_gradw / 2, g_outputLayerDraw[o].y + g_gradh);
		for (int h = 0; h < numHid; h++)
		{
			if (g_hidLayerDraw[h].v > 0.3)
			{
				const cv::Point2i dst(g_hidLayerDraw[h].x + 7.0 / 2, g_hidLayerDraw[h].y);
				cv::line(frame, src, dst, linkColor, 1, 1);
			}
		}
	}
}
// Draw connection lines from every inked input cell (v > 0) to every
// strongly activated hidden cell (v > 0.7).
void linkInput2Hid(cv::Mat& frame)
{
	cv::Point2i pt2;

	for (int i = 0; i < sizeof(g_inputLayerDraw784) /sizeof(g_inputLayerDraw784[0]); i++)
	{
		// Line starts at the horizontal centre of the unrolled input cell.
		cv::Point2i pt1 = cv::Point2i(g_inputLayerDraw784[i].x + 7/2, g_inputLayerDraw784[i].y);
		
		for (int j = 0; j < sizeof(g_hidLayerDraw) / sizeof(g_hidLayerDraw[0]); j++)
		{
			pt2 = cv::Point2i(g_hidLayerDraw[j].x+ 7.0/2, g_hidLayerDraw[j].y +26);
			if (g_inputLayerDraw784[i].v > 0 && g_hidLayerDraw[j].v > 0.7 )
			{
				// BUG FIX: the original read `cvui:line(...)`, which the
				// compiler parsed as an unused label `cvui:` followed by an
				// unqualified call that only resolved to cv::line through
				// `using namespace cv`. Spell the intended namespace out.
				cv::line(frame, pt1, pt2, cv::Scalar(231, 148, 31), 1, 8, 0);
			}
		}	
	}
}


// Run one inference pass over the current handwriting board.
// Copies the 28x28 ink values (0/255) into a tensor, normalizes it the
// same way the training data was normalized, runs the CNN forward pass,
// stores the per-class probabilities in g_outputLayerDraw, and mirrors the
// captured convolution outputs (g_ts_conv77 / g_ts_conv1414) into the
// plain float arrays used by the drawing code.
void PredicationStart()
{
	// Copy the handwriting board values into a flat input array.
	float inputArry[28 * 28];
	for (int i = 0; i < 28 * 28; i++)
	{
		inputArry[i] = g_inputLayerDraw[i].v;
	}

	// Shape the input and scale [0, 255] -> [0, 1].
	Mat inputs = Mat(28, 28, CV_32FC1, inputArry);
	inputs.convertTo(inputs, CV_32F, 1.0 / 255);
	// from_blob borrows the Mat's memory, so clone() before it goes away.
	auto input_tensor = torch::from_blob(inputs.data, { 1, 1, 28, 28 }, torch::kFloat32).clone();
	// Standardize with the MNIST training mean / std.
	input_tensor = input_tensor.sub(0.1307).div(0.3081);
	// Move to the inference device once (the original called .to(device) twice).
	input_tensor = input_tensor.to(device);

	// Model inference.
	auto output = inference_model->forward(input_tensor);
	auto prediction = output.argmax(1);
	auto probabilities = torch::softmax(output, 1);
	// Confidence of the predicted class.
	auto confidence = probabilities[0][prediction.item<int>()].item<float>();
	g_ResultString = "识别结果:" + to_string(prediction.item<int>()) + " 可信度:" + to_string(confidence);

	// Per-class probabilities drive the output bar heights.
	for (int j = 0; j < probabilities.size(1); ++j) {
		g_outputLayerDraw[j].v = probabilities[0][j].item<float>();
	}

	// Copy the captured 1x64x7x7 conv output into conv_array for drawing.
	if (g_ts_conv77.numel() > 0) {
		g_ts_conv77 = g_ts_conv77.to(torch::kFloat32);
		auto accessor = g_ts_conv77.accessor<float, 4>();  // [batch, channel, height, width]
		for (int c = 0; c < 64; ++c) {
			for (int h = 0; h < 7; ++h) {
				for (int w = 0; w < 7; ++w) {
					conv_array[c][h][w] = accessor[0][c][h][w];  // batch index 0
				}
			}
		}
	}
	// Copy the captured 1x32x14x14 conv output into conv_array2 for drawing.
	if (g_ts_conv1414.numel() > 0) {
		g_ts_conv1414 = g_ts_conv1414.to(torch::kFloat32);
		auto accessor2 = g_ts_conv1414.accessor<float, 4>();  // [batch, channel, height, width]
		for (int c = 0; c < 32; ++c) {
			for (int h = 0; h < 14; ++h) {
				for (int w = 0; w < 14; ++w) {
					conv_array2[c][h][w] = accessor2[0][c][h][w];  // batch index 0
				}
			}
		}
	}
}
// Handles mouse movement and button state: while a button is held over the
// 28x28 board, the cell under the cursor (plus three neighbours, to thicken
// the stroke) is set to the current pen value — 255 for the left button
// (write), 0 for the right button (erase) — and inference is re-run.
void mouseAction(cv::Mat &frame)
{
	// Two small indicator rectangles showing which button is active.
	cv::Rect rectangleL(130, 10, 20, 20);
	cv::Rect rectangleR(150, 10, 20, 20);
	// NOTE(review): 0xdff000000 exceeds 32 bits and is truncated when passed
	// as an unsigned int colour — confirm the intended 0xAARRGGBB value.
	cvui::rect(frame, rectangleL.x, rectangleL.y, rectangleL.width, rectangleL.height, 0xaaaaaa, 0xdff000000);
	cvui::rect(frame, rectangleR.x, rectangleR.y, rectangleR.width, rectangleR.height, 0xaaaaaa, 0xdff000000);

	g_mouse_x = cvui::mouse().x;
	g_mouse_y = cvui::mouse().y;
	cvui::printf(frame, 10, 10, "(%d,%d)", cvui::mouse().x, cvui::mouse().y);
	// Did any mouse button go down? Fires once, at the moment of the press.
	if (cvui::mouse(cvui::DOWN)) {
		// Position the rectangle at the mouse pointer.
		//cvui::text(frame, 10, 70, "<-");
	}

	// Is any mouse button down (pressed)? Fires continuously while held,
	// which is what lets the user write by dragging.
	if (cvui::mouse(cvui::IS_DOWN)) {
		// Adjust rectangle dimensions according to mouse pointer
		//cvui::text(frame, 10, 70, " clicked!");
		// Only react while the cursor is inside the 28x28 board area.
		if ((g_mouse_x > g_inRect.x && g_mouse_x < g_inRect.x + g_inRect.width*28)
			&& (g_mouse_y > g_inRect.y && g_mouse_y < g_inRect.y + g_inRect.height*28))
		{
			//cout << "g_mouse_x" << g_mouse_x << endl;

			// Convert pixel position to board cell coordinates.
			int gx = (g_mouse_x - g_inRect.x) / g_inRect.width;
			int gy = (g_mouse_y - g_inRect.y) / g_inRect.height;
			g_inputLayerDraw[gy * 28 + gx].x = g_mouse_x;// absolute coordinates, used for the link lines
			g_inputLayerDraw[gy * 28 + gx].y = g_mouse_y;
			g_inputLayerDraw[gy * 28 + gx].v = g_mouseKeyleftRight; // left button writes, right button erases
			if (gx > 1 && gx < 26 && gy >1 && gy < 26 && true) // set to true to thicken the stroke
			{
				// Also paint the right, bottom and bottom-right neighbours.
				int gxE = gx + 1;
				int gyE = gy + 1;
				g_inputLayerDraw[gyE * 28 + gxE].x = g_mouse_x;// absolute coordinates, used for the link lines
				g_inputLayerDraw[gyE * 28 + gxE].y = g_mouse_y;
				g_inputLayerDraw[gyE * 28 + gxE].v = g_mouseKeyleftRight; // left button writes, right button erases
				gxE = gx ;
				gyE = gy + 1;
				g_inputLayerDraw[gyE * 28 + gxE].x = g_mouse_x;// absolute coordinates, used for the link lines
				g_inputLayerDraw[gyE * 28 + gxE].y = g_mouse_y;
				g_inputLayerDraw[gyE * 28 + gxE].v = g_mouseKeyleftRight; // left button writes, right button erases
				gxE = gx + 1;
				gyE = gy ;
				g_inputLayerDraw[gyE * 28 + gxE].x = g_mouse_x;// absolute coordinates, used for the link lines
				g_inputLayerDraw[gyE * 28 + gxE].y = g_mouse_y;
				g_inputLayerDraw[gyE * 28 + gxE].v = g_mouseKeyleftRight; // left button writes, right button erases
			}
		
			// Run recognition; it only starts once the mouse is pressed.
			PredicationStart();
		}
	}

	// Did any mouse button go up?
	if (cvui::mouse(cvui::UP)) {
		// Hide the rectangle
	}

	// Was the mouse clicked (any button went down then up)?
	if (cvui::mouse(cvui::CLICK)) {
		//cvui::text(frame, 10, 70, " clicked!");
	}
	// Left button selects the pen (write); light the left indicator.
	if (cvui::mouse(WINDOW_NAME, cvui::LEFT_BUTTON, cvui::IS_DOWN))
	{
		//cvui::text(frame, 10, 70, "<-");
		g_mouseKeyleftRight = 255;
		cvui::rect(frame, rectangleL.x, rectangleL.y, rectangleL.width, rectangleL.height, bdColor, filColor);
	}
	// Right button selects the eraser; light the right indicator.
	if (cvui::mouse(WINDOW_NAME, cvui::RIGHT_BUTTON, cvui::IS_DOWN))
	{
		//cvui::text(frame, 10, 70, "->");
		g_mouseKeyleftRight = 0;
		cvui::rect(frame, rectangleR.x, rectangleR.y, rectangleR.width, rectangleR.height, 0xaaaaaa, 0xdaaaa0000);
	}
}

// Render the 28x28 handwriting board: a first pass paints every cell
// (weight-tinted where g_inputLayerDraw784_w is set, plain background
// otherwise), then a second pass overlays the cells the user has inked.
void inputLayerDraw(cv::Mat& frame)
{
	for (int idx = 0; idx < 28 * 28; idx++)
	{
		const int col = idx % 28;
		const int row = idx / 28;
		const int px = g_inRect.x + col * g_inRect.width;
		const int py = g_inRect.y + g_inRect.height * row;
		if (g_inputLayerDraw784_w[idx].v > 0) {
			// Cell carries weight information: use its per-cell colour.
			cvui::rect(frame, px, py, g_inRect.width, g_inRect.height, filColor_arry[idx], filColor_arry[idx]);
		}
		else {
			cvui::rect(frame, px, py, g_inRect.width, g_inRect.height, 0xaaaaaa, 0xffa0a0a0); // background cell
		}
	}
	for (int idx = 0; idx < 28 * 28; idx++)
	{
		if (g_inputLayerDraw[idx].v == 255) { // inked by the left mouse button
			const int px = g_inRect.x + (idx % 28) * g_inRect.width;
			const int py = g_inRect.y + g_inRect.height * (idx / 28);
			cvui::rect(frame, px, py, g_inRect.width, g_inRect.height, bdColor, filColor);
		}
	}
}

// Draw the 784 board cells unrolled into a 7-row by 120-column strip and
// record each cell's absolute screen position in g_inputLayerDraw784
// (used later to draw input-to-hidden link lines).
void inputExpansionLayerDraw(cv::Mat& frame) //
{
	int count = 0;
	int gradwh = 7;
	for (int j = 0; j < 7; j++)
		for (int i = 0; i < 120; i++,count++)
		{
			//cvui::rect(frame, 80 + i * (gradwh+2),  360 + (gradwh+2) * j, gradwh, gradwh, 0xaaaaaa, 0xffa0a0a0);
			if (g_inputLayerDraw[count].v == 255) {
				
				// Inked cell: draw with the current pen colour (smaller value = darker).
				cvui::rect(frame, 80 + i * (gradwh + 2), 394 + (gradwh + 4) * j , gradwh, gradwh, bdColor, filColor);
			}
			else {
				cvui::rect(frame, 80 + i * (gradwh + 2), 394 + (gradwh + 4) * j, gradwh, gradwh, 0xaaaaaa, 0xffa0a0a0); // background cell
			}
			g_inputLayerDraw784[count].x = 80 + i * (gradwh + 2);
			g_inputLayerDraw784[count].y = 394 + (gradwh + 4) * j;
			//g_inputLayerDraw784[count].v = g_inputLayerDraw[count].v;
			// 7 * 120 = 840 > 784, so stop after the last valid cell. The break
			// only leaves the inner loop, but it can only fire on the last row
			// (j == 6), after which the outer loop terminates as well — so no
			// out-of-bounds index is ever used.
			if (count >= 784 -1)break;
		}
}
// Piecewise-linear grey-level fit (approximates y = 255 * ((x-1)/9)^2):
// maps a convolution activation onto a packed 0xAARRGGBB colour where
// R == G == B == t and the alpha byte is fixed at 0x01.
// Cleanup vs. original: removed the four unused locals that shadowed the
// doubleColorValue_bg_* globals, and dropped the redundant lower-bound
// re-checks in the else-if chain (each branch already implies them).
inline unsigned int  colorMaker(float conv_value)
{
	// Brightness t runs from 255 (white, inactive) down to 0 (black, strong).
	// A NaN input falls through every comparison and lands on t = 0, exactly
	// as in the original chain.
	float t;
	if (conv_value <= 0)           t = 255;                          // inactive: pure white
	else if (conv_value < 1)       t = 255.0 - (35.0 * conv_value);  // 255 .. 220
	else if (conv_value < 2)       t = 273.0 - (71.0 * conv_value);  // ~202 .. 131
	else if (conv_value < 3)       t = 309.0 - (89 * conv_value);    // ~131 .. 42
	else if (conv_value < 12)      t = 76.0 - (5 * conv_value);      // ~61 .. 16
	else                           t = 0;                            // saturated: black
	//cout << "color " << conv_value << " t " << t << endl;

	const unsigned int grey = (unsigned int)(t);
	// Replicate the grey level into the B, G and R bytes; alpha byte = 0x01.
	return grey | (grey << 8) | (grey << 16) | (1u << 24);
}
// Visualize both convolution stages: 32 feature maps of 14x14 cells
// (each cell drawn 3x3 px) and 64 feature maps of 7x7 cells (4x4 px).
void hidLayerDraw(cv::Mat& frame)
{
	// First convolution stage: 32 maps, 16 per row, 40 px tiles.
	const int tile1 = 40;
	for (int map = 0; map < 32; map++)
	{
		const int baseX = 225 + (map % 16) * (tile1 + 2);
		const int baseY = 296 + 42 * (map / 16);
		cvui::rect(frame, baseX, baseY, tile1, tile1, 0xaaaaaa, 0xffa0a0a0); // tile background
		for (int row = 0; row < 14; ++row) {
			for (int col = 0; col < 14; ++col) {
				const unsigned int shade = colorMaker(conv_array2[map][row][col]);
				cvui::rect(frame, baseX + (3 * col), baseY + (3 * row), 3, 3, shade, shade);
			}
		}
	}
	// Second convolution stage: 64 maps, 32 per row, 28 px tiles.
	const int tile2 = 28;
	for (int map = 0; map < 64; map++)
	{
		const int baseX = 208 + (map % 32) * (tile2 + 2);
		const int baseY = 228 + 30 * (map / 32);
		cvui::rect(frame, baseX, baseY, tile2, tile2, 0xaaaaaa, 0xffa0a0a0); // tile background
		for (int row = 0; row < 7; ++row) {
			for (int col = 0; col < 7; ++col) {
				const unsigned int shade = colorMaker(conv_array[map][row][col]);
				cvui::rect(frame, baseX + (4 * col), baseY + (4 * row), 4, 4, shade, shade);
			}
		}
	}
}
// Draw the ten output-class slots with their digit labels, and record each
// slot's top-left corner in g_outputLayerDraw for the probability bars and
// link-line rendering done elsewhere.
void outputLayerDraw(cv::Mat& frame)
{
	const int baseX = 313;
	const int baseY = 108;
	for (int digit = 0; digit < 10; digit++)
	{
		const int cellX = baseX + digit * (g_gradw + 15);
		cvui::rect(frame, cellX, baseY, g_gradw, g_gradh, 0xaaaaaa, 0xffa0a0a0);
		cvui::text(frame, cellX + 5, baseY - 20, to_string(digit), 0.7, 1);
		g_outputLayerDraw[digit].x = cellX;
		g_outputLayerDraw[digit].y = baseY;
	}
}


// Program entry point: selects the inference device, loads the trained CNN
// weights, then runs the cvui event loop that renders the handwriting board,
// the layer visualizations and the prediction result until the user quits.
int main(int argc, const char* argv[])
{
#if USECUDA_BOOL_CONST
	if (!torch::cuda::is_available()) {
		std::cerr << "CUDA is not available. Switching to CPU..." << std::endl;
		device = torch::Device(torch::kCPU); // GPU unavailable: fall back to the CPU
	}
	else {
		std::cerr << "CUDA is available...." << std::endl;
		device = torch::Device(torch::kCUDA); // GPU available: run inference on CUDA
	}
	std::cout << "USECUDA_BOOL_CONST is true" << std::endl;
#else
	std::cout << "USECUDA_BOOL_CONST is false" << std::endl;
#endif
	//cout << "@ 2025 DongHai XianRen 控制台，运行时请勿关闭" << endl;
	bool Display_output_link = true;
	bool Display_input_link = false;
	bool windowsShow = true;
	//outputs = Mat(1, 10, CV_32F, buf + 1568);
	outputs.create(1, 10, CV_64F);

	utils::logging::setLogLevel(utils::logging::LOG_LEVEL_ERROR); // print OpenCV errors only
	// Create a frame where components will be rendered to.
	cv::Mat frame = cv::Mat(720, 1200, CV_8UC3);
	memset(g_outputLayerDraw, 0, sizeof(g_outputLayerDraw));

	// Init cvui and tell it to create a OpenCV window, i.e. cv::namedWindow(WINDOW_NAME).
	cvui::init(WINDOW_NAME);

	try {
#if 1
		// Load the trained weights for inference; bail out if missing.
		//const std::string weight_file = "../weights/mnist_cnn_weights.pt"; // 权重文件路径
		if (std::filesystem::exists(weight_file)) {
			torch::load(inference_model, weight_file);
			std::cout << "权重文件加载成功 " << weight_file << std::endl;
		}
		else {
			std::cout << "权重文件不存在，退出" << weight_file << std::endl;
			return 0;
		}
		
		inference_model->to(device); // move the inference model to the selected device
		// eval() switches the model to evaluation mode; unlike training mode
		// it disables modules such as Dropout and Batch Normalization updates.
		inference_model->eval();
		std::cout << "Model weights loaded for inference " << weight_file << std::endl;
		// Single-image inference from a local file — currently disabled.
		//std::string image_path = "../data/0.jpg"; // 本地图片路径
		//inference(inference_model, image_path);
#endif		
	}
	catch (const std::exception& e) {
		std::cerr << "Runtime error: " << e.what() << std::endl;
		return -1;
	}
	 
	while (windowsShow) {
		// Compose the global pen colour from the trackbar components.
		// NOTE(review): the locals r/g/b actually hold the B/G/R bytes
		// respectively; the packed result is 0xAARRGGBB as cvui expects.
		unsigned int r, g, b,a;// = (unsigned int)g_inputLayerDraw784[count].v;
		r = doubleColorValue_B;
		g = doubleColorValue_G << 8;
		b = doubleColorValue_R << 16;
		a = doubleColorValue_A << 24;
		bdColor = r | g | b | a;
		filColor = bdColor;
		//filColor |= 0xff00000000;

		// Fill the frame with a nice color
		frame = cv::Scalar(255, 255, 255);

		// Render UI components to the frame
		//cvui::text(frame, 226, 13, "ANN Handwriting Visualization",1,1);
		 cvZH::putTextZH(frame,"数字手写识别可视化",	cv::Point(226, 13),	CV_RGB(0, 0, 0),30);
		 cvZH::putTextZH(frame, "卷积1: 32 14*14", cv::Point(37, 311), CV_RGB(0, 0, 0), 20);
		 cvZH::putTextZH(frame, "卷积2: 64 7*7", cv::Point(41, 225), CV_RGB(0, 0, 0), 20);
		 cvZH::putTextZH(frame, "全连接1: 1, 3136", cv::Point(53, 185), CV_RGB(0, 0, 0), 20);
		// cvZH::putTextZH(frame, "全连接2: 1, 10", cv::Point(53, 165), CV_RGB(0, 0, 0), 20);
		 cvZH::putTextZH(frame, "输出层 10", cv::Point(153, 107), CV_RGB(0, 0, 0), 20);
		 cvZH::putTextZH(frame, "手写板 28*28", cv::Point(444, 688), CV_RGB(0, 0, 0), 20);
		

		
		// ARGB trackbars controlling the pen colour components.
		cvui::text(frame, 890, 500, "A", 0.6, 1);
		cvui::trackbar(frame, 895, 485, 150, &doubleColorValue_A, (uchar)0, (uchar)255, 0, "%.0Lf");
		cvui::text(frame, 890, 540, "R", 0.6, 1);
		cvui::trackbar(frame, 895, 525, 150, &doubleColorValue_R, (uchar)0, (uchar)255, 0, "%.0Lf");
		cvui::text(frame, 890, 580, "G", 0.6, 1);
		cvui::trackbar(frame, 895, 565, 150, &doubleColorValue_G, (uchar)0, (uchar)255, 0, "%.0Lf");
		cvui::text(frame, 890, 620, "B", 0.6, 1);
		cvui::trackbar(frame, 895, 605, 150, &doubleColorValue_B, (uchar)0, (uchar)255, 0, "%.0Lf");

		// Mouse handling (write/erase + inference), then all visual layers.
		mouseAction(frame);
		inputLayerDraw(frame);
		inputExpansionLayerDraw(frame);
		hidLayerDraw(frame);
		outputLayerDraw(frame);

		// Show the prediction result text.
		//cvui::text(frame, 777, 87, g_ResultString, 0.7, 1);

		//const char* p = g_ResultString.c_str();
		cvZH::putTextZH(frame, g_ResultString.c_str(), cv::Point(777, 87), CV_RGB(0, 0, 0), 20);
		// Draw the probability bars inside the output slots (height ~ probability).
		for (int i = 0; i < 10; i++)
		{
			//cout << i << "= " << g_outputLayerDraw[i].v << endl;
			//cout << g_outputLayerDraw[i].x << " " << g_outputLayerDraw[i].y << " " << g_gradh * g_outputLayerDraw[i].v + 1 << endl;
			cvui::rect(frame, g_outputLayerDraw[i].x, g_outputLayerDraw[i].y, g_gradw, g_gradh * g_outputLayerDraw[i].v + 1, bdColor, filColor);
		}

		// Button handling.
		if (cvui::button(frame, 666, 638, "Clear")) {

			memset(g_inputLayerDraw, 0, sizeof(g_inputLayerDraw));
			memset(g_outputLayerDraw, 0, sizeof(g_outputLayerDraw));
			memset(g_hidLayerDraw, 0, sizeof(g_hidLayerDraw));
			memset(g_inputLayerDraw784, 0, sizeof(g_inputLayerDraw784));
			g_ResultString = " ";
		}
		if (cvui::button(frame, 1124, 8, "&Quit")) {
			break;
		}
		/*if (cvui::button(frame, 765, 600, "&Train")) {
			trainOneletter();
		}
		cvui::input(frame, 764, 496, 50, "myInput", labelhw);
		cvui::input(frame, 765, 529, 50, "trainEPOCH", trainEPOCH);*/
		//if (cvui::button(frame, 57, 166, "Link")) {
		//	linkOut2hid(frame);
		//}
		// Link overlays (the checkboxes that toggled them are commented out,
		// so the Display_* flags are effectively fixed).
		//cvui::checkbox(frame, 37, 166, "", &Display_output_link);
		//cvZH::putTextZH(frame, "显示输出层连接", cv::Point(61, 161), CV_RGB(0, 0, 0), 20);
		if(Display_output_link) linkOut2hid(frame);
		//cvui::checkbox(frame, 37, 356, "", &Display_input_link);
		//cvZH::putTextZH(frame, "显示输入层连接", cv::Point(61, 351), CV_RGB(0, 0, 0), 20);
		if (Display_input_link) linkInput2Hid(frame);
		// Update cvui stuff and show everything on the screen
		//copywrite of
		cvui::text(frame, 1004, 677, "@ 2025 DongHai XianRen", 0.4, 1);
		//cvui::text(frame, 41, 494, "Input layer 28*28", 0.5, 1);
		cvZH::putTextZH(frame, "输入层 28*28", cv::Point(41, 494), CV_RGB(0, 0, 0), 20, "Arial");
		//cvui::text(frame, 41, 304, "Hid layer 64", 0.5, 1);
		//cvui::text(frame, 132, 105, "output layer 10", 0.5, 1);
		

		// This function must be called *AFTER* all UI components. It does
		// all the behind the scenes magic to handle mouse clicks, etc.
		cvui::update();
		cvui::imshow(WINDOW_NAME, frame);

		int keyvalue = cv::waitKey(20);
		if (keyvalue == 27 || keyvalue == 81) { // ESC or 'Q' exits
			break;
		} 
		if (keyvalue == 67) { // 'C' clears the board and all cached results
			memset(g_inputLayerDraw, 0, sizeof(g_inputLayerDraw));
			memset(g_outputLayerDraw, 0, sizeof(g_outputLayerDraw));
			memset(g_hidLayerDraw, 0, sizeof(g_hidLayerDraw));
			memset(g_inputLayerDraw784, 0, sizeof(g_inputLayerDraw784));
			g_ResultString = " ";
			memset(conv_array, 0, sizeof(conv_array));
			memset(conv_array2, 0, sizeof(conv_array2));
		}
		// Stop when the user closes the window from the title bar.
		if (getWindowProperty(WINDOW_NAME, WND_PROP_AUTOSIZE) != 1)
		{
			break;
		}	
	}
	destroyAllWindows();
	return 0;
}