﻿using System;
using System.IO;
using System.Linq;
using System.Collections.Generic;
using SkiaSharp;

public static class SimpleOcr
{
    // ======== Load an image with SkiaSharp and scale it to the network input ========

    /// <summary>
    /// Loads the image at <paramref name="path"/>, resizes it to
    /// <paramref name="targetW"/> x <paramref name="targetH"/>, and returns the
    /// pixels as row-major grayscale values in [0, 1] (BT.601 luma weights).
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// Thrown when the file cannot be decoded as an image or the resize fails.
    /// </exception>
    public static float[] ImageToInput(string path, int targetW = 28, int targetH = 28)
    {
        // SKBitmap.Decode and SKBitmap.Resize return null on failure rather than
        // throwing; previously that surfaced later as an opaque NullReferenceException.
        using var bitmap = SKBitmap.Decode(path)
            ?? throw new InvalidOperationException($"Cannot decode image file: {path}");
        using var resized = bitmap.Resize(new SKImageInfo(targetW, targetH), SKFilterQuality.High)
            ?? throw new InvalidOperationException($"Cannot resize image file: {path}");

        var result = new float[targetW * targetH];
        for (int y = 0; y < targetH; y++)
        {
            for (int x = 0; x < targetW; x++)
            {
                var c = resized.GetPixel(x, y);
                // BT.601 luma, normalized from [0, 255] to [0, 1].
                float gray = (0.299f * c.Red + 0.587f * c.Green + 0.114f * c.Blue) / 255f;
                result[y * targetW + x] = gray;
            }
        }
        return result;
    }

    // ======== Minimal feed-forward building block: a fully connected layer ========
    public class Dense
    {
        public int In, Out;
        public float[,] W;  // weights, shape [Out, In]
        public float[] B;   // biases, length Out (zero-initialized)

        /// <summary>
        /// Creates a layer with Xavier-style uniform weight initialization
        /// in [-sqrt(2/(in+out)), +sqrt(2/(in+out))] and zero biases.
        /// </summary>
        public Dense(int input, int output, Random rng)
        {
            In = input; Out = output;
            W = new float[Out, In];
            B = new float[Out];
            double scale = Math.Sqrt(2.0 / (input + output));
            // Row-major fill order so a seeded Random reproduces identical weights.
            for (int row = 0; row < Out; row++)
                for (int col = 0; col < In; col++)
                    W[row, col] = (float)((rng.NextDouble() * 2 - 1) * scale);
        }

        /// <summary>Computes y = W·x + B, accumulating in double for stability.</summary>
        public float[] Forward(float[] x)
        {
            var y = new float[Out];
            for (int row = 0; row < Out; row++)
            {
                double acc = B[row];
                for (int col = 0; col < In; col++)
                    acc += W[row, col] * x[col];
                y[row] = (float)acc;
            }
            return y;
        }

        /// <summary>
        /// One SGD backward step: returns the gradient with respect to the layer
        /// input and updates W and B in place with learning rate <paramref name="lr"/>.
        /// </summary>
        public float[] Backward(float[] x, float[] gradOut, float lr)
        {
            // The input gradient must be taken against the *pre-update* weights,
            // so compute it before touching W.
            var gradIn = new float[In];
            for (int col = 0; col < In; col++)
            {
                double acc = 0;
                for (int row = 0; row < Out; row++)
                    acc += W[row, col] * gradOut[row];
                gradIn[col] = (float)acc;
            }
            // Now apply the SGD update to weights and biases.
            for (int row = 0; row < Out; row++)
            {
                for (int col = 0; col < In; col++)
                    W[row, col] -= lr * gradOut[row] * x[col];
                B[row] -= lr * gradOut[row];
            }
            return gradIn;
        }
    }

    // Element-wise ReLU: keeps positive components, zeroes the rest.
    static float[] ReLU(float[] x)
    {
        var y = new float[x.Length];
        for (int i = 0; i < x.Length; i++)
            y[i] = x[i] > 0 ? x[i] : 0f;
        return y;
    }

    // ReLU gradient: passes grad through only where the activation was positive.
    static float[] ReLUBack(float[] x, float[] grad)
    {
        var y = new float[x.Length];
        for (int i = 0; i < x.Length; i++)
            y[i] = x[i] > 0 ? grad[i] : 0f;
        return y;
    }

    /// <summary>
    /// Numerically stable softmax cross-entropy loss for a single example.
    /// Returns -log(softmax(logits)[label]) and writes the gradient with
    /// respect to the logits (softmax - one_hot) into <paramref name="grad"/>.
    /// </summary>
    static float SoftmaxLoss(float[] logits, int label, out float[] grad)
    {
        int n = logits.Length;
        float max = logits.Max(); // subtract max before exp for numerical stability
        double sum = 0;
        var exp = new double[n];
        for (int i = 0; i < n; i++)
        {
            // BUG FIX: the previous version divided (logits[i] - max) by a
            // temperature of 2.0 but still returned (probs - one_hot) as the
            // gradient. That formula is the gradient only at temperature 1, so
            // loss and gradient disagreed; the temperature is removed.
            exp[i] = Math.Exp(logits[i] - max);
            sum += exp[i];
        }
        var probs = exp.Select(v => (float)(v / sum)).ToArray();
        grad = probs.ToArray();
        grad[label] -= 1f;
        // Epsilon guards against log(0) when the true class probability underflows.
        return -MathF.Log(probs[label] + 1e-8f);
    }

    /// <summary>Three-layer MLP: two hidden ReLU layers and a linear output layer.</summary>
    public class SimpleNet
    {
        public Dense fc1, fc2, fc3;

        public SimpleNet(int input, int hidden1, int hidden2, int output, Random rng)
        {
            fc1 = new Dense(input, hidden1, rng);
            fc2 = new Dense(hidden1, hidden2, rng);
            fc3 = new Dense(hidden2, output, rng);
        }

        /// <summary>
        /// Forward pass. Also returns the two hidden activations, which the
        /// backward pass needs; the ReLU mask can be recovered from them because
        /// an activation is positive exactly when its pre-activation was.
        /// </summary>
        public (float[] logits, float[] a1, float[] a2) Forward(float[] x)
        {
            var hidden1 = ReLU(fc1.Forward(x));
            var hidden2 = ReLU(fc2.Forward(hidden1));
            return (fc3.Forward(hidden2), hidden1, hidden2);
        }

        /// <summary>One SGD step on a single labelled example; returns its loss.</summary>
        public float TrainOne(float[] x, int label, float lr)
        {
            var (logits, act1, act2) = Forward(x);
            float loss = SoftmaxLoss(logits, label, out var dLogits);

            // Backpropagate layer by layer; each Backward call both returns the
            // upstream gradient and applies the weight update for that layer.
            var dAct2 = fc3.Backward(act2, dLogits, lr);
            var dPre2 = ReLUBack(act2, dAct2);
            var dAct1 = fc2.Backward(act1, dPre2, lr);
            var dPre1 = ReLUBack(act1, dAct1);
            fc1.Backward(x, dPre1, lr);

            return loss;
        }

        /// <summary>Returns the index of the largest logit (first one wins ties).</summary>
        public int Predict(float[] x)
        {
            var (logits, _, _) = Forward(x);
            int best = 0;
            for (int i = 1; i < logits.Length; i++)
                if (logits[i] > logits[best])
                    best = i;
            return best;
        }
    }

    // ======== Dataset loading and training ========

    /// <summary>
    /// Loads a labelled image dataset laid out as root/&lt;labelName&gt;/*.png.
    /// Returns the inputs, the integer label for each input, and the label
    /// names (label index i corresponds to labelNames[i]).
    /// </summary>
    public static (float[][], int[], string[]) LoadDataset(string root)
    {
        // BUG FIX: Directory.GetDirectories/GetFiles make no ordering guarantee,
        // so label indices (and therefore a saved model's class order) could
        // differ between machines or runs. Sort ordinally for determinism.
        var labelDirs = Directory.GetDirectories(root)
            .OrderBy(d => Path.GetFileName(d), StringComparer.Ordinal)
            .ToArray();
        var labelNames = labelDirs.Select(Path.GetFileName).ToArray();
        var data = new List<float[]>();
        var labels = new List<int>();
        foreach (var (dir, idx) in labelDirs.Select((d, i) => (d, i)))
        {
            foreach (var f in Directory.GetFiles(dir, "*.png").OrderBy(n => n, StringComparer.Ordinal))
            {
                var x = ImageToInput(f);
                data.Add(x);
                labels.Add(idx);
            }
        }
        return (data.ToArray(), labels.ToArray(), labelNames);
    }

    /// <summary>
    /// Trains a fresh network on the dataset under <paramref name="trainDir"/>,
    /// prints the average loss per epoch, and saves the result to
    /// <paramref name="modelPath"/>.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown when no training images are found.</exception>
    public static SimpleNet Train(string trainDir, int epochs = 10, float lr = 0.01f,
                                  string modelPath = "model.bin")
    {
        var (trainX, trainY, labels) = LoadDataset(trainDir);
        // Guard: an empty dataset previously produced a division by zero below
        // (and a network with zero output classes).
        if (trainX.Length == 0)
            throw new InvalidOperationException($"No training images found under: {trainDir}");

        // Fixed seed so repeated runs start from identical weights.
        var net = new SimpleNet(28 * 28, 128, 64, labels.Length, new Random(1));
        for (int e = 0; e < epochs; e++)
        {
            double lossSum = 0;
            for (int i = 0; i < trainX.Length; i++)
                lossSum += net.TrainOne(trainX[i], trainY[i], lr);
            Console.WriteLine($"Epoch {e + 1}: avg loss = {lossSum / trainX.Length:F4}");
        }
        SaveModel(net, modelPath);
        return net;
    }

    // ======== Model save / load ========

    /// <summary>
    /// Serializes the network to a binary file. For each of the three layers,
    /// in order: In and Out (int32), then W in row-major order, then B
    /// (float32 each), using BinaryWriter's little-endian encoding.
    /// </summary>
    public static void SaveModel(SimpleNet net, string path)
    {
        using var bw = new BinaryWriter(File.Create(path));
        foreach (var layer in new[] { net.fc1, net.fc2, net.fc3 })
        {
            bw.Write(layer.In);
            bw.Write(layer.Out);
            for (int row = 0; row < layer.Out; row++)
                for (int col = 0; col < layer.In; col++)
                    bw.Write(layer.W[row, col]);
            foreach (var bias in layer.B)
                bw.Write(bias);
        }
    }

    /// <summary>
    /// Deserializes a network written by <see cref="SaveModel"/>; the file must
    /// contain exactly three layers, each as In/Out followed by W then B.
    /// </summary>
    public static SimpleNet LoadModel(string path)
    {
        using var br = new BinaryReader(File.OpenRead(path));
        Dense ReadDense()
        {
            int input = br.ReadInt32();
            int output = br.ReadInt32();
            // The constructor's random init is immediately overwritten below.
            var layer = new Dense(input, output, new Random(1));
            for (int row = 0; row < output; row++)
                for (int col = 0; col < input; col++)
                    layer.W[row, col] = br.ReadSingle();
            for (int row = 0; row < output; row++)
                layer.B[row] = br.ReadSingle();
            return layer;
        }

        var layer1 = ReadDense();
        var layer2 = ReadDense();
        var layer3 = ReadDense();
        // Build a shell with matching dimensions, then swap in the loaded layers.
        return new SimpleNet(layer1.In, layer1.Out, layer2.Out, layer3.Out, new Random(1))
        {
            fc1 = layer1,
            fc2 = layer2,
            fc3 = layer3
        };
    }
}
