// network/pool_layer.go
package network

import (
	"math"
)

// PoolingMethod selects the downsampling operation a PoolLayer applies
// to each pooling window.
type PoolingMethod int

const (
	Max PoolingMethod = iota // keep the maximum value in each window
	Average                  // keep the arithmetic mean of each window
)

// PoolLayer implements a 2D pooling (downsampling) layer over a 3D tensor
// of shape (depth, height, width). It supports max and average pooling
// and has no trainable parameters.
type PoolLayer struct {
	InputDepth   int // number of input channels
	InputHeight  int // input height in cells
	InputWidth   int // input width in cells
	PoolHeight   int // pooling window height
	PoolWidth    int // pooling window width
	Stride       int // step between consecutive windows (same for both axes)
	Method       PoolingMethod
	OutputDepth  int // equals InputDepth; pooling does not mix channels
	OutputHeight int
	OutputWidth  int
	Inputs       *Tensor3D   // forward-pass input, cloned and kept for backpropagation
	MaxIndices   [][][][]int // per output cell [d][y][x]: the [y, x] offset of the max inside its window (max pooling only)
}

// NewPoolLayer builds a pooling layer for inputs of shape
// (inputDepth, inputHeight, inputWidth), sliding a poolHeight x poolWidth
// window by stride and reducing it with the given method.
func NewPoolLayer(inputDepth, inputHeight, inputWidth, poolHeight, poolWidth, stride int, method PoolingMethod) *PoolLayer {
	// Valid (no-padding) pooling: floor((in - pool) / stride) + 1 per spatial axis.
	outH := (inputHeight-poolHeight)/stride + 1
	outW := (inputWidth-poolWidth)/stride + 1

	layer := &PoolLayer{
		InputDepth:   inputDepth,
		InputHeight:  inputHeight,
		InputWidth:   inputWidth,
		PoolHeight:   poolHeight,
		PoolWidth:    poolWidth,
		Stride:       stride,
		Method:       method,
		OutputDepth:  inputDepth, // pooling never changes the channel count
		OutputHeight: outH,
		OutputWidth:  outW,
		MaxIndices:   make([][][][]int, inputDepth),
	}
	return layer
}

// Forward performs the pooling forward pass.
//
// input must be a *Tensor3D of shape (InputDepth, InputHeight, InputWidth);
// the result is a *Tensor3D of shape (OutputDepth, OutputHeight, OutputWidth).
// The input is cloned into p.Inputs for the backward pass, and for max
// pooling the in-window coordinates of each maximum are recorded in
// p.MaxIndices so Backward can route gradients to the argmax cell.
func (p *PoolLayer) Forward(input interface{}) interface{} {
	inputTensor, ok := input.(*Tensor3D)
	if !ok {
		panic("PoolLayer.Forward: input is not a *Tensor3D")
	}

	// Keep a copy of the input for backpropagation.
	p.Inputs = inputTensor.Clone()

	output := NewTensor3D(p.OutputDepth, p.OutputHeight, p.OutputWidth)

	// (Re)allocate MaxIndices: one [y, x] pair per output cell.
	p.MaxIndices = make([][][][]int, p.OutputDepth)
	for d := 0; d < p.OutputDepth; d++ {
		p.MaxIndices[d] = make([][][]int, p.OutputHeight)
		for h := 0; h < p.OutputHeight; h++ {
			p.MaxIndices[d][h] = make([][]int, p.OutputWidth)
			for w := 0; w < p.OutputWidth; w++ {
				p.MaxIndices[d][h][w] = make([]int, 2) // [y, x]
			}
		}
	}

	// Pool each channel independently (OutputDepth == InputDepth by construction).
	for d := 0; d < p.OutputDepth; d++ {
		for outY := 0; outY < p.OutputHeight; outY++ {
			for outX := 0; outX < p.OutputWidth; outX++ {
				// Top-left corner of this output cell's window in the input.
				startY := outY * p.Stride
				startX := outX * p.Stride

				switch p.Method {
				case Max:
					// math.Inf(-1) is the true identity for max; -math.MaxFloat64
					// would misreport inputs at or below that value.
					maxVal := math.Inf(-1)
					maxY, maxX := 0, 0

					for y := 0; y < p.PoolHeight; y++ {
						for x := 0; x < p.PoolWidth; x++ {
							// Defensive bound check; with the output dims computed in
							// NewPoolLayer every window is fully in-bounds.
							if startY+y < p.InputHeight && startX+x < p.InputWidth {
								val := inputTensor.Data[d][startY+y][startX+x]
								if val > maxVal {
									maxVal = val
									maxY, maxX = y, x
								}
							}
						}
					}

					output.Data[d][outY][outX] = maxVal
					p.MaxIndices[d][outY][outX][0] = maxY
					p.MaxIndices[d][outY][outX][1] = maxX

				case Average:
					// Average only over in-bounds window cells.
					sum := 0.0
					count := 0

					for y := 0; y < p.PoolHeight; y++ {
						for x := 0; x < p.PoolWidth; x++ {
							if startY+y < p.InputHeight && startX+x < p.InputWidth {
								sum += inputTensor.Data[d][startY+y][startX+x]
								count++
							}
						}
					}

					if count > 0 {
						output.Data[d][outY][outX] = sum / float64(count)
					}

				default:
					// An unknown method is a programmer error; the old if/else
					// chain silently produced an all-zero output instead.
					panic("PoolLayer.Forward: unknown pooling method")
				}
			}
		}
	}

	return output
}

// Backward routes the output gradient back onto the input positions that
// produced each pooled value and returns the resulting input gradient as a
// *Tensor3D. learningRate is unused: pooling has no trainable parameters.
func (p *PoolLayer) Backward(outputGradient interface{}, learningRate float64) interface{} {
	grad, ok := outputGradient.(*Tensor3D)
	if !ok {
		panic("PoolLayer.Backward: outputGradient is not a *Tensor3D")
	}

	// Gradient w.r.t. the layer input, zero-initialized.
	inputGrad := NewTensor3D(p.InputDepth, p.InputHeight, p.InputWidth)

	for d := 0; d < p.OutputDepth; d++ {
		for oy := 0; oy < p.OutputHeight; oy++ {
			for ox := 0; ox < p.OutputWidth; ox++ {
				g := grad.Data[d][oy][ox]

				// Top-left corner of the window this output cell pooled over.
				baseY := oy * p.Stride
				baseX := ox * p.Stride

				switch p.Method {
				case Max:
					// Only the argmax recorded in Forward receives gradient.
					idx := p.MaxIndices[d][oy][ox]
					inputGrad.Data[d][baseY+idx[0]][baseX+idx[1]] += g

				case Average:
					// First pass: count the in-bounds cells of this window.
					n := 0
					for y := 0; y < p.PoolHeight; y++ {
						for x := 0; x < p.PoolWidth; x++ {
							if baseY+y < p.InputHeight && baseX+x < p.InputWidth {
								n++
							}
						}
					}
					if n == 0 {
						continue
					}

					// Second pass: spread the gradient evenly over those cells.
					share := g / float64(n)
					for y := 0; y < p.PoolHeight; y++ {
						for x := 0; x < p.PoolWidth; x++ {
							if baseY+y < p.InputHeight && baseX+x < p.InputWidth {
								inputGrad.Data[d][baseY+y][baseX+x] += share
							}
						}
					}
				}
			}
		}
	}

	return inputGrad
}

// GetType reports the layer kind used by the network's layer registry.
func (p *PoolLayer) GetType() string {
	const layerType = "Pooling"
	return layerType
}

// GetOutputShape returns the output dimensions as [depth, height, width].
func (p *PoolLayer) GetOutputShape() []int {
	shape := make([]int, 0, 3)
	shape = append(shape, p.OutputDepth, p.OutputHeight, p.OutputWidth)
	return shape
}
