"""
寻找最优 K 值的 KNN 模型，并输出：
1) 控制台进度条（tqdm）与最终最佳准确率/最佳 K 值；
2) 保存准确率-随 K 变化的曲线图为 accuracy_plot.pdf；
3) 将最佳 KNN 模型序列化保存为 best_knn_model.pkl。

注意：
- 你的最佳 K 值与准确率不必与示例完全一致；
- 为了可复现，train_test_split 设置了 random_state；
- 使用的是 sklearn 自带的 digits(8x8) 数据集。
"""

from __future__ import annotations

import pickle
from pathlib import Path

import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets
from sklearn.metrics import accuracy_score
from sklearn.model_selection import StratifiedKFold, cross_val_score, train_test_split
from sklearn.neighbors import KNeighborsClassifier
from tqdm import tqdm


def _cv_accuracy_by_k(
	X_train: np.ndarray, y_train: np.ndarray, k_values: list[int]
) -> list[float]:
	"""Return the mean 5-fold stratified CV accuracy on the training set for each k.

	The fold split is fixed (shuffle=True, random_state=42) so repeated runs
	score every k on identical folds and results are reproducible.
	"""
	skf = StratifiedKFold(n_splits=5, shuffle=True, random_state=42)
	means: list[float] = []
	for k in tqdm(k_values, desc="Selecting K via 5-fold CV", unit="k"):
		model = KNeighborsClassifier(n_neighbors=k)
		scores = cross_val_score(model, X_train, y_train, cv=skf, scoring="accuracy")
		means.append(float(np.mean(scores)))
	return means


def _plot_accuracy_curve(
	k_values: list[int],
	cv_means: list[float],
	best_k: int,
	best_mean: float,
	out_dir: Path,
) -> Path:
	"""Plot the CV accuracy curve, highlight the best k, and save it as a PDF.

	Returns the path the figure was actually written to. On Windows the target
	may be locked by a PDF viewer (PermissionError); in that case we retry under
	numbered alternate names.
	"""
	plt.figure(figsize=(8, 4.8))
	plt.plot(k_values, cv_means, marker="o", label="CV mean accuracy")
	plt.xlabel("k value")
	plt.ylabel("accuracy")
	plt.title("Accuracy of different k values (5-fold CV)")
	plt.grid(True, linestyle=":", alpha=0.5)
	plt.legend()  # without this the line's label is never displayed

	# Vertical red line plus an annotation at the best point.
	plt.axvline(best_k, color="red", linestyle="--", linewidth=1.2)
	plt.scatter([best_k], [best_mean], color="red", zorder=5)
	plt.text(
		best_k + 0.5,
		best_mean,
		f"k={best_k}, Acc={best_mean:.2f}",
		color="red",
		fontsize=10,
		va="bottom",
	)

	plt.tight_layout()
	pdf_path = out_dir / "accuracy_plot.pdf"
	try:
		plt.savefig(pdf_path, format="pdf")
	except PermissionError:
		# Best effort: try "accuracy_plot (1).pdf" ... "(99).pdf" until one works.
		for i in range(1, 100):
			alt = out_dir / f"accuracy_plot ({i}).pdf"
			try:
				plt.savefig(alt, format="pdf")
				pdf_path = alt
				break
			except PermissionError:
				continue
	plt.close()
	return pdf_path


def main() -> None:
	"""Select K by cross-validation, fit/evaluate the best KNN, save plot + model."""
	# Output files go next to this script so they land in the project root.
	out_dir = Path(__file__).resolve().parent

	# 1) Load the 8x8 digits dataset bundled with scikit-learn.
	digits = datasets.load_digits()
	X, y = digits.data, digits.target

	# 2) Train/test split (fixed random_state for reproducibility).
	X_train, X_test, y_train, y_test = train_test_split(
		X, y, test_size=0.2, random_state=42, stratify=y
	)

	# 3) Score each candidate k with 5-fold stratified CV on the training set
	#    only, avoiding the k=1 bias a single train/test split would show.
	k_values = list(range(1, 41))
	cv_means = _cv_accuracy_by_k(X_train, y_train, k_values)

	# np.argmax returns the FIRST maximum; because k_values is ascending, ties
	# resolve to the smaller k (the simpler model). Indexing through k_values
	# keeps this correct even if the k grid is later changed.
	best_idx = int(np.argmax(cv_means))
	best_k = k_values[best_idx]
	best_mean = cv_means[best_idx]

	# 4) Refit with the chosen k on the full training set; evaluate on test.
	best_model = KNeighborsClassifier(n_neighbors=best_k)
	best_model.fit(X_train, y_train)
	y_pred = best_model.predict(X_test)
	test_acc = accuracy_score(y_test, y_pred)

	# 5) Plot the CV curve and save it as a PDF.
	pdf_path = _plot_accuracy_curve(k_values, cv_means, best_k, best_mean, out_dir)

	# 6) Serialize the fitted best model with pickle.
	pkl_path = out_dir / "best_knn_model.pkl"
	with open(pkl_path, "wb") as f:
		pickle.dump(best_model, f)

	# 7) Console summary (both CV and held-out test accuracy).
	print(f"Best K (5-fold CV): {best_k}")
	print(f"CV mean accuracy: {best_mean:.4f}")
	print(f"Test accuracy: {test_acc:.4f}")
	print(f"Saved: {pdf_path.name}, {pkl_path.name}")


if __name__ == "__main__":
	main()