import os
import tensorflow as tf
from model.simsiam_tf import SimSiam
from utils.augment import simsiam_augment

# Full training pipeline: load images -> augment -> forward pass -> loss -> parameter update

# Hyperparameter configuration
IMAGE_SIZE = (640, 640)
BATCH_SIZE = 16
EPOCHS = 10
PROJECTION_DIM = 256
# Absolute directory containing this script
THIS_FILE_DIR = os.path.dirname(os.path.abspath(__file__))

# Resolve the project root (tongue_classifier), two levels up from this file
PROJECT_ROOT = os.path.join(THIS_FILE_DIR, "..", "..", "tongue_classifier")
PROJECT_ROOT = os.path.abspath(PROJECT_ROOT)

# Directory holding the unlabeled training images
DATA_DIR = os.path.join(PROJECT_ROOT, "data", "train", "tongue")


# Collect image file paths from the data directory
def load_image_paths(data_dir, extensions=('.jpg',)):
    """Return a sorted list of image paths under *data_dir*.

    Args:
        data_dir: Directory to scan (non-recursive).
        extensions: Lowercase filename suffixes to accept. Defaults to
            ``('.jpg',)`` to preserve the original behavior; pass e.g.
            ``('.jpg', '.jpeg', '.png')`` to widen the match.

    Returns:
        Sorted list of absolute/relative paths (matching *data_dir*).
        Sorting makes the dataset order deterministic — ``os.listdir``
        order is filesystem-dependent.
    """
    exts = tuple(ext.lower() for ext in extensions)
    return sorted(
        os.path.join(data_dir, fname)
        for fname in os.listdir(data_dir)
        # Case-insensitive match so 'IMG.JPG' is also picked up.
        if fname.lower().endswith(exts)
    )

# Load one image from disk and produce two augmented views of it
def load_and_augment(img_path):
    """Read the JPEG at *img_path* and return two augmented views.

    The pair of views feeds the two branches of the SimSiam network.
    """
    raw_bytes = tf.io.read_file(img_path)
    decoded = tf.image.decode_jpeg(raw_bytes, channels=3)
    first_view, second_view = simsiam_augment(decoded, image_size=IMAGE_SIZE)
    return first_view, second_view

# Build the tf.data input pipeline
def build_dataset(img_paths):
    """Build the SimSiam training dataset from a list of image paths.

    Shuffling happens on the (cheap) path strings BEFORE the augmentation
    map. The original order — map, then shuffle — forced the shuffle
    buffer to hold up to 1000 decoded, augmented 640x640 image *pairs*
    (multiple GB) instead of 1000 short strings, with no benefit to the
    randomization.

    Args:
        img_paths: List of image file paths.

    Returns:
        A batched, prefetched ``tf.data.Dataset`` yielding (view1, view2)
        pairs of shape (BATCH_SIZE, *IMAGE_SIZE, 3).
    """
    ds = tf.data.Dataset.from_tensor_slices(img_paths)
    ds = ds.shuffle(1000)
    ds = ds.map(load_and_augment, num_parallel_calls=tf.data.AUTOTUNE)
    return ds.batch(BATCH_SIZE).prefetch(tf.data.AUTOTUNE)

# Negative cosine similarity loss (SimSiam objective)
def negative_cosine_similarity(p, z):
    """Return the batch-mean negative cosine similarity between p and z.

    Both inputs are L2-normalized along the feature axis (axis=1), so the
    per-sample dot product equals cos(p, z). The mean is negated so that
    minimizing this loss maximizes agreement between the two branches.
    """
    p_unit = tf.math.l2_normalize(p, axis=1)
    z_unit = tf.math.l2_normalize(z, axis=1)
    per_sample_cos = tf.reduce_sum(p_unit * z_unit, axis=1)
    return -tf.reduce_mean(per_sample_cos)

# Custom training loop
def _train_step(model, optimizer, x1, x2):
    """Run one SimSiam gradient step on a batch pair; return the scalar loss.

    The symmetric loss stops gradients through each target projection z,
    which is the mechanism that prevents representation collapse in SimSiam.
    """
    with tf.GradientTape() as tape:
        # Model returns predictions p and projections z for both views;
        # exact output contract is defined by model.simsiam_tf.SimSiam.
        p1, z2, p2, z1 = model(x1, x2, training=True)
        loss = 0.5 * (negative_cosine_similarity(p1, tf.stop_gradient(z2)) +
                      negative_cosine_similarity(p2, tf.stop_gradient(z1)))
    grads = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(grads, model.trainable_variables))
    return loss


def train():
    """Train a SimSiam model on the tongue images and save its encoder.

    Raises:
        FileNotFoundError: If DATA_DIR contains no matching images. The
            original code would instead crash later with an unbound `step`
            (NameError) at the epoch summary when the dataset was empty.
    """
    image_paths = load_image_paths(DATA_DIR)
    if not image_paths:
        raise FileNotFoundError(f"No training images found in {DATA_DIR}")
    dataset = build_dataset(image_paths)

    model = SimSiam(input_shape=(*IMAGE_SIZE, 3), projection_dim=PROJECTION_DIM)
    optimizer = tf.keras.optimizers.Adam(learning_rate=1e-3)

    for epoch in range(EPOCHS):
        total_loss = 0.0
        num_steps = 0  # counted explicitly instead of relying on the loop variable surviving the loop
        for x1, x2 in dataset:
            loss = _train_step(model, optimizer, x1, x2)
            total_loss += loss.numpy()
            num_steps += 1
        print(f"Epoch {epoch+1}/{EPOCHS} - Loss: {total_loss / num_steps:.4f}")

    # Persist only the encoder; the projector/predictor heads are
    # pretraining scaffolding and not needed downstream.
    model.encoder.save('simsiam_encoder.h5')
    print("✅ Encoder saved as simsiam_encoder.h5")

if __name__ == '__main__':
    # Run training only when executed as a script, not on import.
    train()