#!/usr/bin/env python3
"""
Example usage of multi-head attention mechanism in Tacotron2 Chinese speech synthesis
"""

import os
import sys

def print_configuration_examples():
    """Display example --hparams configurations for common usage scenarios.

    Each section is a (title, description, command) triple printed in the
    same order and format as the original hand-written sequence.
    """
    banner = "=" * 60
    print(banner)
    print("Multi-Head Attention Configuration Examples")
    print(banner)

    # (title, description, example command) for every scenario.
    sections = [
        ("1. 推荐配置 (Recommended Configuration)",
         "适用于大多数中文语音合成任务:",
         "--hparams='use_multihead_attention=True,num_attention_heads=8,attention_dim=128,attention_type=location_sensitive_multihead,attention_dropout_rate=0.1'"),
        ("2. 高性能配置 (High Performance)",
         "适用于GPU内存充足的情况:",
         "--hparams='use_multihead_attention=True,num_attention_heads=16,attention_dim=256,attention_type=location_sensitive_multihead,attention_dropout_rate=0.1'"),
        ("3. 内存优化配置 (Memory Optimized)",
         "适用于GPU内存有限的情况:",
         "--hparams='use_multihead_attention=True,num_attention_heads=4,attention_dim=64,attention_type=location_sensitive_multihead,attention_dropout_rate=0.2'"),
        ("4. 训练稳定性配置 (Training Stability)",
         "适用于训练不稳定的情况:",
         "--hparams='use_multihead_attention=True,num_attention_heads=8,attention_dim=128,attention_type=location_sensitive_multihead,attention_dropout_rate=0.3,tacotron_initial_learning_rate=5e-4'"),
        ("5. 标准多头注意力 (Standard Multi-Head)",
         "使用标准的Transformer风格多头注意力:",
         "--hparams='use_multihead_attention=True,num_attention_heads=8,attention_dim=128,attention_type=multihead,attention_dropout_rate=0.1'"),
        ("6. 回退到原始注意力 (Fallback to Original)",
         "如果遇到问题，可以回退到原始注意力机制:",
         "--hparams='use_multihead_attention=False'"),
    ]

    for title, description, command in sections:
        print(f"\n{title}")
        print("-" * 40)
        print(description)
        print(command)

def print_training_commands():
    """Display example `train.py` command lines for common training setups."""
    banner = "=" * 60
    print("\n" + banner)
    print("Training Commands Examples")
    print(banner)

    # (title, full command line) for each training scenario.
    examples = [
        ("1. 基础训练命令 (Basic Training)",
         "python train.py --model='Tacotron' --hparams='use_multihead_attention=True,num_attention_heads=8,attention_type=location_sensitive_multihead'"),
        ("2. 完整配置训练 (Full Configuration)",
         "python train.py --model='Tacotron' --hparams='use_multihead_attention=True,num_attention_heads=8,attention_dim=128,attention_type=location_sensitive_multihead,attention_dropout_rate=0.1,tacotron_batch_size=32'"),
        ("3. 多GPU训练 (Multi-GPU Training)",
         "python train.py --model='Tacotron' --hparams='tacotron_num_gpus=2,use_multihead_attention=True,num_attention_heads=8,attention_type=location_sensitive_multihead,tacotron_batch_size=64'"),
        ("4. 从检查点恢复训练 (Resume from Checkpoint)",
         "python train.py --model='Tacotron' --hparams='use_multihead_attention=True,num_attention_heads=8,attention_type=location_sensitive_multihead' --checkpoint=/path/to/checkpoint"),
    ]

    for title, command in examples:
        print(f"\n{title}")
        print("-" * 40)
        print(command)

def print_synthesis_commands():
    """Display example `synthesize.py` command lines for inference."""
    banner = "=" * 60
    print("\n" + banner)
    print("Synthesis Commands Examples")
    print(banner)

    # (title, full command line) for each synthesis scenario.
    examples = [
        ("1. 基础合成命令 (Basic Synthesis)",
         "python synthesize.py --model='Tacotron' --hparams='use_multihead_attention=True,num_attention_heads=8,attention_type=location_sensitive_multihead' --text='你好世界'"),
        ("2. 批量合成 (Batch Synthesis)",
         "python synthesize.py --model='Tacotron' --hparams='use_multihead_attention=True,num_attention_heads=8,attention_type=location_sensitive_multihead' --text_list=text_list.txt"),
        ("3. 高质量合成 (High Quality Synthesis)",
         "python synthesize.py --model='Tacotron' --hparams='use_multihead_attention=True,num_attention_heads=8,attention_type=location_sensitive_multihead,outputs_per_step=1' --text='这是一个高质量的语音合成测试'"),
    ]

    for title, command in examples:
        print(f"\n{title}")
        print("-" * 40)
        print(command)

def print_monitoring_tips():
    """Display monitoring, metrics, and troubleshooting tips as bullet lists."""
    banner = "=" * 60
    print("\n" + banner)
    print("Monitoring and Debugging Tips")
    print(banner)

    # Each section: a title followed by its bullet lines, printed in order.
    sections = [
        ("1. 训练监控 (Training Monitoring)", [
            "• 监控注意力权重的分布",
            "• 观察对齐质量的变化",
            "• 检查损失函数的收敛性",
            "• 关注梯度范数",
        ]),
        ("2. 性能指标 (Performance Metrics)", [
            "• 对齐准确率 (Alignment Accuracy)",
            "• 语音质量评分 (MOS)",
            "• 训练速度 (Steps per second)",
            "• 内存使用情况",
        ]),
        ("3. 常见问题解决 (Troubleshooting)", [
            "• 如果训练不稳定: 增加attention_dropout_rate",
            "• 如果内存不足: 减少num_attention_heads",
            "• 如果对齐质量差: 检查attention_dim设置",
            "• 如果收敛慢: 调整学习率",
        ]),
    ]

    for title, bullets in sections:
        print(f"\n{title}")
        print("-" * 40)
        for line in bullets:
            print(line)

def print_optimization_guide():
    """Display tuning advice for Chinese speech synthesis as bullet lists."""
    banner = "=" * 60
    print("\n" + banner)
    print("Optimization Guide for Chinese Speech Synthesis")
    print(banner)

    # Each section: a title followed by its tip lines, printed in order.
    sections = [
        ("1. 中文语音特点优化 (Chinese Speech Characteristics)", [
            "• 使用location_sensitive_multihead类型",
            "• 保持cumulative_weights=True",
            "• 适当调整attention_win_size",
            "• 考虑中文声调信息",
        ]),
        ("2. 超参数调优建议 (Hyperparameter Tuning)", [
            "• attention_dim: 64-256 (推荐128)",
            "• num_attention_heads: 4-16 (推荐8)",
            "• attention_dropout_rate: 0.1-0.3",
            "• tacotron_initial_learning_rate: 1e-3 到 5e-4",
        ]),
        ("3. 渐进式训练策略 (Progressive Training)", [
            "1. 先用单头注意力预训练",
            "2. 切换到多头注意力微调",
            "3. 逐步增加注意力头数",
            "4. 最终优化超参数",
        ]),
    ]

    for title, tips in sections:
        print(f"\n{title}")
        print("-" * 40)
        for line in tips:
            print(line)

def main():
    """Print every example section followed by a quick-start guide."""
    print("🎯 Multi-Head Attention Usage Examples for Tacotron2 Chinese Speech Synthesis")

    # Emit each section in its fixed display order.
    for show_section in (
        print_configuration_examples,
        print_training_commands,
        print_synthesis_commands,
        print_monitoring_tips,
        print_optimization_guide,
    ):
        show_section()

    banner = "=" * 60
    print("\n" + banner)
    print("Quick Start Guide")
    print(banner)
    print("\n1. 安装依赖: pip install tensorflow==1.15.0")
    print("2. 运行测试: python test_multihead_attention.py")
    print("3. 开始训练: python train.py --model='Tacotron' --hparams='use_multihead_attention=True,num_attention_heads=8,attention_type=location_sensitive_multihead'")
    print("4. 查看详细文档: MULTIHEAD_ATTENTION_README.md")

    print("\n🎉 祝您训练愉快！(Happy Training!)")

# Run the demo only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main() 