class DomainAdaptationModels:
    """Builders for unsupervised domain-adaptation architectures.

    Provides a DANN (domain-adversarial) model and a Deep CORAL model.
    Assumes Keras symbols (Input, Conv1D, Dense, Model, Sequential, ...)
    and `tf` are imported at module level (imports not visible in this chunk).
    """

    def __init__(self, input_shape, num_classes):
        # input_shape: per-sample shape fed to the Conv1D stack — presumably
        # (timesteps, channels); TODO confirm against caller.
        self.input_shape = input_shape
        self.num_classes = num_classes

    def build_dann_model(self):
        """Build a Domain-Adversarial Neural Network (Ganin et al., 2016).

        Returns:
            Model with inputs [source_input, target_input] and outputs
            [source_label, target_label, domain_pred].

        Fix vs. previous version: the domain branch now passes through a
        gradient-reversal layer. Without it, the shared feature extractor is
        trained to *help* the domain classifier — the opposite of the DANN
        objective, which needs the extractor to produce domain-invariant
        features by receiving reversed domain-classifier gradients.
        """

        class GradientReversal(tf.keras.layers.Layer):
            """Identity on the forward pass; scales gradients by -hp_lambda."""

            def __init__(self, hp_lambda=1.0, **kwargs):
                super().__init__(**kwargs)
                self.hp_lambda = hp_lambda

            def call(self, inputs):
                @tf.custom_gradient
                def _flip(x):
                    def grad(dy):
                        return -self.hp_lambda * dy
                    return tf.identity(x), grad
                return _flip(inputs)

        # Feature extractor: three Conv1D blocks -> 256-d global embedding.
        def feature_extractor():
            inputs = Input(shape=self.input_shape)
            x = Conv1D(64, 3, activation='relu')(inputs)
            x = BatchNormalization()(x)
            x = MaxPooling1D(2)(x)
            x = Conv1D(128, 3, activation='relu')(x)
            x = BatchNormalization()(x)
            x = MaxPooling1D(2)(x)
            x = Conv1D(256, 3, activation='relu')(x)
            x = BatchNormalization()(x)
            x = GlobalAveragePooling1D()(x)
            return Model(inputs, x, name='feature_extractor')

        # Label classifier: predicts the task classes from the embedding.
        def label_classifier():
            inputs = Input(shape=(256,))
            x = Dense(128, activation='relu')(inputs)
            x = Dropout(0.5)(x)
            x = Dense(64, activation='relu')(x)
            outputs = Dense(self.num_classes, activation='softmax', name='label_output')(x)
            return Model(inputs, outputs, name='label_classifier')

        # Domain classifier: source (0) vs. target (1) discriminator.
        def domain_classifier():
            inputs = Input(shape=(256,))
            x = Dense(128, activation='relu')(inputs)
            x = Dropout(0.5)(x)
            x = Dense(64, activation='relu')(x)
            outputs = Dense(1, activation='sigmoid', name='domain_output')(x)
            return Model(inputs, outputs, name='domain_classifier')

        feat_extractor = feature_extractor()
        label_clf = label_classifier()
        domain_clf = domain_classifier()

        source_input = Input(shape=self.input_shape, name='source_input')
        target_input = Input(shape=self.input_shape, name='target_input')

        # Both domains share the same extractor weights.
        source_features = feat_extractor(source_input)
        source_label = label_clf(source_features)

        target_features = feat_extractor(target_input)
        target_label = label_clf(target_features)

        # Domain branch: gradient reversal makes feature-extractor updates
        # adversarial to the domain classifier.
        combined_features = tf.concat([source_features, target_features], axis=0)
        reversed_features = GradientReversal(name='gradient_reversal')(combined_features)
        domain_pred = domain_clf(reversed_features)

        # NOTE(review): domain_pred has batch size 2N while the model inputs
        # have batch size N, so Model.fit cannot pair it with per-sample
        # labels directly; training this model requires a custom loop (or
        # separate per-branch domain heads) — confirm with the caller.
        return Model(
            inputs=[source_input, target_input],
            outputs=[source_label, target_label, domain_pred],
            name='dann_model'
        )

    def build_deep_coral_model(self):
        """Build a Deep CORAL model (Sun & Saenko, 2016).

        Returns:
            Model with inputs [source_input, target_input] and outputs
            [source_output, coral_output]. coral_output carries the CORAL
            penalty broadcast to shape (batch, 1) and is produced by a layer
            named 'coral_loss' so compile()'s loss dict can address it.

        Fixes vs. previous version:
          * The CORAL loss now matches the paper: features are mean-centered,
            covariances scaled by 1/(n-1), and the squared Frobenius distance
            normalized by 1/(4 d^2). The old code compared uncentered second
            moments, so distributions differing only by a mean shift were
            wrongly penalized.
          * The CORAL output was an unnamed scalar; compile()'s
            'coral_loss' key could not bind to it and fit() could not pair
            it with a (N, 1) dummy target. It is now a named (batch, 1)
            tensor.
        """
        def coral_loss(source_features, target_features):
            # d: embedding width; n_s / n_t: per-domain batch sizes.
            d = tf.cast(tf.shape(source_features)[1], tf.float32)
            n_s = tf.cast(tf.shape(source_features)[0], tf.float32)
            n_t = tf.cast(tf.shape(target_features)[0], tf.float32)
            # Mean-center each domain before computing covariance.
            s = source_features - tf.reduce_mean(source_features, axis=0, keepdims=True)
            t = target_features - tf.reduce_mean(target_features, axis=0, keepdims=True)
            # Unbiased covariance; guard against batch size 1.
            cov_s = tf.matmul(s, s, transpose_a=True) / tf.maximum(n_s - 1.0, 1.0)
            cov_t = tf.matmul(t, t, transpose_a=True) / tf.maximum(n_t - 1.0, 1.0)
            # Squared Frobenius norm with the paper's 1/(4 d^2) scaling.
            return tf.reduce_sum(tf.square(cov_s - cov_t)) / (4.0 * d * d)

        # Shared backbone producing a 128-d embedding.
        base_model = Sequential([
            Conv1D(64, 3, activation='relu', input_shape=self.input_shape),
            BatchNormalization(),
            MaxPooling1D(2),
            Conv1D(128, 3, activation='relu'),
            BatchNormalization(),
            MaxPooling1D(2),
            Conv1D(256, 3, activation='relu'),
            BatchNormalization(),
            GlobalAveragePooling1D(),
            Dense(128, activation='relu'),
            Dropout(0.5)
        ], name='feature_extractor')

        classifier = Dense(self.num_classes, activation='softmax', name='classifier')

        source_input = Input(shape=self.input_shape, name='source_input')
        target_input = Input(shape=self.input_shape, name='target_input')

        source_features = base_model(source_input)
        target_features = base_model(target_input)

        source_output = classifier(source_features)

        # Broadcast the scalar CORAL penalty to (batch, 1) inside a layer
        # named 'coral_loss' so the trainer's loss dict key resolves.
        coral_output = tf.keras.layers.Lambda(
            lambda feats: tf.ones_like(feats[0][:, :1]) * coral_loss(feats[0], feats[1]),
            name='coral_loss'
        )([source_features, target_features])

        return Model(
            inputs=[source_input, target_input],
            outputs=[source_output, coral_output],
            name='deep_coral_model'
        )

class TransferLearningTrainer:
    """Compiles and fits a domain-adaptation model built elsewhere."""

    def __init__(self, model, model_type='dann'):
        self.model = model
        self.model_type = model_type  # 'dann' or 'deep_coral'
        self.history = None

    def compile_model(self, alpha=1.0):
        """Compile the wrapped model with losses matching its type.

        Args:
            alpha: weight of the domain-confusion loss (DANN only).
        """
        if self.model_type == 'dann':
            dann_losses = {
                'label_output': 'categorical_crossentropy',
                'domain_output': 'binary_crossentropy'
            }
            dann_weights = {'label_output': 1.0, 'domain_output': alpha}
            self.model.compile(
                optimizer=Adam(0.001),
                loss=dann_losses,
                loss_weights=dann_weights,
                metrics={'label_output': ['accuracy']}
            )
        elif self.model_type == 'deep_coral':
            # The model emits the CORAL penalty as a prediction, so its
            # "loss" is just the predicted value itself (y_true is ignored).
            coral_losses = {
                'classifier': 'categorical_crossentropy',
                'coral_loss': lambda y_true, y_pred: y_pred
            }
            coral_weights = {'classifier': 1.0, 'coral_loss': 0.5}
            self.model.compile(
                optimizer=Adam(0.001),
                loss=coral_losses,
                loss_weights=coral_weights,
                metrics={'classifier': ['accuracy']}
            )

    def train_dann(self, source_data, target_data, epochs=100, batch_size=32):
        """Fit a DANN model.

        Args:
            source_data: (X_source, y_source) tuple.
            target_data: (X_target, labels) tuple; labels are ignored —
                the target domain is treated as unlabeled.
        """
        X_source, y_source = source_data
        X_target, _unused = target_data  # target domain is unlabeled

        # Domain labels: 0 for source samples, 1 for target samples.
        domain_labels = np.concatenate(
            [np.zeros((len(X_source), 1)), np.ones((len(X_target), 1))],
            axis=0
        )

        callbacks = [
            EarlyStopping(patience=15, restore_best_weights=True),
            ReduceLROnPlateau(factor=0.5, patience=5)
        ]

        # NOTE: y_source doubles as a pseudo-label target for the
        # target-domain classification head.
        self.history = self.model.fit(
            [X_source, X_target],
            [y_source, y_source, domain_labels],
            epochs=epochs,
            batch_size=batch_size,
            callbacks=callbacks,
            verbose=1
        )

    def train_deep_coral(self, source_data, target_data, epochs=100, batch_size=32):
        """Fit a Deep CORAL model; the CORAL head receives a dummy target."""
        X_source, y_source = source_data
        X_target, _unused = target_data

        # Dummy target for the CORAL output (its loss ignores y_true).
        coral_dummy = np.zeros((len(X_source), 1))

        self.history = self.model.fit(
            [X_source, X_target],
            [y_source, coral_dummy],
            epochs=epochs,
            batch_size=batch_size,
            verbose=1
        )

# Transfer-learning example (module-level driver).
# Prepare source- and target-domain data.
# NOTE(review): X_train, y_train_cat, input_shape and num_classes must be
# defined earlier in the file — they are not visible in this chunk.
X_source = X_train.reshape(-1, X_train.shape[1], 1)
y_source = y_train_cat


X_target = np.random.randn(100, X_source.shape[1], 1)  # synthetic stand-in for unlabeled target-domain data

# Build and train the DANN model.
da_models = DomainAdaptationModels(input_shape, num_classes)
dann_model = da_models.build_dann_model()

dann_trainer = TransferLearningTrainer(dann_model, 'dann')
dann_trainer.compile_model(alpha=0.5)
dann_trainer.train_dann((X_source, y_source), (X_target, None), epochs=50)