from sklearn.decomposition import PCA
from sklearn.preprocessing import StandardScaler
import numpy as np

class FeatureProcessor:
    """Fuse static and dynamic malware features into one reduced vector.

    Pipeline: vectorize each feature dict -> concatenate -> standardize
    -> PCA dimensionality reduction.
    """

    def __init__(self, n_components=50):
        # Requested PCA output dimensionality; clamped at fit time if the
        # data has fewer samples/features than requested (sklearn raises
        # ValueError otherwise).
        self.pca = PCA(n_components=n_components)
        self.scaler = StandardScaler()

    def process_features(self, static_feat, dynamic_feat):
        """Fuse features and reduce dimensionality.

        Args:
            static_feat: dict of static-analysis features
                (keys: 'sections', 'header', 'imports' — see _vectorize_static).
            dynamic_feat: dict of dynamic-analysis features
                (keys: 'risk_score', 'api_sequence', 'category_counts').

        Returns:
            2-D numpy array of PCA-transformed features, one row per sample.
        """
        # Vectorize each feature source.
        static_vec = self._vectorize_static(static_feat)
        dynamic_vec = self._vectorize_dynamic(dynamic_feat)

        # BUG FIX: both vectors are 1-D (shape (3,)), so the original
        # `np.concatenate(..., axis=1)` raised numpy.AxisError. Concatenate
        # along the only axis, then reshape into a single sample row because
        # StandardScaler/PCA require 2-D input.
        combined = np.concatenate([static_vec, dynamic_vec]).reshape(1, -1)

        # Standardize, then reduce dimensionality.
        scaled = self.scaler.fit_transform(combined)

        # PCA requires n_components <= min(n_samples, n_features); clamp so
        # small batches (e.g. a single 6-feature sample) do not raise.
        max_components = min(scaled.shape)
        if self.pca.n_components is not None and self.pca.n_components > max_components:
            self.pca = PCA(n_components=max_components)

        # NOTE(review): fit_transform re-fits the scaler and PCA on every
        # call, so results from different calls are not in a shared feature
        # space — confirm whether a separate fit()/transform() API is needed.
        return self.pca.fit_transform(scaled)

    def _vectorize_static(self, features):
        """Encode static features as a fixed-length numeric vector.

        Expects features['sections'] (sized collection),
        features['header']['machine'] (numeric) and
        features['imports']['api_count'] (numeric).
        """
        return np.array([
            len(features['sections']),
            features['header']['machine'],
            features['imports']['api_count']
        ])

    def _vectorize_dynamic(self, features):
        """Encode dynamic features as a fixed-length numeric vector.

        Expects features['risk_score'] (numeric), features['api_sequence']
        (sized collection) and features['category_counts']['Registry']
        (numeric).
        """
        return np.array([
            features['risk_score'],
            len(features['api_sequence']),
            features['category_counts']['Registry']
        ])