import torch
import json

class Predictor:
    """Inference wrapper around a trained sequence-classification model.

    Holds a model/tokenizer pair, loads trained weights from ``model_path``,
    and exposes single-text, batch, and JSONL-file prediction helpers.
    """

    def __init__(self, model, tokenizer, model_path, device='cuda'):
        """
        Args:
            model: torch module whose forward pass returns an object with a
                ``.logits`` attribute of shape ``(batch, num_classes)``.
            tokenizer: callable mapping text(s) to a tensor batch that
                supports ``.to(device)`` and ``**``-unpacking into the model.
            model_path: path (or file-like object) holding the saved state_dict.
            device: torch device string the model and inputs are moved to.
        """
        self.model = model
        self.tokenizer = tokenizer
        self.device = device

        # NOTE(review): torch.load unpickles arbitrary objects — only load
        # checkpoints from trusted sources (or pass weights_only=True on
        # torch >= 2.0 when the file is a plain state_dict).
        self.model.load_state_dict(torch.load(model_path, map_location=self.device))
        self.model.to(self.device)
        self.model.eval()

    def _encode(self, texts, max_length):
        """Tokenize ``texts`` to fixed-length tensors moved to ``self.device``."""
        return self.tokenizer(
            texts,
            max_length=max_length,
            padding='max_length',
            truncation=True,
            return_tensors='pt',
        ).to(self.device)

    def predict(self, text, max_length=512):
        """Classify a single text.

        Args:
            text: input string.
            max_length: tokenizer truncation/padding length.

        Returns:
            Tuple ``(predicted_label, confidence)``: the argmax class index
            (int) and its softmax probability (float).
        """
        inputs = self._encode(text, max_length)
        with torch.no_grad():
            outputs = self.model(**inputs)

        probabilities = torch.nn.functional.softmax(outputs.logits, dim=1)
        predicted_label = torch.argmax(probabilities, dim=1).item()
        # Probability of the predicted class for the single input row
        # (equivalent to the global max here since there is one row).
        confidence = probabilities[0, predicted_label].item()

        return predicted_label, confidence

    def predict_batch(self, texts, max_length=512, return_confidence=False):
        """Classify a list of texts in a single forward pass.

        Args:
            texts: list of input strings.
            max_length: tokenizer truncation/padding length.
            return_confidence: when True, also return per-item softmax
                confidences. Default (False) keeps the original labels-only
                return, so existing callers are unaffected.

        Returns:
            List of predicted label indices, or ``(labels, confidences)``
            when ``return_confidence`` is True.
        """
        inputs = self._encode(texts, max_length)
        with torch.no_grad():
            outputs = self.model(**inputs)

        probabilities = torch.nn.functional.softmax(outputs.logits, dim=1)
        predicted = torch.argmax(probabilities, dim=1)
        labels = predicted.tolist()
        if return_confidence:
            # Pick each row's probability at its predicted class index.
            confidences = probabilities.gather(1, predicted.unsqueeze(1)).squeeze(1).tolist()
            return labels, confidences
        return labels

    def predict_file(self, input_file, output_file, max_length=512):
        """Predict every JSONL record in ``input_file``; write a JSON array.

        Each non-blank input line must be a JSON object with ``id`` and
        ``content`` keys. Blank lines (e.g. a trailing newline in a
        hand-edited file) are skipped instead of crashing json.loads.
        The output file is a JSON array of ``{id, label, confidence}``.
        """
        results = []

        with open(input_file, 'r', encoding='utf-8') as f:
            for line in f:
                line = line.strip()
                if not line:
                    # Tolerate blank/trailing lines in the JSONL input.
                    continue
                item = json.loads(line)
                predicted_label, confidence = self.predict(item['content'], max_length)

                results.append({
                    'id': item['id'],
                    'label': predicted_label,
                    'confidence': confidence,
                })

        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(results, f, ensure_ascii=False, indent=2)

        print(f'Prediction completed. Results saved to {output_file}')