"""
Data loading utilities for LLM experiments.
"""

import json
from pathlib import Path
from typing import List, Dict, Any


def load_text_file(file_path: str) -> str:
    """
    Read an entire UTF-8 text file into memory.

    Args:
        file_path: Path to the text file

    Returns:
        Content of the file as string
    """
    # Path.read_text opens, reads, and closes the file in one call.
    return Path(file_path).read_text(encoding='utf-8')


def load_jsonl(file_path: str) -> List[Dict[str, Any]]:
    """
    Load data from a JSONL file (one JSON object per line).

    Blank lines (including a trailing newline at end of file) are
    skipped rather than raising ``json.JSONDecodeError``.

    Args:
        file_path: Path to the JSONL file

    Returns:
        List of dictionaries

    Raises:
        json.JSONDecodeError: If a non-empty line is not valid JSON.
    """
    data: List[Dict[str, Any]] = []
    with open(file_path, 'r', encoding='utf-8') as f:
        for line in f:
            line = line.strip()
            # Tolerate empty/blank lines — common in hand-edited or
            # concatenated JSONL files; json.loads('') would raise.
            if not line:
                continue
            data.append(json.loads(line))
    return data


def save_jsonl(data: List[Dict[str, Any]], file_path: str):
    """
    Write records to a JSONL file, one JSON object per line.

    Args:
        data: List of dictionaries to save
        file_path: Path to save the JSONL file
    """
    with open(file_path, 'w', encoding='utf-8') as out:
        # ensure_ascii=False keeps non-ASCII characters readable in the file.
        out.writelines(
            json.dumps(record, ensure_ascii=False) + '\n' for record in data
        )


def tokenize_text(text: str, tokenizer=None) -> List[str]:
    """
    Tokenize text, delegating to a caller-supplied tokenizer when given.

    Args:
        text: Input text
        tokenizer: Optional callable mapping a string to a list of tokens.
            When omitted, whitespace splitting is used.

    Returns:
        List of tokens (empty for empty/whitespace-only input with the
        default tokenizer)
    """
    # Check `is not None` rather than truthiness: a valid callable
    # tokenizer object that happens to be falsy (e.g. defines
    # __bool__/__len__) must still be used.
    if tokenizer is not None:
        return tokenizer(text)
    # str.split() with no arguments splits on any run of whitespace
    # and returns [] for an empty string.
    return text.split()

