import unittest
from typing import override

from src.pipelines import Sequential
from src.processors import Processor, TokenizedDocumentStage
from src.unmarshalers import DocumentStage


class FakeTokenizationProcessor(Processor[DocumentStage, TokenizedDocumentStage]):
    """Test double that tokenizes documents by whitespace splitting."""

    @override
    def process(self, stage: DocumentStage) -> TokenizedDocumentStage:
        """Split each document in *stage* into whitespace-delimited tokens.

        Args:
            stage: Stage whose ``documents`` attribute yields document strings.

        Returns:
            A ``TokenizedDocumentStage`` wrapping one token list per document.
        """
        # Comprehension instead of a manual for/append loop (ruff PERF401).
        return TokenizedDocumentStage(
            [document.split() for document in stage.documents]
        )


class TestTokenizationProcessor(unittest.TestCase):
    """Checks that FakeTokenizationProcessor tokenizes inside a pipeline."""

    def test_tokenization_processor(self):
        # A small named helper replaces the inline lambda; it pulls the
        # token lists out of the stage produced by the tokenizer.
        def _extract_tokens(tokenized_stage):
            return tokenized_stage.tokenized_documents

        input_stage = DocumentStage(["hello world"])
        pipeline = Sequential(FakeTokenizationProcessor(), _extract_tokens)
        self.assertEqual(pipeline(input_stage), [["hello", "world"]])
