import abc

from .artifact import Artifact


class InferenceEngine(abc.ABC, Artifact):
    """Abstract base class for inference."""

    @abc.abstractmethod
    def infer(self, dataset):
        """Perform inference on the input dataset."""
        pass


class HFPipelineBasedInferenceEngine(InferenceEngine):
    """Inference engine backed by a Hugging Face transformers pipeline."""

    model_name: str
    max_new_tokens: int

    def prepare(self):
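        """Build the transformers pipeline for the configured model (imported lazily)."""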
        from transformers import pipeline

        self.model = pipeline(model=self.model_name)

    def infer(self, dataset):
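        """Generate text for each instance's "source" field and return the generated strings."""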
        return [
            output["generated_text"]
            for output in self.model(
                [instance["source"] for instance in dataset],
                max_new_tokens=self.max_new_tokens,
            )
        ]
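

# Minimal usage sketch (not part of the module above): it assumes Artifact accepts
# the declared fields as keyword arguments and that each dataset instance is a dict
# with a "source" key holding the prompt text. The model name is only an
# illustrative choice.
if __name__ == "__main__":
    engine = HFPipelineBasedInferenceEngine(
        model_name="google/flan-t5-small",
        max_new_tokens=32,
    )
    engine.prepare()  # call explicitly in case Artifact does not invoke it automatically
    predictions = engine.infer([{"source": "Translate to French: Hello"}])
    print(predictions)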