from functools import lru_cache
from typing import Optional

from transformers import Pipeline, pipeline


@lru_cache
def init_model(task: str, model: Optional[str] = None) -> Pipeline:
    """Build and memoize a Hugging Face ``pipeline`` for *task*.

    ``@lru_cache`` ensures each distinct ``(task, model)`` pair loads its
    model weights only once per process; subsequent calls return the same
    ``Pipeline`` instance.

    Args:
        task: Pipeline task name (e.g. ``"sentiment-analysis"``).
        model: Optional model identifier or local path; ``None`` lets
            ``transformers`` pick the task's default model.

    Returns:
        The constructed ``Pipeline`` object.
    """
    # NOTE: annotation fixed from the implicit-Optional `model: str = None`
    # to `Optional[str]` (PEP 484); the default value is unchanged.
    return pipeline(task=task, model=model)


def custom_predict(text: str, pipe: Pipeline) -> list:
    """Run *pipe* on *text* and return its raw prediction list.

    Args:
        text: Input string to classify/process.
        pipe: A ready ``Pipeline``, e.g. from :func:`init_model`.

    Returns:
        The pipeline's output, e.g.
        ``[{'label': 'POSITIVE', 'score': 0.998}]``.
    """
    result = pipe(text)
    return result