|
from transformers import AutoModelForCausalLM, AutoTokenizer, TextGenerationPipeline |
|
|
|
class HuggingFaceHandler:
    """Callable handler that wraps a causal-LM checkpoint in a text-generation pipeline.

    Loads a tokenizer and model from a local directory or Hub repo id and
    exposes generation through ``__call__``.
    """

    def __init__(self, model_dir: str) -> None:
        """Load the tokenizer and model and build the generation pipeline.

        Args:
            model_dir: Local path or Hugging Face Hub repo id of the model.
        """
        self.tokenizer = AutoTokenizer.from_pretrained(model_dir)
        self.model = AutoModelForCausalLM.from_pretrained(model_dir)
        # framework='pt' pins the pipeline to the PyTorch backend.
        self.pipeline = TextGenerationPipeline(
            model=self.model, tokenizer=self.tokenizer, framework='pt'
        )

    def __call__(self, subject: str, max_length: int = 500):
        """Generate text for the given subject prompt.

        Args:
            subject: Prompt string fed to the model.
            max_length: Maximum total token length of the generated sequence.
                Defaults to 500, the previously hard-coded value, so existing
                callers are unaffected.

        Returns:
            The raw pipeline output: a list of dicts, each with a
            ``"generated_text"`` key.
        """
        generated_text = self.pipeline(subject, max_length=max_length)
        return generated_text
|
|
|
|
|
# Hub repo id of the LoRA-tuned Mistral-7B checkpoint to serve.
model_path = "KakaoL0L/Mistral7B_MatheoAI_lora"



# Module-level instantiation: downloads/loads the model at import time.
# NOTE(review): the serving runtime presumably looks up the `handler` name
# in this module — confirm before renaming or guarding behind __main__.
handler = HuggingFaceHandler(model_path)