# File size: 1,280 Bytes  (extraction artifact — original header, not source code)
# 638f5cd  (extraction artifact — commit hash)
from langchain.llms import HuggingFacePipeline
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
class NameExtractorChain:
    """Extract a person's name from a conversation using a HuggingFace text2text model.

    Wraps a ``transformers`` pipeline in a LangChain ``LLMChain`` with a fixed
    prompt that asks the model to return only the name (or 'No name found').
    """

    def __init__(self, model_name: str = "name-extraction"):
        """Build the pipeline, prompt, and chain.

        Args:
            model_name: HuggingFace model id or local path for a
                text2text-generation model.
        """
        self.pipe = pipeline(
            "text2text-generation",
            model=model_name,
            # Names are short; cap generation to keep output terse.
            max_new_tokens=10,
            # temperature=0 for deterministic extraction.
            model_kwargs={"temperature": 0},
        )
        self.llm = HuggingFacePipeline(pipeline=self.pipe)
        self.prompt = PromptTemplate(
            input_variables=["conversation"],
            template="""Extract only the name of the person from this conversation.
            If there's no name, return 'No name found'.
            Conversation: {conversation}""")
        self.chain = LLMChain(llm=self.llm, prompt=self.prompt)

    def extract_name(self, text: str):
        """Run the chain on *text* and return the model's output string.

        Args:
            text: A conversation snippet; a single bare word is rephrased as
                "It's <word>" so the model sees a sentence.

        Returns:
            The model output string, or ``None`` if the chain raised.
        """
        text = text.strip()
        # A lone token gives the model no sentence context; wrap it.
        if len(text.split()) == 1:
            text = "It's " + text
        try:
            output = self.chain.run(conversation=text)
        except Exception as e:
            # Best-effort: report and return None rather than crash the caller.
            print(f"Error processing text: {str(e)}")
            return None
        return output
if __name__ == "__main__":
    # BUG FIX: the original called extract_name(sample-text), which is parsed
    # as subtraction of two undefined names and raises NameError. Define a
    # concrete sample conversation instead.
    sample_text = "Hi there! My name is Alice and I love hiking."
    extractor = NameExtractorChain()
    print(extractor.extract_name(sample_text))