import os
from typing import Optional, List, Mapping, Any
from urllib.parse import urlparse

import anthropic
from langchain.llms.base import LLM


class ClaudeLLM(LLM):
    """Minimal LangChain LLM wrapper around the Anthropic Claude completion API."""

    @property
    def _llm_type(self) -> str:
        return "custom"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        # Anthropic client (legacy completion-style SDK); reads the API key
        # from the ANTHROPIC_KEY environment variable.
        client = anthropic.Client(os.environ['ANTHROPIC_KEY'])

        # Wrap the raw prompt in Anthropic's human/assistant turn markers.
        prompt_formatted = (
            f"{anthropic.HUMAN_PROMPT}{prompt}\n{anthropic.AI_PROMPT}"
        )

        response = client.completion(
            prompt=prompt_formatted,
            stop_sequences=[anthropic.HUMAN_PROMPT],
            model="claude-instant-v1-100k",
            max_tokens_to_sample=100000,
            temperature=0.3,
        )

        return response["completion"]

    @property
    def _identifying_params(self) -> Mapping[str, Any]:
        """Get the identifying parameters."""
        return {}
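
# Usage sketch (illustrative, not part of the original file): because ClaudeLLM
# implements LangChain's LLM interface, it can be passed anywhere a LangChain
# LLM is expected, assuming ANTHROPIC_KEY is set in the environment, e.g.:
#
#   llm = ClaudeLLM()
#   summary = llm("Summarize the following passage: ...")
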
def remove_numbers(question):
    """Strip all digit characters from a question string."""
    return question.translate(str.maketrans('', '', '0123456789'))


def extract_website_name(url):
    """Return the bare site name from a URL (domain without 'www.' or the TLD)."""
    parsed_url = urlparse(url)
    if parsed_url.netloc.startswith("www."):
        return parsed_url.netloc.split("www.")[1].split(".")[0]
    return parsed_url.netloc.split(".")[0]
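
if __name__ == "__main__":
    # Quick, offline sanity checks for the helper functions above
    # (example inputs are made up for illustration; no API call is made).
    print(remove_numbers("What were the top 3 trends in 2023?"))
    # -> "What were the top  trends in ?"
    print(extract_website_name("https://www.example.com/blog/post"))
    # -> "example"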