amaan8429 committed on
Commit
2b8680e
1 Parent(s): d29e0a2

trying to fix something

Browse files
Files changed (1) hide show
  1. app.py +2 -4
app.py CHANGED
@@ -4,7 +4,6 @@ from langchain_anthropic import ChatAnthropic
4
  from langchain_core.prompts import ChatPromptTemplate
5
  import streamlit as st
6
  from langchain_core.output_parsers import StrOutputParser
7
-
8
  # Load the API key from the environment
9
  load_dotenv()
10
 
@@ -12,7 +11,7 @@ from pydantic import SecretStr
12
 
13
  # Check if the API keys are set
14
  claude_api_key = os.getenv("CLAUDE_API_KEY")
15
- claude_api_key = SecretStr(claude_api_key) if claude_api_key is not None else None #convert the API key to SecretStr
16
  langchain_api_key = os.getenv("LANGCHAIN_API_KEY")
17
  langchain_project = os.getenv("LANGCHAIN_PROJECT")
18
 
@@ -48,13 +47,12 @@ st.subheader("Output")
48
  # Function to get output from LLM
49
  def Output_From_LLM(input_text):
50
  output_parser = StrOutputParser()
51
- llm = ChatAnthropic(anthropic_api_key=claude_api_key,model_name="claude-3-sonnet-20240229", temperature=0.2, max_tokens_to_sample=1024)
52
  chain = prompt | llm | output_parser
53
  result = chain.invoke({"input": input_text})
54
  return result
55
 
56
 
57
-
58
  # Display the output
59
  if submit:
60
  result = Output_From_LLM(input_text)
 
4
  from langchain_core.prompts import ChatPromptTemplate
5
  import streamlit as st
6
  from langchain_core.output_parsers import StrOutputParser
 
7
  # Load the API key from the environment
8
  load_dotenv()
9
 
 
11
 
12
  # Check if the API keys are set
13
  claude_api_key = os.getenv("CLAUDE_API_KEY")
14
+ # claude_api_key = SecretStr(claude_api_key) if claude_api_key is not None else None #convert the API key to SecretStr
15
  langchain_api_key = os.getenv("LANGCHAIN_API_KEY")
16
  langchain_project = os.getenv("LANGCHAIN_PROJECT")
17
 
 
47
  # Function to get output from LLM
48
def Output_From_LLM(input_text):
    """Send *input_text* through the Claude chat chain and return the parsed string reply.

    Builds a fresh ChatAnthropic client on every call using the module-level
    ``claude_api_key`` and pipes ``prompt | model | parser`` (LCEL) before
    invoking it with the user's text.
    """
    parser = StrOutputParser()
    # type: ignore kept from the original — claude_api_key is Optional here
    model = ChatAnthropic(
        anthropic_api_key=claude_api_key,
        model_name="claude-3-sonnet-20240229",
        temperature=0.2,
        max_tokens_to_sample=1024,
    )  # type: ignore
    llm_chain = prompt | model | parser
    return llm_chain.invoke({"input": input_text})
54
 
55
 
 
56
  # Display the output
57
  if submit:
58
  result = Output_From_LLM(input_text)