File size: 1,061 Bytes
cdb6b83
b895edb
 
 
cdb6b83
 
b895edb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
cdb6b83
 
b895edb
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
"""Streamlit demo: question-answering with LLaMA 2 via the Hugging Face Hub.

Reads the Hugging Face API token from the HUGGINGFACE_API_KEY environment
variable, builds a simple system+user chat prompt, and streams the user's
question through an LLMChain, rendering the response in the page.
"""
import os

import streamlit as st
from langchain import LLMChain
# BUG FIX: HuggingFaceHub is an LLM wrapper and lives in langchain.llms,
# not langchain.chat_models — the old import raised ImportError at startup.
from langchain.llms import HuggingFaceHub
from langchain.prompts import ChatPromptTemplate

# Fail fast with a clear message if the token is missing; otherwise the
# Hub call would fail later with an opaque authentication error.
hf_token = os.getenv("HUGGINGFACE_API_KEY")
if not hf_token:
    st.error("HUGGINGFACE_API_KEY environment variable is not set.")
    st.stop()

# Initialize the Hugging Face Hub LLM (hosted inference endpoint).
llm = HuggingFaceHub(
    repo_id="meta-llama/Llama-2-7b-chat-hf",
    huggingfacehub_api_token=hf_token,
    model_kwargs={
        "temperature": 0.7,      # moderate randomness for conversational answers
        "max_new_tokens": 512,   # cap response length
    },
)

# Prompt template: fixed system persona plus the user's question.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful assistant."),
        ("user", "Question: {question}"),
    ]
)

# Chain the prompt and model; the result is exposed under the "response" key.
chain = LLMChain(llm=llm, prompt=prompt, output_key="response")

# --- Streamlit UI ---
st.title('LangChain Demo with LLaMA 2 on Hugging Face')

input_text = st.text_input("Enter your question:")

# Only invoke the model once the user has typed something (empty string is falsy).
if input_text:
    try:
        response = chain.run({"question": input_text})
        st.write(response)
    except Exception as e:
        # Surface inference/network errors in the UI rather than crashing the app.
        st.error(f"Error: {e}")