# importing genai library from google
from google import genai
from openai import OpenAI
import streamlit as st
import os
from huggingface_hub import InferenceClient
# from huggingface_hub import HfFolder
# creating client for HuggingFace
clientHuggingFace = InferenceClient(
    api_key=os.getenv("HUGGINGFACE_API_KEY")
)
# creating client for OpenAI
clientOpenAI = OpenAI(
    api_key=os.getenv('OPENAI_API_KEY')
)
# creating client for Gemini
clientGemini = genai.Client(api_key=os.getenv('GEMINI_API_KEY'))
# Function to return a response from OpenAI
def load_answer_openAI(question):
    completion = clientOpenAI.chat.completions.create(
        model="gpt-4o-mini",
        store=True,
        messages=[
            {"role": "user", "content": question}
        ]
    )
    return completion.choices[0].message.content
# Function to return a response from Gemini
def load_answer_Gemini(question):
    response = clientGemini.models.generate_content(
        model="gemini-2.0-flash",
        contents=question,
    )
    return response.text
# Function to return a response from a Hugging Face model via the Inference API
def load_answer_HuggingFace(question, modelName):
    res = clientHuggingFace.chat.completions.create(
        model=modelName,
        messages=[
            {"role": "user", "content": question}
        ]
    )
    return res.choices[0].message.content
# App UI
st.set_page_config(page_title="Langchain Demo", page_icon=":robot:")
st.header("Langchain Demo")
def get_text():
    input_text = st.text_input("Question: ", key="input")
    return input_text
def get_modelName():
    input_model = st.text_input("Model Name for HuggingFace: ", key="input1")
    return input_model
user_input = get_text()
submitGPT = st.button("Generate with ChatGPT")
submitGemini = st.button("Generate with Gemini")
providedModelName = get_modelName()
submitHuggingFace = st.button("Generate with HuggingFace")
if submitGPT:
    response = load_answer_openAI(user_input)
    st.subheader("Answer: ")
    st.write(response)
elif submitGemini:
    response = load_answer_Gemini(user_input)
    st.subheader("Answer: ")
    st.write(response)
elif submitHuggingFace:
    response = load_answer_HuggingFace(user_input, providedModelName)
    st.subheader("Answer: ")
    st.write(response)
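
# To run the app locally (a minimal sketch, assuming this file is saved as
# app.py and the OPENAI_API_KEY, GEMINI_API_KEY, and HUGGINGFACE_API_KEY
# environment variables are set before launching):
#   streamlit run app.py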