import os

import requests
import streamlit as st

# Hugging Face Inference API configuration
API_URL = "https://api-inference.huggingface.co/models/sadiji/acadgpt"
api_token = os.getenv("HF_API_TOKEN")
headers = {"Authorization": f"Bearer {api_token}"}


def query(payload):
    """Send a text-generation request to the Inference API and return the JSON response."""
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()


st.title("AcademiaGPT")

# Initialize session state to store chat history
if "history" not in st.session_state:
    st.session_state.history = []

# User input
user_input = st.text_input("You: ", "")

if user_input:
    # Add user message to chat history
    st.session_state.history.append({"role": "user", "content": user_input})

    # Generate a response from the model
    output = query({
        "inputs": user_input,
        "parameters": {
            "max_length": 100,   # Increase this value to generate longer responses
            "temperature": 0.7,  # Optional: adjust for more randomness in the output
            "top_p": 0.9,        # Optional: adjust for nucleus sampling
            "top_k": 30,         # Optional: adjust for top-k sampling
        },
    })

    # Extract the generated text from the response
    try:
        bot_response = output[0]["generated_text"]
    except (KeyError, IndexError, TypeError):
        # The API returned an error payload (e.g. the model is still loading)
        bot_response = f"Error: {output}"

    # Add bot response to chat history
    st.session_state.history.append({"role": "bot", "content": bot_response})

# Display chat history
for message in st.session_state.history:
    if message["role"] == "user":
        st.write(f"You: {message['content']}")
    else:
        st.write(f"Bot: {message['content']}")