ahmfzui committed on
Commit 85f6bb9
1 Parent(s): abec9f0

Delete app.py

Files changed (1)
  1. app.py +0 -45
app.py DELETED
@@ -1,45 +0,0 @@
- import os
- from huggingface_hub import InferenceClient
- import streamlit as st
-
- # Access your Hugging Face API token from the environment variable
- api_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
- if api_token is None:
-     st.error("Hugging Face API token is not set.")
- else:
-     st.title("Tanya Gizi!")
-
-     # Initialize chat history if not already present
-     if 'messages' not in st.session_state:
-         st.session_state.messages = []
-
-     # Display chat history
-     for message in st.session_state.messages:
-         st.chat_message(message['role']).markdown(message['content'])
-
-     # Input area for the user
-     prompt = st.chat_input('Masukan pertanyaanmu di sini!')
-
-     # Process user input
-     if prompt:
-         st.chat_message('user').markdown(prompt)
-         st.session_state.messages.append({'role': 'user', 'content': prompt})
-
-         # Generate a response using InferenceClient
-         client = InferenceClient(
-             model="mistralai/Mistral-Large-Instruct-2407",
-             token=api_token
-         )
-
-         # Generating response
-         response = client.chat_completion(
-             messages=[{"role": "user", "content": prompt}],
-             max_tokens=100,
-             stream=False  # Disable streaming as it's not supported
-         )
-
-         response_text = response['choices'][0]['message']['content']
-
-         # Display and store the assistant's response
-         st.chat_message('assistant').markdown(response_text)
-         st.session_state.messages.append({'role': 'assistant', 'content': response_text})
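
For reference, the core of the deleted file is a single chat_completion call against the Hugging Face Inference API, wrapped in a Streamlit chat loop. A minimal standalone sketch of that call (same model ID and token environment variable as above; the example question is made up, and the reply is read via attribute access on the returned ChatCompletionOutput):

import os
from huggingface_hub import InferenceClient

# Same model and token env var as the deleted app.py; the question below is only an example.
client = InferenceClient(
    model="mistralai/Mistral-Large-Instruct-2407",
    token=os.getenv("HUGGINGFACEHUB_API_TOKEN"),
)

response = client.chat_completion(
    messages=[{"role": "user", "content": "What is a balanced diet?"}],
    max_tokens=100,
)

# Read the assistant's reply from the ChatCompletionOutput object.
print(response.choices[0].message.content)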