Illia56 committed
Commit 4de1eb8
1 Parent(s): 5f41beb

Update app.py

Files changed (1)
  1. app.py +13 -10
app.py CHANGED
@@ -90,14 +90,20 @@ def transcribe_video(youtube_url: str, path: str) -> List[Document]:
     result = client.predict(youtube_url, "translate", True, fn_index=7)
     return [Document(page_content=result[1], metadata=dict(page=1))]
 
-def predict(
-    message: str, system_prompt: str = "", model_url: str = models["Llama2-70b"]["chat_link"]
-) -> Any:
+def predict(message: str, system_prompt: str = '', temperature: float = 0.7, max_new_tokens: int = 1024,
+            topp: float = 0.5, repetition_penalty: float = 1.2) -> Any:
     """
     Predict a response using a client.
     """
-    client = Client(model_url)
-    response = client.predict(message, system_prompt, 0.7, 4096, 0.5, 1.2, api_name=("/chat_1" if model_url== models["Llama2-70b"]["chat_link"] else '/chat'))
+    client = Client("https://osanseviero-mistral-super-fast.hf.space/")
+    response = client.predict(
+        message,
+        temperature,
+        max_new_tokens,
+        topp,
+        repetition_penalty,
+        api_name="/chat"
+    )
     return response
 
 PATH = os.path.join(os.path.expanduser("~"), "Data")
@@ -114,11 +120,9 @@ def initialize_session_state():
 
 def sidebar():
     with st.sidebar:
-        st.sidebar.image('qr.jpg')
         st.markdown("# 💸 **Support our project**")
         st.markdown("This money would be used for paying for API and supporting our team.")
-        st.markdown("🎯 **Goal**: 600 $")
-        st.markdown("[🔗 Link to the bank](https://send.monobank.ua/jar/4mvqDivxmP)")
+        st.markdown("[🔗 Link](https://send.monobank.ua/jar/4mvqDivxmP)")
 
         st.markdown("")
 
@@ -135,7 +139,6 @@ def sidebar():
 
         # Embed the video
         st.markdown(embed_html, unsafe_allow_html=True)
-
 sidebar()
 
 initialize_session_state()
@@ -162,7 +165,7 @@ class LlamaLLM(LLM):
 
     def _call(self, prompt: str, stop: Optional[List[str]] = None, run_manager: Optional[CallbackManagerForLLMRun] = None) -> str:
         model_link = models[st.session_state.model_choice]["chat_link"]
-        response = predict(prompt, model_url=model_link)
+        response = predict(prompt)
         return response
 
     @property
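
For reference, the new call path in predict() can be exercised standalone. A minimal sketch, assuming Client here is gradio_client.Client (suggested by the .hf.space URL and the api_name / fn_index keyword arguments used elsewhere in app.py); the Space URL, argument order, and defaults mirror the added lines in the diff, while the example prompt is hypothetical:

# Standalone sketch of the updated predict(); not the full app.py.
from typing import Any
from gradio_client import Client

def predict(message: str, system_prompt: str = '', temperature: float = 0.7,
            max_new_tokens: int = 1024, topp: float = 0.5,
            repetition_penalty: float = 1.2) -> Any:
    # The commit pins the backend to this public Space; note that
    # system_prompt is still accepted but no longer forwarded to the endpoint.
    client = Client("https://osanseviero-mistral-super-fast.hf.space/")
    return client.predict(
        message,
        temperature,
        max_new_tokens,
        topp,
        repetition_penalty,
        api_name="/chat",
    )

if __name__ == "__main__":
    # Hypothetical usage; requires network access to the Space.
    print(predict("Give a one-sentence summary of the video transcript."))

After this change, LlamaLLM._call still looks up model_link from the selected model, but predict(prompt) no longer takes a model_url, so every model choice is served by the same fixed Space.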