dellabee7 committed
Commit 9da9b54 · verified · 1 Parent(s): 50f0aa2

Update app.py

Files changed (1)
  1. app.py +22 -14
app.py CHANGED
@@ -1,6 +1,7 @@
 import gradio as gr
 from huggingface_hub import InferenceClient
 from PyPDF2 import PdfReader
+import os
 
 # Pre-load the PDF text
 def extract_pdf_text(pdf_paths):
@@ -13,34 +14,41 @@ def extract_pdf_text(pdf_paths):
                 full_text += text + "\n"
     return full_text.strip()
 
-# Predefined reference documents
+# Pre-specified PDF documents
 pdf_context = extract_pdf_text([
     "assets/Programming-Fundamentals-1570222270.pdf",
     "assets/1분파이썬_강의자료_전체.pdf"
 ])
 
-client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
+# Inference Client setup - model changed
+client = InferenceClient(
+    model="mistralai/Mistral-7B-Instruct-v0.1",
+    token=os.getenv("HUGGINGFACEHUB_API_TOKEN")  # must be registered
+)
 
 def respond(message, history, system_message, max_tokens, temperature, top_p):
-    # Combine the user input with the reference documents
-    messages = [
-        {"role": "system", "content": system_message},
-        {"role": "user", "content": f"아래는 파이썬 프로그래밍 API 레퍼런스입니다:\n{pdf_context}\n\n질문: {message}"}
-    ]
-
+    messages = [{"role": "system", "content": system_message}]
+
+    # Build the message list from the chat history
     for user_msg, bot_msg in history:
         if user_msg:
             messages.append({"role": "user", "content": user_msg})
         if bot_msg:
             messages.append({"role": "assistant", "content": bot_msg})
 
+    # Append the document-grounded question
+    messages.append({
+        "role": "user",
+        "content": f"다음은 파이썬 프로그래밍 문서입니다:\n\n{pdf_context}\n\n질문: {message}"
+    })
+
     response = ""
     for chunk in client.chat_completion(
-        messages,
+        messages=messages,
         max_tokens=max_tokens,
-        stream=True,
         temperature=temperature,
         top_p=top_p,
+        stream=True,
     ):
         delta = chunk.choices[0].delta.content
         if delta:
@@ -50,13 +58,13 @@ def respond(message, history, system_message, max_tokens, temperature, top_p):
 demo = gr.ChatInterface(
     fn=respond,
     additional_inputs=[
-        gr.Textbox(value="You are a friendly chatbot that answers questions based on the given document.", label="System message"),
+        gr.Textbox(value="You are a helpful assistant answering based on the programming reference.", label="System message"),
         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
+        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p"),
     ],
-    title="📘 파이썬 API 레퍼런스 챗봇",
-    description="한국공대 수업자료 기반 챗봇입니다. 질문을 입력해 보세요!"
+    title="📘 파이썬 API 레퍼런스 챗봇 (Mistral 기반)",
+    description="한국공대 수업자료 기반으로 질문에 답변하는 챗봇입니다."
 )
 
 if __name__ == "__main__":
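
For quick verification of the new client configuration, the streaming call that respond() makes can be exercised outside Gradio. Below is a minimal sketch, assuming huggingface_hub is installed, HUGGINGFACEHUB_API_TOKEN is registered in the environment (for example as a Space secret), and the mistralai/Mistral-7B-Instruct-v0.1 endpoint is reachable through the Inference API; the script name and the sample question are only illustrative.

# check_client.py - standalone smoke test for the updated InferenceClient setup
# Assumes HUGGINGFACEHUB_API_TOKEN is set; the question below is only an example.
import os

from huggingface_hub import InferenceClient

client = InferenceClient(
    model="mistralai/Mistral-7B-Instruct-v0.1",
    token=os.getenv("HUGGINGFACEHUB_API_TOKEN"),
)

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "What does list.append() do in Python?"},
]

# Stream the completion chunk by chunk, mirroring the loop in respond().
response = ""
for chunk in client.chat_completion(
    messages=messages,
    max_tokens=128,
    temperature=0.7,
    top_p=0.95,
    stream=True,
):
    delta = chunk.choices[0].delta.content
    if delta:
        response += delta
        print(delta, end="", flush=True)
print()

If the environment variable is not registered, os.getenv() returns None and the client falls back to whatever credentials huggingface_hub can find locally, which is usually not what a deployed Space wants, so checking this before deploying is the main point of the script.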