dellabee7 committed on
Commit 15d6c6c · verified · 1 Parent(s): 9da9b54

Update app.py

Files changed (1)
  1. app.py +16 -34
app.py CHANGED
@@ -14,57 +14,39 @@ def extract_pdf_text(pdf_paths):
         full_text += text + "\n"
     return full_text.strip()

-# Predefined PDF documents
+# Load the reference PDF text
 pdf_context = extract_pdf_text([
     "assets/Programming-Fundamentals-1570222270.pdf",
     "assets/1분파이썬_강의자료_전체.pdf"
 ])

-# Inference Client setup - model changed
+# Use the freely available FLAN-T5 model
 client = InferenceClient(
-    model="mistralai/Mistral-7B-Instruct-v0.1",
-    token=os.getenv("HUGGINGFACEHUB_API_TOKEN")  # must be set
+    model="google/flan-t5-large",
+    token=os.getenv("HUGGINGFACEHUB_API_TOKEN")
 )

 def respond(message, history, system_message, max_tokens, temperature, top_p):
-    messages = [{"role": "system", "content": system_message}]
-
-    # Build the message list from the chat history
-    for user_msg, bot_msg in history:
-        if user_msg:
-            messages.append({"role": "user", "content": user_msg})
-        if bot_msg:
-            messages.append({"role": "assistant", "content": bot_msg})
-
-    # Append the document-grounded question
-    messages.append({
-        "role": "user",
-        "content": f"다음은 파이썬 프로그래밍 문서입니다:\n\n{pdf_context}\n\n질문: {message}"
-    })
-
-    response = ""
-    for chunk in client.chat_completion(
-        messages=messages,
-        max_tokens=max_tokens,
+    # Simple prompt construction (flan-t5 does not use a chat format)
+    prompt = f"{system_message}\n\n문서 요약:\n{pdf_context}\n\n질문: {message}\n답변:"
+    result = client.text_generation(
+        prompt=prompt,
+        max_new_tokens=max_tokens,
         temperature=temperature,
-        top_p=top_p,
-        stream=True,
-    ):
-        delta = chunk.choices[0].delta.content
-        if delta:
-            response += delta
-            yield response
+        top_p=top_p
+    )
+    return result.strip()

 demo = gr.ChatInterface(
     fn=respond,
     additional_inputs=[
-        gr.Textbox(value="You are a helpful assistant answering based on the programming reference.", label="System message"),
-        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
+        gr.Textbox(value="당신은 파이썬 API 문서에 기반해 답변하는 유용한 조교입니다.", label="System message"),
+        gr.Slider(minimum=1, maximum=1024, value=512, step=1, label="Max new tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
         gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p"),
     ],
-    title="📘 파이썬 API 레퍼런스 챗봇 (Mistral 기반)",
-    description="한국공대 수업자료 기반으로 질문에 답변하는 챗봇입니다."
+    title="📘 파이썬 API 레퍼런스 챗봇 (FLAN-T5 기반)",
+    description="한국공대 수업자료 기반으로 질문에 답하는 무료 챗봇입니다."
 )

 if __name__ == "__main__":
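
For reference, a minimal sketch (not part of the commit) of the non-chat call path the updated app.py now relies on. It assumes huggingface_hub's InferenceClient, a HUGGINGFACEHUB_API_TOKEN environment variable, and that google/flan-t5-large is reachable through the hosted Inference API; the example prompt text is illustrative only.

import os
from huggingface_hub import InferenceClient

# Same client setup as the new app.py
client = InferenceClient(
    model="google/flan-t5-large",
    token=os.getenv("HUGGINGFACEHUB_API_TOKEN"),
)

# text_generation takes a single prompt string instead of a chat message list
prompt = "You are a helpful teaching assistant.\n\nQuestion: What does list.append() do?\nAnswer:"
answer = client.text_generation(
    prompt,
    max_new_tokens=128,
    temperature=0.7,
    top_p=0.95,
)
print(answer.strip())

Unlike the previous chat_completion loop, text_generation here returns the completed string in one call, which is why respond() can simply return result.strip() instead of yielding streamed chunks.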