Update app.py
app.py
CHANGED
@@ -15,20 +15,38 @@ LLM_MODELS = {
 def get_client(model_name):
     return InferenceClient(LLM_MODELS[model_name], token=os.getenv("HF_TOKEN"))

+def analyze_file_content(content, file_type):
+    """Analyze the file content and return a one-line summary."""
+    if file_type == 'parquet':
+        return f"Dataset analysis: a data table with {content.count('|')-1} columns"
+
+    # Text file case
+    lines = content.split('\n')
+    total_lines = len(lines)
+    non_empty_lines = len([line for line in lines if line.strip()])
+
+    if 'def ' in content or 'class ' in content:
+        functions = len([line for line in lines if 'def ' in line])
+        classes = len([line for line in lines if 'class ' in line])
+        return f"Code analysis: {total_lines} lines of Python code (including {functions} functions and {classes} classes)"
+    else:
+        return f"Text analysis: a {total_lines}-line text document ({non_empty_lines} non-empty lines)"
+
 def read_uploaded_file(file):
     if file is None:
-        return ""
+        return "", ""
     try:
         if file.name.endswith('.parquet'):
             df = pd.read_parquet(file.name, engine='pyarrow')
-
+            content = df.head(10).to_markdown(index=False)
+            return content, "parquet"
         else:
             content = file.read()
             if isinstance(content, bytes):
-
-            return content
+                content = content.decode('utf-8')
+            return content, "text"
     except Exception as e:
-        return f"An error occurred while reading the file: {str(e)}"
+        return f"An error occurred while reading the file: {str(e)}", "error"

 def format_history(history):
     formatted_history = []
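Not part of the commit: a minimal sketch of what the new parquet branch of read_uploaded_file() produces, assuming pandas, pyarrow, and tabulate (required by DataFrame.to_markdown) are installed; the file name and data below are made up for illustration.

import os
import tempfile
import pandas as pd

# Build a tiny DataFrame and round-trip it through a parquet file,
# mirroring the pd.read_parquet(...) call in the diff.
df = pd.DataFrame({"name": ["a", "b", "c"], "score": [1, 2, 3]})
path = os.path.join(tempfile.mkdtemp(), "sample.parquet")
df.to_parquet(path, engine="pyarrow")

loaded = pd.read_parquet(path, engine="pyarrow")
content = loaded.head(10).to_markdown(index=False)  # what read_uploaded_file returns as `content`
print(content)                  # a pipe-delimited Markdown table
print(content.count("|") - 1)   # the number interpolated into the "Dataset analysis" summary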
@@ -39,19 +57,25 @@ def format_history(history):
     return formatted_history

 def chat(message, history, uploaded_file, model_name, system_message="", max_tokens=4000, temperature=0.7, top_p=0.9):
-    system_prefix = """Always answer in Korean. Based on the given source code or data, your role is to "provide service usage explanations, guidance, and Q&A". Write very kindly and in detail, in Markdown format, with at least 4000 tokens. Based on the provided input, give usage explanations and answer questions, and you must help the user.
+    system_prefix = """Always answer in Korean. Based on the given source code or data, your role is to "provide service usage explanations, guidance, and Q&A". Write very kindly and in detail, in Markdown format, with at least 4000 tokens. Based on the provided input, give usage explanations and answer questions, and you must help the user."""

     if uploaded_file:
-        content = read_uploaded_file(uploaded_file)
-
+        content, file_type = read_uploaded_file(uploaded_file)
+        if file_type == "error":
+            return "", history + [[message, content]]
+
+        # Analyze and summarize the file content
+        file_summary = analyze_file_content(content, file_type)

-        if
+        if file_type == 'parquet':
             system_message += f"\n\nFile content:\n```markdown\n{content}\n```"
         else:
             system_message += f"\n\nFile content:\n```python\n{content}\n```"

     if message == "Starting the file analysis.":
-        message = """
+        message = f"""[File summary] {file_summary}
+
+Explain in detail, covering the following:
 1. The file's main purpose and functions
 2. Key features and components
 3. How to use it and typical usage scenarios
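The rest of chat() is not part of this diff. For orientation only, a hedged sketch of how a prompt assembled from system_prefix and system_message is commonly streamed through huggingface_hub's InferenceClient; the model id and message contents here are hypothetical and not taken from app.py.

import os
from huggingface_hub import InferenceClient

client = InferenceClient("meta-llama/Llama-3.1-8B-Instruct", token=os.getenv("HF_TOKEN"))  # hypothetical model id
messages = [
    {"role": "system", "content": "<system_prefix + system_message with the embedded file content>"},
    {"role": "user", "content": "Starting the file analysis."},
]
# Stream the reply chunk by chunk, reusing the max_tokens/temperature/top_p
# values from chat()'s signature.
for chunk in client.chat_completion(messages, max_tokens=4000, temperature=0.7, top_p=0.9, stream=True):
    print(chunk.choices[0].delta.content or "", end="")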
@@ -86,7 +110,6 @@ def chat(message, history, uploaded_file, model_name, system_message="", max_tok
 css = """
 footer {visibility: hidden}
 """
-# ... (same as the previous code)

 with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
     with gr.Row():
@@ -101,7 +124,7 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
         clear = gr.ClearButton([msg, chatbot])

     with gr.Column(scale=1):
-        model_name = gr.
+        model_name = gr.Radio(
             choices=list(LLM_MODELS.keys()),
             value="Default",
             label="Select LLM model",
@@ -110,7 +133,7 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:

         file_upload = gr.File(
             label="Upload a file",
-            file_types=["text", ".parquet"],
+            file_types=["text", ".parquet"],
             type="filepath"
         )

@@ -120,8 +143,6 @@ with gr.Blocks(theme="Yntec/HaleyCH_Theme_Orange", css=css) as demo:
         temperature = gr.Slider(minimum=0, maximum=1, value=0.7, label="Temperature")
         top_p = gr.Slider(minimum=0, maximum=1, value=0.9, label="Top P")

-
-
     # Event bindings
     msg.submit(
         chat,
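The diff ends mid-call at msg.submit(chat, ...). Purely as a hedged illustration, not the author's wiring, here is a self-contained toy showing the shape such a binding can take given chat()'s signature; the actual inputs/outputs lists in app.py may differ.

import gradio as gr

def chat(message, history, uploaded_file, model_name):
    # Placeholder handler with the same return shape as the error path above:
    # clear the textbox and append a [user, assistant] pair to the chatbot.
    return "", (history or []) + [[message, f"model={model_name}, file={uploaded_file}"]]

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()
    file_upload = gr.File(type="filepath")
    model_name = gr.Radio(choices=["Default"], value="Default", label="Select LLM model")
    msg.submit(chat, inputs=[msg, chatbot, file_upload, model_name], outputs=[msg, chatbot])

if __name__ == "__main__":
    demo.launch()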