FlameF0X committed on
Commit
4b438d0
·
verified ·
1 Parent(s): 97172ac

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +145 -0
  2. requirements.txt +3 -0
app.py ADDED
@@ -0,0 +1,145 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr # type: ignore
2
+ from transformers import pipeline
3
+
4
def chat_with_model(user_input, messages, pipe):
    """
    Send *user_input* to the model and return only its newest reply.

    Args:
        user_input (str): The user's input message.
        messages (list): Conversation history as {"role", "content"} dicts.
            Mutated in place: the user turn and the assistant reply are
            appended, and the list is also returned for convenience.
        pipe: A transformers text-generation pipeline (or any callable with
            the same (messages, max_new_tokens=...) -> [{"generated_text": ...}]
            contract).

    Returns:
        tuple: (str, list) - the model's reply and the updated history.
    """
    messages.append({"role": "user", "content": user_input})

    # Normalize history to plain {"role", "content"} dicts for the pipeline.
    formatted_messages = [{"role": m["role"], "content": str(m["content"])} for m in messages]

    generated = pipe(formatted_messages, max_new_tokens=2048)[0]["generated_text"]

    # Chat-style pipelines return the whole conversation (including the new
    # assistant turn) as a LIST of message dicts; plain text-generation
    # returns a single string. The original code assumed a string: calling
    # str.split() on the list raised AttributeError, which the broad except
    # silently swallowed, so the entire conversation object was returned as
    # the "reply". Handle both forms explicitly.
    if isinstance(generated, list):
        assistant_turns = [m for m in generated if m.get("role") == "assistant"]
        if assistant_turns:
            # The newest assistant message is the model's reply.
            last_response = str(assistant_turns[-1].get("content", ""))
        else:
            # No assistant turn found - fall back to the raw output.
            last_response = str(generated)
    else:
        response = str(generated)
        try:
            # Best-effort parse of a repr-style transcript string.
            parts = response.split("assistant', 'content': '")
            if len(parts) > 1:
                last_response = parts[-1].split("'}")[0]
            else:
                # Fallback: use the whole response if we can't parse it.
                last_response = response
        except Exception:
            last_response = response

    # Record the reply in the conversation history.
    messages.append({"role": "assistant", "content": last_response})

    return last_response, messages
42
+
43
# Initialize the pipeline
# Downloads/loads the model at import time; first run may take a while.
pipe = pipeline("text-generation", model="HuggingFaceTB/SmolLM2-1.7B-Instruct") # Replace with your model

# Initialize conversation history
# Module-level mutable state: chat_with_model() appends user/assistant turns
# to this list, so the conversation persists across calls.
messages = [
    {"role": "system", "content": """You are **ModuAssist**, created by the LearnModu Team to help beginners and experienced developers alike with the **Modu programming language**. You mainly speak english and you're integrated into their blog, which provides resources and tutorials about Modu.

### Key Information:
- **Modu** was developed by Cyteon and released on **December 11, 2024**.
- The LearnModu blog covers all features of Modu, including installation, syntax, and functionality.

---

### Installation
**1. Through Cargo (Recommended)**
- Install **Rust**, which includes Cargo.
- Check if Cargo is installed: cargo --version.
- Run: cargo +nightly install modu.
- Verify installation: modu.
- **VSCode Users:** Download the Modu extension on GitHub.

**2. Through Binaries**
- Download Modu binaries from GitHub Actions.
- Add them to your PATH environment variable.
- Verify installation: modu.

---

### Syntax Overview
**Hello World:**
print("Hello, World!");

**User Input:**
let string = input("Print something: ");
print(string);

**Variables and Types:**
- Automatic type assignment for variables.
let string = "text";
let integer = 34;
let boolean = true;

**If Statements:**
if a == b {
print("Equal!");
} if a !== b {
print("Not Equal!");
}

**Custom Functions:**
fn wave(person) {
print("Hello, ", person, "!");
}
wave("Alice");

**Importing Libraries:**
- Import libraries with import.
import "math" as m;
let value = m.sqrt(16);

---

### Advanced Features
- **Packages:** Install with modu install <package-name>.
- **File Imports:**
Example with main.modu importing something.modu:
import "something.modu" as sm;
sm.doSomething();

Unfortunately, Modu does not support loops (workaround is the basic_loops package, that adds function loop(another_function, start, end)) and there are also no arrays or dictionaries.

Your main goal is to assist users in debugging, fixing, and understanding Modu programs."""}, # Your full system prompt here
]
116
+
117
def main():
    """Run a simple console REPL against the model; type "exit" to quit."""
    global messages  # chat_with_model returns the updated history; rebind the module-level list
    while True:
        try:
            user_input = input("You: ")
        except (EOFError, KeyboardInterrupt):
            # Closed stdin / Ctrl-C should end the loop, not crash with a traceback.
            break
        # strip() so that "exit " or " EXIT" also terminates the loop
        if user_input.strip().lower() == "exit":
            break

        response, messages = chat_with_model(user_input, messages, pipe)
        print(f"Model: {response}")
126
+
127
# Add the Gradio interface.
# NOTE(fix): the original wired three inputs (Textbox, "state", gr.State(messages))
# to chat_with_model(user_input, messages, pipe), which made Gradio pass the
# message list into the `pipe` parameter and would crash on the first submit.
# The adapter below supplies the pipeline from module scope, so only
# (text, state) flow through Gradio. Also uses gr.Textbox/gr.State directly
# instead of the deprecated gr.inputs / gr.outputs namespaces.
def _chat_fn(user_input, history):
    """Gradio adapter: seed the state on first call, then delegate to chat_with_model."""
    if history is None:
        history = list(messages)
    return chat_with_model(user_input, history, pipe)

iface = gr.Interface(
    fn=_chat_fn,
    inputs=[
        gr.Textbox(lines=2, placeholder="Enter your message here..."),
        gr.State(messages),  # per-session conversation history
    ],
    outputs=[
        gr.Textbox(label="Model Response"),
        gr.State(),  # updated history fed back into the input state
    ],
    title="ModuAssist Chatbot",
    description="Chat with ModuAssist for help with the Modu programming language.",
)
142
+
143
if __name__ == "__main__":
    # launch() blocks until the server is shut down, so the original trailing
    # main() call (a stdin REPL) was unreachable dead code - and a blocking
    # input() loop would be wrong on a hosted Space anyway. The web UI is the
    # sole entry point; run main() manually for local console testing.
    iface.launch()
requirements.txt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ gradio==3.35.2
2
+ transformers==4.31.0
3
+ torch