Created using Colaboratory

notebooks/01-Basic_Tutor.ipynb (+153 -88, CHANGED)
--- notebooks/01-Basic_Tutor.ipynb (old version; removed or changed lines are prefixed with "-", "…" marks text truncated in the page extraction)

@@ -1,10 +1,26 @@
 {
 "cells": [
 {
 "cell_type": "markdown",
 "metadata": {
-"…
-"…
 },
 "source": [
 "<a href=\"https://colab.research.google.com/github/towardsai/ai-tutor-rag-system/blob/main/notebooks/01-Basic_Tutor.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
@@ -12,94 +28,108 @@
 },
 {
 "cell_type": "markdown",
-"metadata": {
-"id": "DMXyyXD0xix9"
-},
 "source": [
 "# Install Packages and Setup Variables"
-]
 },
 {
 "cell_type": "code",
 "execution_count": 1,
 "metadata": {
-"id": "o4Q0N2omkAoZ"
 },
-"outputs": [
 "source": [
-"!pip install -q openai==1.…
 ]
 },
 {
 "cell_type": "code",
-"execution_count": 2,
-"metadata": {
-"id": "xxK7EAAvr2aT"
-},
-"outputs": [],
 "source": [
 "import os\n",
 "\n",
 "# Set the \"OPENAI_API_KEY\" in the Python environment. Will be used by OpenAI client later.\n",
 "os.environ[\"OPENAI_API_KEY\"] = \"<YOUR_OPENAI_KEY>\""
-]
 },
 {
 "cell_type": "markdown",
-"metadata": {
-"id": "68RbStS-xpbL"
-},
 "source": [
 "# Load the API client"
-]
 },
 {
 "cell_type": "code",
-"execution_count": 3,
-"metadata": {
-"id": "La8hdWqJkFkh"
-},
-"outputs": [],
 "source": [
 "from openai import OpenAI\n",
 "\n",
 "# Defining the \"client\" object that enables\n",
 "# us to connect to OpenAI API endpoints.\n",
 "client = OpenAI()"
-]
 },
 {
 "cell_type": "markdown",
-"metadata": {
-"id": "CC-sa_uv6J2C"
-},
 "source": [
 "# Query the API"
-]
 },
 {
 "cell_type": "code",
-"execution_count": 4,
-"metadata": {
-"id": "7JRrn0uIsBfg"
-},
-"outputs": [],
 "source": [
 "# Define two questions: 1) Related to AI, 2) Unrelated topic.\n",
 "# These questions will be used to evaluate model's performance.\n",
 "QUESTION_AI = \"List a number of famous artificial intelligence frameworks?\"\n",
 "QUESTION_NOT_AI = \"What is the name of the highest mountain in the world and its height?\""
-]
 },
 {
 "cell_type": "code",
-"execution_count": 5,
-"metadata": {
-"id": "CcP26IauuBuV"
-},
-"outputs": [],
 "source": [
-"# Defining a function to answer a question using \"gpt-3.5-turbo-…
 "def ask_ai_tutor(question):\n",
 " try:\n",
 " # Formulating the system prompt and condition the model to answer only AI-related questions.\n",
@@ -113,8 +143,8 @@
 "\n",
 " # Call the OpenAI API\n",
 " response = client.chat.completions.create(\n",
-" model='gpt-3.5-turbo-…
-" temperature=0,\n",
 " messages=[\n",
 " {\"role\": \"system\", \"content\": system_prompt},\n",
 " {\"role\": \"user\", \"content\": prompt}\n",
@@ -126,22 +156,32 @@
 "\n",
 " except Exception as e:\n",
 " return f\"An error occurred: {e}\""
-]
 },
 {
 "cell_type": "code",
-"…
 "metadata": {
 "colab": {
 "base_uri": "https://localhost:8080/"
 },
 "id": "W_dbwURpufR7",
-"outputId": "…
 },
 "outputs": [
 {
-"name": "stdout",
 "output_type": "stream",
 "text": [
 "Sure! There are several famous artificial intelligence frameworks that are widely used in the field. Some of the popular ones include:\n",
 "\n",
@@ -158,69 +198,94 @@
 "These are just a few examples of famous AI frameworks, and there are many others available depending on specific needs and preferences.\n"
 ]
 }
-],
-"source": [
-"# Ask the AI-related question.\n",
-"res = ask_ai_tutor( QUESTION_AI )\n",
-"print( res )"
 ]
 },
 {
 "cell_type": "code",
-"…
 "metadata": {
 "colab": {
 "base_uri": "https://localhost:8080/"
 },
 "id": "37YuVJQquhpN",
-"outputId": "…
 },
 "outputs": [
 {
-"name": "stdout",
 "output_type": "stream",
 "text": [
 "I'm sorry, but I cannot answer that question as it is not related to artificial intelligence.\n"
 ]
 }
 ],
 "source": [
-"…
-"…
-"…
 ]
 },
 {
 "cell_type": "code",
-"…
-"metadata": {
-…
-"source": []
-}
-],
-"metadata": {
-"colab": {
-"authorship_tag": "ABX9TyMWsO+T66P1RZEeDMmjXjcm",
-"include_colab_link": true,
-"provenance": []
-},
-"kernelspec": {
-"display_name": "Python 3",
-"name": "python3"
-},
-"language_info": {
-"codemirror_mode": {
-"name": "ipython",
-"version": 3
-},
-"…
-"…
-"name": "python",
-"nbconvert_exporter": "python",
-"pygments_lexer": "ipython3",
-"version": "3.11.8"
 }
-…
-…
-"nbformat_minor": 0
-}
+++ notebooks/01-Basic_Tutor.ipynb (new version; added or changed lines are prefixed with "+")

@@ -1,10 +1,26 @@
 {
+"nbformat": 4,
+"nbformat_minor": 0,
+"metadata": {
+"colab": {
+"provenance": [],
+"authorship_tag": "ABX9TyNHj7ZktyV9bZgyjC3mEDFq",
+"include_colab_link": true
+},
+"kernelspec": {
+"name": "python3",
+"display_name": "Python 3"
+},
+"language_info": {
+"name": "python"
+}
+},
 "cells": [
 {
 "cell_type": "markdown",
 "metadata": {
+"id": "view-in-github",
+"colab_type": "text"
 },
 "source": [
 "<a href=\"https://colab.research.google.com/github/towardsai/ai-tutor-rag-system/blob/main/notebooks/01-Basic_Tutor.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
@@ -12,94 +28,108 @@
 },
 {
 "cell_type": "markdown",
 "source": [
 "# Install Packages and Setup Variables"
+],
+"metadata": {
+"id": "DMXyyXD0xix9"
+}
 },
 {
 "cell_type": "code",
 "execution_count": 1,
 "metadata": {
+"id": "o4Q0N2omkAoZ",
+"colab": {
+"base_uri": "https://localhost:8080/"
+},
+"outputId": "703fe996-2acf-4e90-92c1-252041ba7d7a"
 },
+"outputs": [
+{
+"output_type": "stream",
+"name": "stdout",
+"text": [
+"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m225.4/225.4 kB\u001b[0m \u001b[31m3.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m51.7/51.7 kB\u001b[0m \u001b[31m1.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.0/2.0 MB\u001b[0m \u001b[31m8.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m75.6/75.6 kB\u001b[0m \u001b[31m2.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.1/3.1 MB\u001b[0m \u001b[31m17.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m77.8/77.8 kB\u001b[0m \u001b[31m6.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+"\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 kB\u001b[0m \u001b[31m5.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n",
+"\u001b[?25h"
+]
+}
+],
 "source": [
+"!pip install -q openai==1.6.0 cohere==4.39 tiktoken==0.5.2"
 ]
 },
 {
 "cell_type": "code",
 "source": [
 "import os\n",
 "\n",
 "# Set the \"OPENAI_API_KEY\" in the Python environment. Will be used by OpenAI client later.\n",
 "os.environ[\"OPENAI_API_KEY\"] = \"<YOUR_OPENAI_KEY>\""
+],
+"metadata": {
+"id": "xxK7EAAvr2aT"
+},
+"execution_count": 2,
+"outputs": []
 },
 {
 "cell_type": "markdown",
 "source": [
 "# Load the API client"
+],
+"metadata": {
+"id": "68RbStS-xpbL"
+}
 },
 {
 "cell_type": "code",
 "source": [
 "from openai import OpenAI\n",
 "\n",
 "# Defining the \"client\" object that enables\n",
 "# us to connect to OpenAI API endpoints.\n",
 "client = OpenAI()"
+],
+"metadata": {
+"id": "La8hdWqJkFkh"
+},
+"execution_count": 3,
+"outputs": []
 },
 {
 "cell_type": "markdown",
 "source": [
 "# Query the API"
+],
+"metadata": {
+"id": "CC-sa_uv6J2C"
+}
 },
 {
 "cell_type": "code",
 "source": [
 "# Define two questions: 1) Related to AI, 2) Unrelated topic.\n",
 "# These questions will be used to evaluate model's performance.\n",
 "QUESTION_AI = \"List a number of famous artificial intelligence frameworks?\"\n",
 "QUESTION_NOT_AI = \"What is the name of the highest mountain in the world and its height?\""
+],
+"metadata": {
+"id": "7JRrn0uIsBfg"
+},
+"execution_count": 4,
+"outputs": []
 },
 {
 "cell_type": "code",
 "source": [
+"# Defining a function to answer a question using \"gpt-3.5-turbo-16k\" model.\n",
 "def ask_ai_tutor(question):\n",
 " try:\n",
 " # Formulating the system prompt and condition the model to answer only AI-related questions.\n",
@@ -113,8 +143,8 @@
 "\n",
 " # Call the OpenAI API\n",
 " response = client.chat.completions.create(\n",
+" model='gpt-3.5-turbo-16k',\n",
+" temperature=0.0,\n",
 " messages=[\n",
 " {\"role\": \"system\", \"content\": system_prompt},\n",
 " {\"role\": \"user\", \"content\": prompt}\n",
@@ -126,22 +156,32 @@
 "\n",
 " except Exception as e:\n",
 " return f\"An error occurred: {e}\""
+],
+"metadata": {
+"id": "CcP26IauuBuV"
+},
+"execution_count": 5,
+"outputs": []
 },
 {
 "cell_type": "code",
+"source": [
+"# Ask the AI-related question.\n",
+"RES_AI = ask_ai_tutor( QUESTION_AI )\n",
+"print( RES_AI )"
+],
 "metadata": {
 "colab": {
 "base_uri": "https://localhost:8080/"
 },
 "id": "W_dbwURpufR7",
+"outputId": "3cd84fb9-fe6f-4561-e9ee-ed606a983629"
 },
+"execution_count": 10,
 "outputs": [
 {
 "output_type": "stream",
+"name": "stdout",
 "text": [
 "Sure! There are several famous artificial intelligence frameworks that are widely used in the field. Some of the popular ones include:\n",
 "\n",
@@ -158,69 +198,94 @@
 "These are just a few examples of famous AI frameworks, and there are many others available depending on specific needs and preferences.\n"
 ]
 }
 ]
 },
 {
 "cell_type": "code",
+"source": [
+"# Ask the unrelated question.\n",
+"RES_NOT_AI = ask_ai_tutor( QUESTION_NOT_AI )\n",
+"print( RES_NOT_AI )"
+],
 "metadata": {
 "colab": {
 "base_uri": "https://localhost:8080/"
 },
 "id": "37YuVJQquhpN",
+"outputId": "4550c44d-2150-4cca-f23e-c89ea43e2040"
 },
+"execution_count": 12,
 "outputs": [
 {
 "output_type": "stream",
+"name": "stdout",
 "text": [
 "I'm sorry, but I cannot answer that question as it is not related to artificial intelligence.\n"
 ]
 }
+]
+},
+{
+"cell_type": "markdown",
+"source": [
+"# History"
 ],
+"metadata": {
+"id": "NRBgk6WToIK0"
+}
+},
+{
+"cell_type": "code",
 "source": [
+"response = client.chat.completions.create(\n",
+" model='gpt-3.5-turbo-16k',\n",
+" temperature=0.0,\n",
+" messages=[\n",
+" {\"role\": \"system\", \"content\": \"You are an AI tutor specialized in answering artificial intelligence-related questions. Only answer AI-related question, else say that you cannot answer this question.\"},\n",
+" {\"role\": \"user\", \"content\": \"Please provide an informative and accurate answer to the following question.\\nQuestion: List a number of famous artificial intelligence frameworks?\\nAnswer:\"},\n",
+" {\"role\": \"assistant\", \"content\": RES_AI},\n",
+" {\"role\": \"user\", \"content\": \"Please provide an informative and accurate answer to the following question.\\nQuestion: What is the name of the highest mountain in the world and its height?\\nAnswer:\"},\n",
+" {\"role\": \"assistant\", \"content\": RES_NOT_AI},\n",
+" {\"role\": \"user\", \"content\": \"Please provide an informative and accurate answer to the following question.\\nQuestion: Can you write a summary of the first suggested AI framework in the first question?\\nAnswer:\"}\n",
+" ]\n",
+" )\n",
+"\n",
+"print( response.choices[0].message.content.strip() )"
+],
+"metadata": {
+"colab": {
+"base_uri": "https://localhost:8080/"
+},
+"id": "0_6GN2XsoEyM",
+"outputId": "3e66a833-a552-4bcc-9808-7b9f6b539310"
+},
+"execution_count": 15,
+"outputs": [
+{
+"output_type": "stream",
+"name": "stdout",
+"text": [
+"Certainly! The first suggested AI framework in the previous question was TensorFlow. TensorFlow is an open-source framework developed by Google that has gained significant popularity in the field of artificial intelligence. It is primarily used for building and training machine learning and deep learning models.\n",
+"\n",
+"TensorFlow provides a comprehensive ecosystem of tools, libraries, and resources that make it easier for developers to create and deploy AI models. It offers a flexible architecture that allows for efficient computation on both CPUs and GPUs, enabling faster training and inference.\n",
+"\n",
+"One of the key features of TensorFlow is its ability to construct and execute computational graphs. These graphs represent the flow of data through a series of mathematical operations, making it easier to visualize and understand the model's structure. TensorFlow also supports automatic differentiation, which simplifies the process of calculating gradients for training neural networks.\n",
+"\n",
+"Moreover, TensorFlow has a vast community of users and contributors, which means there is extensive documentation, tutorials, and pre-trained models available. This makes it easier for developers to get started and leverage the collective knowledge of the community.\n",
+"\n",
+"Overall, TensorFlow is a powerful and versatile AI framework that has been widely adopted in various domains, including computer vision, natural language processing, and reinforcement learning. Its flexibility, scalability, and extensive community support make it a popular choice for both researchers and practitioners in the field of artificial intelligence.\n"
+]
+}
 ]
 },
 {
 "cell_type": "code",
+"source": [],
+"metadata": {
+"id": "ET_l06LiojaN"
 },
+"execution_count": null,
+"outputs": []
 }
+]
+}
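
For reference, the cells in the new revision boil down to the following flow. This is a minimal standalone sketch rather than part of the commit: it assumes the openai==1.6.0 SDK pinned in the install cell and an OPENAI_API_KEY in the environment, the SYSTEM_PROMPT constant is hoisted out of the function for brevity, and the user-prompt template inside ask_ai_tutor is reconstructed from the messages replayed in the History cell (those lines fall outside the diff hunks).

# Sketch of the notebook flow; assumes openai==1.6.0 and a valid OpenAI API key.
import os

from openai import OpenAI

os.environ["OPENAI_API_KEY"] = "<YOUR_OPENAI_KEY>"

client = OpenAI()

# System prompt taken verbatim from the History cell of the notebook.
SYSTEM_PROMPT = (
    "You are an AI tutor specialized in answering artificial intelligence-related questions. "
    "Only answer AI-related question, else say that you cannot answer this question."
)


def ask_ai_tutor(question):
    try:
        # The template below is an assumption, reconstructed from the user messages
        # shown in the History cell; the original formatting lines sit outside the hunks.
        prompt = (
            "Please provide an informative and accurate answer to the following question.\n"
            f"Question: {question}\nAnswer:"
        )
        response = client.chat.completions.create(
            model="gpt-3.5-turbo-16k",
            temperature=0.0,
            messages=[
                {"role": "system", "content": SYSTEM_PROMPT},
                {"role": "user", "content": prompt},
            ],
        )
        return response.choices[0].message.content
    except Exception as e:
        return f"An error occurred: {e}"


QUESTION_AI = "List a number of famous artificial intelligence frameworks?"
QUESTION_NOT_AI = "What is the name of the highest mountain in the world and its height?"

RES_AI = ask_ai_tutor(QUESTION_AI)          # answered: lists frameworks such as TensorFlow
RES_NOT_AI = ask_ai_tutor(QUESTION_NOT_AI)  # refused: not an AI-related question

# Follow-up from the History section: the Chat Completions endpoint is stateless,
# so the earlier exchanges are replayed as history, letting the model resolve
# "the first suggested AI framework in the first question".
followup = client.chat.completions.create(
    model="gpt-3.5-turbo-16k",
    temperature=0.0,
    messages=[
        {"role": "system", "content": SYSTEM_PROMPT},
        {"role": "user", "content": f"Please provide an informative and accurate answer to the following question.\nQuestion: {QUESTION_AI}\nAnswer:"},
        {"role": "assistant", "content": RES_AI},
        {"role": "user", "content": f"Please provide an informative and accurate answer to the following question.\nQuestion: {QUESTION_NOT_AI}\nAnswer:"},
        {"role": "assistant", "content": RES_NOT_AI},
        {"role": "user", "content": "Please provide an informative and accurate answer to the following question.\nQuestion: Can you write a summary of the first suggested AI framework in the first question?\nAnswer:"},
    ],
)
print(followup.choices[0].message.content.strip())

Resending the prior user and assistant turns is what allows the final question to refer back to the first answer; without that replayed history the model has no memory of earlier calls.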