fklitt committed on
Commit b568846, 1 Parent(s): 0bad45d

Updated with feature view

__pycache__/feature_pipeline.cpython-311.pyc CHANGED
Binary files a/__pycache__/feature_pipeline.cpython-311.pyc and b/__pycache__/feature_pipeline.cpython-311.pyc differ
 
feature_engineering.ipynb CHANGED
@@ -64,7 +64,7 @@
64
  "name": "python",
65
  "nbconvert_exporter": "python",
66
  "pygments_lexer": "ipython3",
67
- "version": "3.11.4"
68
  },
69
  "orig_nbformat": 4
70
  },
 
64
  "name": "python",
65
  "nbconvert_exporter": "python",
66
  "pygments_lexer": "ipython3",
67
+ "version": "3.11.9"
68
  },
69
  "orig_nbformat": 4
70
  },
feature_pipeline.ipynb CHANGED
@@ -2,18 +2,178 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 3,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
9
- "#from dotenv import load_dotenv\n",
10
- "#import os "
11
  ]
12
  },
13
  {
14
  "cell_type": "code",
15
  "execution_count": 4,
16
  "metadata": {},
17
  "outputs": [
18
  {
19
  "name": "stdout",
@@ -21,11 +181,11 @@
21
  "text": [
22
  " 1. open 2. high 3. low 4. close 5. volume\n",
23
  "date \n",
 
 
 
24
  "2024-04-30 186.98 190.95 182.8401 183.28 127031787.0\n",
25
- "2024-04-29 188.42 198.87 184.5400 194.05 243869678.0\n",
26
- "2024-04-26 168.85 172.12 166.3700 168.29 109815725.0\n",
27
- "2024-04-25 158.96 170.88 158.3600 170.18 126427521.0\n",
28
- "2024-04-24 162.84 167.97 157.5100 162.13 181178020.0\n"
29
  ]
30
  }
31
  ],
@@ -111,101 +271,85 @@
111
  ],
112
  "source": [
113
  "# Define your file path and name\n",
114
- "file_path = 'TSLA_stock_price.csv' # Customize the path and filename\n",
115
  "\n",
116
  "# Save the DataFrame to CSV\n",
117
- "data.to_csv(file_path)\n",
118
  "\n",
119
- "print(f\"Data saved to {file_path}\")\n"
120
  ]
121
  },
122
  {
123
  "cell_type": "code",
124
- "execution_count": 7,
125
  "metadata": {},
126
  "outputs": [
127
  {
128
  "name": "stdout",
129
  "output_type": "stream",
130
  "text": [
131
- " date 1. open 2. high 3. low 4. close 5. volume\n",
132
- "0 2024-04-29 188.42 198.87 184.54 194.05 243869678.0\n",
133
- "1 2024-04-26 168.85 172.12 166.37 168.29 109815725.0\n",
134
- "2 2024-04-25 158.96 170.88 158.36 170.18 126427521.0\n",
135
- "3 2024-04-24 162.84 167.97 157.51 162.13 181178020.0\n",
136
- "4 2024-04-23 143.33 147.26 141.11 144.68 124545104.0\n"
137
  ]
138
  }
139
  ],
140
  "source": [
141
  "# Load and display the data from CSV to confirm\n",
142
- "tsla_df = pd.read_csv(file_path)\n",
143
- "print(tsla_df.head())\n"
 
144
  ]
145
  },
146
  {
147
  "cell_type": "code",
148
- "execution_count": 8,
149
  "metadata": {},
150
  "outputs": [
151
  {
152
  "name": "stdout",
153
  "output_type": "stream",
154
  "text": [
155
- "Connected. Call `.close()` to terminate connection gracefully.\n"
156
- ]
157
- },
158
- {
159
- "name": "stderr",
160
- "output_type": "stream",
161
- "text": [
162
- "\n",
163
- "\n",
164
- "UserWarning: The installed hopsworks client version 3.4.4 may not be compatible with the connected Hopsworks backend version 3.7.1. \n",
165
- "To ensure compatibility please install the latest bug fix release matching the minor version of your backend (3.7) by running 'pip install hopsworks==3.7.*'\n"
166
- ]
167
- },
168
- {
169
- "name": "stdout",
170
- "output_type": "stream",
171
- "text": [
172
  "\n",
173
- "Logged in to project, explore it here https://c.app.hopsworks.ai:443/p/549016\n",
174
  "Connected. Call `.close()` to terminate connection gracefully.\n"
175
  ]
176
  }
177
  ],
178
  "source": [
179
- "import hopsworks\n",
180
- "\n",
181
- "project = hopsworks.login()\n",
182
- "fs = project.get_feature_store()\n"
183
  ]
184
  },
185
  {
186
  "cell_type": "code",
187
- "execution_count": 9,
188
  "metadata": {},
189
  "outputs": [],
190
  "source": [
191
- "#import re "
192
  ]
193
  },
194
  {
195
  "cell_type": "code",
196
- "execution_count": 10,
197
  "metadata": {},
198
  "outputs": [],
199
  "source": [
200
- "#def clean_column_name(name):\n",
201
  " # Remove all non-letter characters\n",
202
- " #cleaned_name = re.sub(r'[^a-zA-Z]', '', name)\n",
203
- " #return cleaned_name\n"
204
  ]
205
  },
206
  {
207
  "cell_type": "code",
208
- "execution_count": 11,
209
  "metadata": {},
210
  "outputs": [
211
  {
@@ -235,53 +379,59 @@
235
  " <th>3. low</th>\n",
236
  " <th>4. close</th>\n",
237
  " <th>5. volume</th>\n",
 
238
  " </tr>\n",
239
  " </thead>\n",
240
  " <tbody>\n",
241
  " <tr>\n",
242
  " <th>0</th>\n",
243
- " <td>2024-04-29</td>\n",
244
- " <td>188.42</td>\n",
245
- " <td>198.8700</td>\n",
246
- " <td>184.54</td>\n",
247
- " <td>194.05</td>\n",
248
- " <td>243869678.0</td>\n",
 
249
  " </tr>\n",
250
  " <tr>\n",
251
  " <th>1</th>\n",
252
- " <td>2024-04-26</td>\n",
253
- " <td>168.85</td>\n",
254
- " <td>172.1200</td>\n",
255
- " <td>166.37</td>\n",
256
- " <td>168.29</td>\n",
257
- " <td>109815725.0</td>\n",
 
258
  " </tr>\n",
259
  " <tr>\n",
260
  " <th>2</th>\n",
261
- " <td>2024-04-25</td>\n",
262
- " <td>158.96</td>\n",
263
- " <td>170.8800</td>\n",
264
- " <td>158.36</td>\n",
265
- " <td>170.18</td>\n",
266
- " <td>126427521.0</td>\n",
 
267
  " </tr>\n",
268
  " <tr>\n",
269
  " <th>3</th>\n",
270
- " <td>2024-04-24</td>\n",
271
- " <td>162.84</td>\n",
272
- " <td>167.9700</td>\n",
273
- " <td>157.51</td>\n",
274
- " <td>162.13</td>\n",
275
- " <td>181178020.0</td>\n",
 
276
  " </tr>\n",
277
  " <tr>\n",
278
  " <th>4</th>\n",
279
- " <td>2024-04-23</td>\n",
280
- " <td>143.33</td>\n",
281
- " <td>147.2600</td>\n",
282
- " <td>141.11</td>\n",
283
- " <td>144.68</td>\n",
284
- " <td>124545104.0</td>\n",
 
285
  " </tr>\n",
286
  " <tr>\n",
287
  " <th>...</th>\n",
@@ -291,140 +441,177 @@
291
  " <td>...</td>\n",
292
  " <td>...</td>\n",
293
  " <td>...</td>\n",
 
294
  " </tr>\n",
295
  " <tr>\n",
296
- " <th>3477</th>\n",
297
  " <td>2010-07-06</td>\n",
298
  " <td>20.00</td>\n",
299
  " <td>20.0000</td>\n",
300
- " <td>15.83</td>\n",
301
  " <td>16.11</td>\n",
302
  " <td>6866900.0</td>\n",
 
303
  " </tr>\n",
304
  " <tr>\n",
305
- " <th>3478</th>\n",
306
  " <td>2010-07-02</td>\n",
307
  " <td>23.00</td>\n",
308
  " <td>23.1000</td>\n",
309
- " <td>18.71</td>\n",
310
  " <td>19.20</td>\n",
311
  " <td>5139800.0</td>\n",
 
312
  " </tr>\n",
313
  " <tr>\n",
314
- " <th>3479</th>\n",
315
  " <td>2010-07-01</td>\n",
316
  " <td>25.00</td>\n",
317
  " <td>25.9200</td>\n",
318
- " <td>20.27</td>\n",
319
  " <td>21.96</td>\n",
320
  " <td>8218800.0</td>\n",
 
321
  " </tr>\n",
322
  " <tr>\n",
323
- " <th>3480</th>\n",
324
  " <td>2010-06-30</td>\n",
325
  " <td>25.79</td>\n",
326
  " <td>30.4192</td>\n",
327
- " <td>23.30</td>\n",
328
  " <td>23.83</td>\n",
329
  " <td>17187100.0</td>\n",
 
330
  " </tr>\n",
331
  " <tr>\n",
332
- " <th>3481</th>\n",
333
  " <td>2010-06-29</td>\n",
334
  " <td>19.00</td>\n",
335
  " <td>25.0000</td>\n",
336
- " <td>17.54</td>\n",
337
  " <td>23.89</td>\n",
338
  " <td>18766300.0</td>\n",
 
339
  " </tr>\n",
340
  " </tbody>\n",
341
  "</table>\n",
342
- "<p>3482 rows × 6 columns</p>\n",
343
  "</div>"
344
  ],
345
  "text/plain": [
346
- " date 1. open 2. high 3. low 4. close 5. volume\n",
347
- "0 2024-04-29 188.42 198.8700 184.54 194.05 243869678.0\n",
348
- "1 2024-04-26 168.85 172.1200 166.37 168.29 109815725.0\n",
349
- "2 2024-04-25 158.96 170.8800 158.36 170.18 126427521.0\n",
350
- "3 2024-04-24 162.84 167.9700 157.51 162.13 181178020.0\n",
351
- "4 2024-04-23 143.33 147.2600 141.11 144.68 124545104.0\n",
352
- "... ... ... ... ... ... ...\n",
353
- "3477 2010-07-06 20.00 20.0000 15.83 16.11 6866900.0\n",
354
- "3478 2010-07-02 23.00 23.1000 18.71 19.20 5139800.0\n",
355
- "3479 2010-07-01 25.00 25.9200 20.27 21.96 8218800.0\n",
356
- "3480 2010-06-30 25.79 30.4192 23.30 23.83 17187100.0\n",
357
- "3481 2010-06-29 19.00 25.0000 17.54 23.89 18766300.0\n",
358
  "\n",
359
- "[3482 rows x 6 columns]"
360
  ]
361
  },
362
- "execution_count": 11,
363
  "metadata": {},
364
  "output_type": "execute_result"
365
  }
366
  ],
367
  "source": [
368
- "#tsla_df"
369
  ]
370
  },
371
  {
372
  "cell_type": "code",
373
- "execution_count": 12,
374
  "metadata": {},
375
  "outputs": [],
376
  "source": [
377
  "# Assuming 'tsla_df' is your DataFrame\n",
378
- "#tsla_df.columns = [clean_column_name(col) for col in tsla_df.columns]\n"
379
  ]
380
  },
381
  {
382
  "cell_type": "code",
383
- "execution_count": 13,
384
  "metadata": {},
385
  "outputs": [
386
  {
387
  "name": "stdout",
388
  "output_type": "stream",
389
  "text": [
390
- "Index(['date', 'open', 'high', 'low', 'close', 'volume'], dtype='object')\n"
391
  ]
392
  }
393
  ],
394
  "source": [
395
- "#print(tsla_df.columns)\n"
396
  ]
397
  },
398
  {
399
  "cell_type": "code",
400
- "execution_count": 14,
401
  "metadata": {},
402
  "outputs": [],
403
  "source": [
404
  "# Define a feature group\n",
405
  "tesla_fg = fs.get_or_create_feature_group(\n",
406
- " name=\"tsla_stock\",\n",
407
  " description=\"Tesla stock dataset from alpha vantage\",\n",
408
- " version=1,\n",
409
- " primary_key=[\"date\"],\n",
410
- " online_enabled=True,\n",
 
411
  ")"
412
  ]
413
  },
414
  {
415
  "cell_type": "code",
416
- "execution_count": 15,
417
  "metadata": {},
418
  "outputs": [
419
  {
420
  "data": {
421
  "application/vnd.jupyter.widget-view+json": {
422
- "model_id": "ae6a0214d34943cabcdd66d70198ae3a",
423
  "version_major": 2,
424
  "version_minor": 0
425
  },
426
  "text/plain": [
427
- "Uploading Dataframe: 0.00% | | Rows 0/3482 | Elapsed Time: 00:00 | Remaining Time: ?"
428
  ]
429
  },
430
  "metadata": {},
@@ -434,18 +621,18 @@
434
  "name": "stdout",
435
  "output_type": "stream",
436
  "text": [
437
- "Launching job: tsla_stock_1_offline_fg_materialization\n",
438
  "Job started successfully, you can follow the progress at \n",
439
- "https://c.app.hopsworks.ai/p/549016/jobs/named/tsla_stock_1_offline_fg_materialization/executions\n"
440
  ]
441
  },
442
  {
443
  "data": {
444
  "text/plain": [
445
- "(<hsfs.core.job.Job at 0x162ac3e50>, None)"
446
  ]
447
  },
448
- "execution_count": 15,
449
  "metadata": {},
450
  "output_type": "execute_result"
451
  }
@@ -456,7 +643,59 @@
456
  },
457
  {
458
  "cell_type": "code",
459
- "execution_count": 18,
 
460
  "metadata": {},
461
  "outputs": [
462
  {
@@ -464,18 +703,18 @@
464
  "output_type": "stream",
465
  "text": [
466
  "Feature Group created successfully, explore it at \n",
467
- "https://c.app.hopsworks.ai:443/p/549016/fs/544838/fg/766341\n"
468
  ]
469
  },
470
  {
471
  "data": {
472
  "application/vnd.jupyter.widget-view+json": {
473
- "model_id": "74f0d70aeb3942c093321c530120434e",
474
  "version_major": 2,
475
  "version_minor": 0
476
  },
477
  "text/plain": [
478
- "Uploading Dataframe: 0.00% | | Rows 0/712 | Elapsed Time: 00:00 | Remaining Time: ?"
479
  ]
480
  },
481
  "metadata": {},
@@ -485,35 +724,24 @@
485
  "name": "stdout",
486
  "output_type": "stream",
487
  "text": [
488
- "Launching job: news_sentiment_1_offline_fg_materialization\n",
489
  "Job started successfully, you can follow the progress at \n",
490
- "https://c.app.hopsworks.ai/p/549016/jobs/named/news_sentiment_1_offline_fg_materialization/executions\n"
491
  ]
492
  },
493
  {
494
  "data": {
495
  "text/plain": [
496
- "(<hsfs.core.job.Job at 0x164180710>, None)"
497
  ]
498
  },
499
- "execution_count": 18,
500
  "metadata": {},
501
  "output_type": "execute_result"
502
  }
503
  ],
504
  "source": [
505
- "# Create feature group for historical news data\n",
506
- "news_df = pd.read_csv('news_articles.csv')\n",
507
- "\n",
508
- "news_sentiment_fg = fs.get_or_create_feature_group(\n",
509
- " name='news_sentiment',\n",
510
- " description='News sentiment from Polygon',\n",
511
- " version=1,\n",
512
- " primary_key=['date'],\n",
513
- " online_enabled=True,\n",
514
- ")\n",
515
- "\n",
516
- "news_sentiment_fg.insert(news_df)"
517
  ]
518
  }
519
  ],
@@ -533,7 +761,7 @@
533
  "name": "python",
534
  "nbconvert_exporter": "python",
535
  "pygments_lexer": "ipython3",
536
- "version": "3.11.4"
537
  },
538
  "orig_nbformat": 4
539
  },
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 1,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
9
+ "from dotenv import load_dotenv\n",
10
+ "import os "
11
+ ]
12
+ },
13
+ {
14
+ "cell_type": "code",
15
+ "execution_count": 2,
16
+ "metadata": {},
17
+ "outputs": [
18
+ {
19
+ "name": "stdout",
20
+ "output_type": "stream",
21
+ "text": [
22
+ "Collecting great_expectations==0.18.12\n",
23
+ " Using cached great_expectations-0.18.12-py3-none-any.whl.metadata (8.9 kB)\n",
24
+ "Requirement already satisfied: altair<5.0.0,>=4.2.1 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (4.2.2)\n",
25
+ "Requirement already satisfied: Click>=7.1.2 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (8.1.7)\n",
26
+ "Requirement already satisfied: colorama>=0.4.3 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (0.4.6)\n",
27
+ "Requirement already satisfied: cryptography>=3.2 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (42.0.6)\n",
28
+ "Requirement already satisfied: Ipython>=7.16.3 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (8.22.2)\n",
29
+ "Requirement already satisfied: ipywidgets>=7.5.1 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (8.1.2)\n",
30
+ "Requirement already satisfied: jinja2>=2.10 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (3.1.3)\n",
31
+ "Requirement already satisfied: jsonpatch>=1.22 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (1.33)\n",
32
+ "Requirement already satisfied: jsonschema>=2.5.1 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (4.22.0)\n",
33
+ "Requirement already satisfied: makefun<2,>=1.7.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (1.15.2)\n",
34
+ "Requirement already satisfied: marshmallow<4.0.0,>=3.7.1 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (3.21.2)\n",
35
+ "Requirement already satisfied: mistune>=0.8.4 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (3.0.2)\n",
36
+ "Requirement already satisfied: nbformat>=5.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (5.10.4)\n",
37
+ "Requirement already satisfied: notebook>=6.4.10 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (7.1.3)\n",
38
+ "Requirement already satisfied: packaging in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (24.0)\n",
39
+ "Requirement already satisfied: pydantic>=1.9.2 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (2.7.1)\n",
40
+ "Requirement already satisfied: pyparsing>=2.4 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (3.1.2)\n",
41
+ "Requirement already satisfied: python-dateutil>=2.8.1 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (2.9.0)\n",
42
+ "Requirement already satisfied: pytz>=2021.3 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (2024.1)\n",
43
+ "Requirement already satisfied: requests>=2.20 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (2.31.0)\n",
44
+ "Requirement already satisfied: ruamel.yaml<0.17.18,>=0.16 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (0.17.17)\n",
45
+ "Requirement already satisfied: scipy>=1.6.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (1.13.0)\n",
46
+ "Requirement already satisfied: tqdm>=4.59.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (4.66.4)\n",
47
+ "Requirement already satisfied: typing-extensions>=3.10.0.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (4.11.0)\n",
48
+ "Requirement already satisfied: tzlocal>=1.2 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (5.2)\n",
49
+ "Requirement already satisfied: urllib3>=1.26 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (2.2.1)\n",
50
+ "Requirement already satisfied: numpy<2.0.0,>=1.22.4 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (1.26.4)\n",
51
+ "Requirement already satisfied: pandas>=1.3.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from great_expectations==0.18.12) (1.5.1)\n",
52
+ "Requirement already satisfied: entrypoints in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from altair<5.0.0,>=4.2.1->great_expectations==0.18.12) (0.4)\n",
53
+ "Requirement already satisfied: toolz in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from altair<5.0.0,>=4.2.1->great_expectations==0.18.12) (0.12.1)\n",
54
+ "Requirement already satisfied: cffi>=1.12 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from cryptography>=3.2->great_expectations==0.18.12) (1.16.0)\n",
55
+ "Requirement already satisfied: decorator in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from Ipython>=7.16.3->great_expectations==0.18.12) (5.1.1)\n",
56
+ "Requirement already satisfied: jedi>=0.16 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from Ipython>=7.16.3->great_expectations==0.18.12) (0.19.1)\n",
57
+ "Requirement already satisfied: matplotlib-inline in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from Ipython>=7.16.3->great_expectations==0.18.12) (0.1.7)\n",
58
+ "Requirement already satisfied: prompt-toolkit<3.1.0,>=3.0.41 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from Ipython>=7.16.3->great_expectations==0.18.12) (3.0.42)\n",
59
+ "Requirement already satisfied: pygments>=2.4.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from Ipython>=7.16.3->great_expectations==0.18.12) (2.17.2)\n",
60
+ "Requirement already satisfied: stack-data in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from Ipython>=7.16.3->great_expectations==0.18.12) (0.6.2)\n",
61
+ "Requirement already satisfied: traitlets>=5.13.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from Ipython>=7.16.3->great_expectations==0.18.12) (5.14.3)\n",
62
+ "Requirement already satisfied: comm>=0.1.3 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from ipywidgets>=7.5.1->great_expectations==0.18.12) (0.2.2)\n",
63
+ "Requirement already satisfied: widgetsnbextension~=4.0.10 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from ipywidgets>=7.5.1->great_expectations==0.18.12) (4.0.10)\n",
64
+ "Requirement already satisfied: jupyterlab-widgets~=3.0.10 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from ipywidgets>=7.5.1->great_expectations==0.18.12) (3.0.10)\n",
65
+ "Requirement already satisfied: MarkupSafe>=2.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jinja2>=2.10->great_expectations==0.18.12) (2.1.5)\n",
66
+ "Requirement already satisfied: jsonpointer>=1.9 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jsonpatch>=1.22->great_expectations==0.18.12) (2.4)\n",
67
+ "Requirement already satisfied: attrs>=22.2.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jsonschema>=2.5.1->great_expectations==0.18.12) (23.2.0)\n",
68
+ "Requirement already satisfied: jsonschema-specifications>=2023.03.6 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jsonschema>=2.5.1->great_expectations==0.18.12) (2023.12.1)\n",
69
+ "Requirement already satisfied: referencing>=0.28.4 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jsonschema>=2.5.1->great_expectations==0.18.12) (0.35.1)\n",
70
+ "Requirement already satisfied: rpds-py>=0.7.1 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jsonschema>=2.5.1->great_expectations==0.18.12) (0.18.0)\n",
71
+ "Requirement already satisfied: fastjsonschema>=2.15 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from nbformat>=5.0->great_expectations==0.18.12) (2.19.1)\n",
72
+ "Requirement already satisfied: jupyter-core!=5.0.*,>=4.12 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from nbformat>=5.0->great_expectations==0.18.12) (5.7.2)\n",
73
+ "Requirement already satisfied: jupyter-server<3,>=2.4.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from notebook>=6.4.10->great_expectations==0.18.12) (2.14.0)\n",
74
+ "Requirement already satisfied: jupyterlab-server<3,>=2.22.1 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from notebook>=6.4.10->great_expectations==0.18.12) (2.27.1)\n",
75
+ "Requirement already satisfied: jupyterlab<4.2,>=4.1.1 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from notebook>=6.4.10->great_expectations==0.18.12) (4.1.8)\n",
76
+ "Requirement already satisfied: notebook-shim<0.3,>=0.2 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from notebook>=6.4.10->great_expectations==0.18.12) (0.2.4)\n",
77
+ "Requirement already satisfied: tornado>=6.2.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from notebook>=6.4.10->great_expectations==0.18.12) (6.3.3)\n",
78
+ "Requirement already satisfied: annotated-types>=0.4.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from pydantic>=1.9.2->great_expectations==0.18.12) (0.6.0)\n",
79
+ "Requirement already satisfied: pydantic-core==2.18.2 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from pydantic>=1.9.2->great_expectations==0.18.12) (2.18.2)\n",
80
+ "Requirement already satisfied: six>=1.5 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from python-dateutil>=2.8.1->great_expectations==0.18.12) (1.16.0)\n",
81
+ "Requirement already satisfied: charset-normalizer<4,>=2 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from requests>=2.20->great_expectations==0.18.12) (3.3.2)\n",
82
+ "Requirement already satisfied: idna<4,>=2.5 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from requests>=2.20->great_expectations==0.18.12) (3.7)\n",
83
+ "Requirement already satisfied: certifi>=2017.4.17 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from requests>=2.20->great_expectations==0.18.12) (2024.2.2)\n",
84
+ "Requirement already satisfied: tzdata in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from tzlocal>=1.2->great_expectations==0.18.12) (2024.1)\n",
85
+ "Requirement already satisfied: pycparser in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from cffi>=1.12->cryptography>=3.2->great_expectations==0.18.12) (2.22)\n",
86
+ "Requirement already satisfied: parso<0.9.0,>=0.8.3 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jedi>=0.16->Ipython>=7.16.3->great_expectations==0.18.12) (0.8.4)\n",
87
+ "Requirement already satisfied: platformdirs>=2.5 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-core!=5.0.*,>=4.12->nbformat>=5.0->great_expectations==0.18.12) (4.2.1)\n",
88
+ "Requirement already satisfied: pywin32>=300 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-core!=5.0.*,>=4.12->nbformat>=5.0->great_expectations==0.18.12) (305.1)\n",
89
+ "Requirement already satisfied: anyio>=3.1.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (4.3.0)\n",
90
+ "Requirement already satisfied: argon2-cffi>=21.1 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (23.1.0)\n",
91
+ "Requirement already satisfied: jupyter-client>=7.4.4 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (8.6.1)\n",
92
+ "Requirement already satisfied: jupyter-events>=0.9.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (0.10.0)\n",
93
+ "Requirement already satisfied: jupyter-server-terminals>=0.4.4 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (0.5.3)\n",
94
+ "Requirement already satisfied: nbconvert>=6.4.4 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (7.16.4)\n",
95
+ "Requirement already satisfied: overrides>=5.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (7.7.0)\n",
96
+ "Requirement already satisfied: prometheus-client>=0.9 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (0.20.0)\n",
97
+ "Requirement already satisfied: pywinpty>=2.0.1 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (2.0.13)\n",
98
+ "Requirement already satisfied: pyzmq>=24 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (25.1.2)\n",
99
+ "Requirement already satisfied: send2trash>=1.8.2 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (1.8.3)\n",
100
+ "Requirement already satisfied: terminado>=0.8.3 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (0.18.1)\n",
101
+ "Requirement already satisfied: websocket-client>=1.7 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (1.8.0)\n",
102
+ "Requirement already satisfied: async-lru>=1.0.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyterlab<4.2,>=4.1.1->notebook>=6.4.10->great_expectations==0.18.12) (2.0.4)\n",
103
+ "Requirement already satisfied: httpx>=0.25.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyterlab<4.2,>=4.1.1->notebook>=6.4.10->great_expectations==0.18.12) (0.27.0)\n",
104
+ "Requirement already satisfied: ipykernel>=6.5.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyterlab<4.2,>=4.1.1->notebook>=6.4.10->great_expectations==0.18.12) (6.29.3)\n",
105
+ "Requirement already satisfied: jupyter-lsp>=2.0.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyterlab<4.2,>=4.1.1->notebook>=6.4.10->great_expectations==0.18.12) (2.2.5)\n",
106
+ "Requirement already satisfied: babel>=2.10 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyterlab-server<3,>=2.22.1->notebook>=6.4.10->great_expectations==0.18.12) (2.14.0)\n",
107
+ "Requirement already satisfied: json5>=0.9.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyterlab-server<3,>=2.22.1->notebook>=6.4.10->great_expectations==0.18.12) (0.9.25)\n",
108
+ "Requirement already satisfied: wcwidth in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from prompt-toolkit<3.1.0,>=3.0.41->Ipython>=7.16.3->great_expectations==0.18.12) (0.2.13)\n",
109
+ "Requirement already satisfied: executing>=1.2.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from stack-data->Ipython>=7.16.3->great_expectations==0.18.12) (2.0.1)\n",
110
+ "Requirement already satisfied: asttokens>=2.1.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from stack-data->Ipython>=7.16.3->great_expectations==0.18.12) (2.4.1)\n",
111
+ "Requirement already satisfied: pure-eval in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from stack-data->Ipython>=7.16.3->great_expectations==0.18.12) (0.2.2)\n",
112
+ "Requirement already satisfied: sniffio>=1.1 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from anyio>=3.1.0->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (1.3.1)\n",
113
+ "Requirement already satisfied: argon2-cffi-bindings in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from argon2-cffi>=21.1->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (21.2.0)\n",
114
+ "Requirement already satisfied: httpcore==1.* in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from httpx>=0.25.0->jupyterlab<4.2,>=4.1.1->notebook>=6.4.10->great_expectations==0.18.12) (1.0.5)\n",
115
+ "Requirement already satisfied: h11<0.15,>=0.13 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from httpcore==1.*->httpx>=0.25.0->jupyterlab<4.2,>=4.1.1->notebook>=6.4.10->great_expectations==0.18.12) (0.14.0)\n",
116
+ "Requirement already satisfied: debugpy>=1.6.5 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from ipykernel>=6.5.0->jupyterlab<4.2,>=4.1.1->notebook>=6.4.10->great_expectations==0.18.12) (1.6.7)\n",
117
+ "Requirement already satisfied: nest-asyncio in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from ipykernel>=6.5.0->jupyterlab<4.2,>=4.1.1->notebook>=6.4.10->great_expectations==0.18.12) (1.6.0)\n",
118
+ "Requirement already satisfied: psutil in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from ipykernel>=6.5.0->jupyterlab<4.2,>=4.1.1->notebook>=6.4.10->great_expectations==0.18.12) (5.9.0)\n",
119
+ "Requirement already satisfied: python-json-logger>=2.0.4 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (2.0.7)\n",
120
+ "Requirement already satisfied: pyyaml>=5.3 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (6.0.1)\n",
121
+ "Requirement already satisfied: rfc3339-validator in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (0.1.4)\n",
122
+ "Requirement already satisfied: rfc3986-validator>=0.1.1 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (0.1.1)\n",
123
+ "Requirement already satisfied: beautifulsoup4 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from nbconvert>=6.4.4->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (4.12.3)\n",
124
+ "Requirement already satisfied: bleach!=5.0.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from nbconvert>=6.4.4->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (6.1.0)\n",
125
+ "Requirement already satisfied: defusedxml in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from nbconvert>=6.4.4->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (0.7.1)\n",
126
+ "Requirement already satisfied: jupyterlab-pygments in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from nbconvert>=6.4.4->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (0.3.0)\n",
127
+ "Requirement already satisfied: nbclient>=0.5.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from nbconvert>=6.4.4->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (0.10.0)\n",
128
+ "Requirement already satisfied: pandocfilters>=1.4.1 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from nbconvert>=6.4.4->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (1.5.1)\n",
129
+ "Requirement already satisfied: tinycss2 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from nbconvert>=6.4.4->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (1.3.0)\n",
130
+ "Requirement already satisfied: webencodings in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from bleach!=5.0.0->nbconvert>=6.4.4->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (0.5.1)\n",
131
+ "Requirement already satisfied: fqdn in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jsonschema[format-nongpl]>=4.18.0->jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (1.5.1)\n",
132
+ "Requirement already satisfied: isoduration in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jsonschema[format-nongpl]>=4.18.0->jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (20.11.0)\n",
133
+ "Requirement already satisfied: uri-template in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jsonschema[format-nongpl]>=4.18.0->jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (1.3.0)\n",
134
+ "Requirement already satisfied: webcolors>=1.11 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from jsonschema[format-nongpl]>=4.18.0->jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (1.13)\n",
135
+ "Requirement already satisfied: soupsieve>1.2 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from beautifulsoup4->nbconvert>=6.4.4->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (2.5)\n",
136
+ "Requirement already satisfied: arrow>=0.15.0 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from isoduration->jsonschema[format-nongpl]>=4.18.0->jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (1.3.0)\n",
137
+ "Requirement already satisfied: types-python-dateutil>=2.8.10 in c:\\users\\frede\\onedrive\\dokumenter\\master\\mlops\\mlops_mod-2\\.conda\\lib\\site-packages (from arrow>=0.15.0->isoduration->jsonschema[format-nongpl]>=4.18.0->jupyter-events>=0.9.0->jupyter-server<3,>=2.4.0->notebook>=6.4.10->great_expectations==0.18.12) (2.9.0.20240316)\n",
138
+ "Using cached great_expectations-0.18.12-py3-none-any.whl (5.4 MB)\n",
139
+ "Installing collected packages: great_expectations\n",
140
+ " Attempting uninstall: great_expectations\n",
141
+ " Found existing installation: great-expectations 0.18.13\n",
142
+ " Uninstalling great-expectations-0.18.13:\n",
143
+ " Successfully uninstalled great-expectations-0.18.13\n",
144
+ "Successfully installed great_expectations-0.18.12\n"
145
+ ]
146
+ }
147
+ ],
148
+ "source": [
149
+ "!pip install great_expectations==0.18.12"
150
  ]
151
  },
152
  {
153
  "cell_type": "code",
154
  "execution_count": 4,
155
  "metadata": {},
156
+ "outputs": [],
157
+ "source": [
158
+ "# Import necessary libraries\n",
159
+ "import pandas as pd # For data manipulation using DataFrames\n",
160
+ "import numpy as np # For numerical operations\n",
161
+ "import matplotlib.pyplot as plt # For data visualization\n",
162
+ "import os # For operating system-related tasks\n",
163
+ "import joblib # For saving and loading models\n",
164
+ "import hopsworks # For getting access to hopsworks\n",
165
+ "\n",
166
+ "\n",
167
+ "\n",
168
+ "# Import specific modules from scikit-learn\n",
169
+ "from sklearn.preprocessing import StandardScaler, OneHotEncoder # For data preprocessing\n",
170
+ "from sklearn.metrics import accuracy_score # For evaluating model accuracy"
171
+ ]
172
+ },
173
+ {
174
+ "cell_type": "code",
175
+ "execution_count": 3,
176
+ "metadata": {},
177
  "outputs": [
178
  {
179
  "name": "stdout",
 
181
  "text": [
182
  " 1. open 2. high 3. low 4. close 5. volume\n",
183
  "date \n",
184
+ "2024-05-03 182.10 184.78 178.4200 181.19 75491539.0\n",
185
+ "2024-05-02 182.86 184.60 176.0200 180.01 89148041.0\n",
186
+ "2024-05-01 182.00 185.86 179.0100 179.99 92829719.0\n",
187
  "2024-04-30 186.98 190.95 182.8401 183.28 127031787.0\n",
188
+ "2024-04-29 188.42 198.87 184.5400 194.05 243869678.0\n"
 
 
 
189
  ]
190
  }
191
  ],
 
271
  ],
272
  "source": [
273
  "# Define your file path and name\n",
274
+ "#file_path = 'TSLA_stock_price.csv' # Customize the path and filename\n",
275
  "\n",
276
  "# Save the DataFrame to CSV\n",
277
+ "#stock_data.to_csv(file_path)\n",
278
  "\n",
279
+ "#print(f\"Data saved to {file_path}\")\n"
280
  ]
281
  },
282
  {
283
  "cell_type": "code",
284
+ "execution_count": 9,
285
  "metadata": {},
286
  "outputs": [
287
  {
288
  "name": "stdout",
289
  "output_type": "stream",
290
  "text": [
291
+ " date 1. open 2. high 3. low 4. close 5. volume ticker\n",
292
+ "0 2024-05-02 182.86 184.60 176.0200 180.01 89148041.0 TSLA\n",
293
+ "1 2024-05-01 182.00 185.86 179.0100 179.99 92829719.0 TSLA\n",
294
+ "2 2024-04-30 186.98 190.95 182.8401 183.28 127031787.0 TSLA\n",
295
+ "3 2024-04-29 188.42 198.87 184.5400 194.05 243869678.0 TSLA\n",
296
+ "4 2024-04-26 168.85 172.12 166.3700 168.29 109815725.0 TSLA\n"
297
  ]
298
  }
299
  ],
300
  "source": [
301
  "# Load and display the data from CSV to confirm\n",
302
+ "tsla_df = pd.read_csv('TSLA_stock_price.csv')\n",
303
+ "print(tsla_df.head())\n",
304
+ " "
305
  ]
306
  },
307
  {
308
  "cell_type": "code",
309
+ "execution_count": 5,
310
  "metadata": {},
311
  "outputs": [
312
  {
313
  "name": "stdout",
314
  "output_type": "stream",
315
  "text": [
316
+ "Connected. Call `.close()` to terminate connection gracefully.\n",
317
  "\n",
318
+ "Logged in to project, explore it here https://c.app.hopsworks.ai:443/p/549015\n",
319
  "Connected. Call `.close()` to terminate connection gracefully.\n"
320
  ]
321
  }
322
  ],
323
  "source": [
324
+ "api_key = os.environ.get('hopsworks_api')\n",
325
+ "project = hopsworks.login(api_key_value=api_key)\n",
326
+ "fs = project.get_feature_store()"
 
327
  ]
328
  },
329
  {
330
  "cell_type": "code",
331
+ "execution_count": 11,
332
  "metadata": {},
333
  "outputs": [],
334
  "source": [
335
+ "import re "
336
  ]
337
  },
338
  {
339
  "cell_type": "code",
340
+ "execution_count": 12,
341
  "metadata": {},
342
  "outputs": [],
343
  "source": [
344
+ "def clean_column_name(name):\n",
345
  " # Remove all non-letter characters\n",
346
+ " cleaned_name = re.sub(r'[^a-zA-Z]', '', name)\n",
347
+ " return cleaned_name\n"
348
  ]
349
  },
350
  {
351
  "cell_type": "code",
352
+ "execution_count": 13,
353
  "metadata": {},
354
  "outputs": [
355
  {
 
379
  " <th>3. low</th>\n",
380
  " <th>4. close</th>\n",
381
  " <th>5. volume</th>\n",
382
+ " <th>ticker</th>\n",
383
  " </tr>\n",
384
  " </thead>\n",
385
  " <tbody>\n",
386
  " <tr>\n",
387
  " <th>0</th>\n",
388
+ " <td>2024-05-02</td>\n",
389
+ " <td>182.86</td>\n",
390
+ " <td>184.6000</td>\n",
391
+ " <td>176.0200</td>\n",
392
+ " <td>180.01</td>\n",
393
+ " <td>89148041.0</td>\n",
394
+ " <td>TSLA</td>\n",
395
  " </tr>\n",
396
  " <tr>\n",
397
  " <th>1</th>\n",
398
+ " <td>2024-05-01</td>\n",
399
+ " <td>182.00</td>\n",
400
+ " <td>185.8600</td>\n",
401
+ " <td>179.0100</td>\n",
402
+ " <td>179.99</td>\n",
403
+ " <td>92829719.0</td>\n",
404
+ " <td>TSLA</td>\n",
405
  " </tr>\n",
406
  " <tr>\n",
407
  " <th>2</th>\n",
408
+ " <td>2024-04-30</td>\n",
409
+ " <td>186.98</td>\n",
410
+ " <td>190.9500</td>\n",
411
+ " <td>182.8401</td>\n",
412
+ " <td>183.28</td>\n",
413
+ " <td>127031787.0</td>\n",
414
+ " <td>TSLA</td>\n",
415
  " </tr>\n",
416
  " <tr>\n",
417
  " <th>3</th>\n",
418
+ " <td>2024-04-29</td>\n",
419
+ " <td>188.42</td>\n",
420
+ " <td>198.8700</td>\n",
421
+ " <td>184.5400</td>\n",
422
+ " <td>194.05</td>\n",
423
+ " <td>243869678.0</td>\n",
424
+ " <td>TSLA</td>\n",
425
  " </tr>\n",
426
  " <tr>\n",
427
  " <th>4</th>\n",
428
+ " <td>2024-04-26</td>\n",
429
+ " <td>168.85</td>\n",
430
+ " <td>172.1200</td>\n",
431
+ " <td>166.3700</td>\n",
432
+ " <td>168.29</td>\n",
433
+ " <td>109815725.0</td>\n",
434
+ " <td>TSLA</td>\n",
435
  " </tr>\n",
436
  " <tr>\n",
437
  " <th>...</th>\n",
 
441
  " <td>...</td>\n",
442
  " <td>...</td>\n",
443
  " <td>...</td>\n",
444
+ " <td>...</td>\n",
445
  " </tr>\n",
446
  " <tr>\n",
447
+ " <th>3480</th>\n",
448
  " <td>2010-07-06</td>\n",
449
  " <td>20.00</td>\n",
450
  " <td>20.0000</td>\n",
451
+ " <td>15.8300</td>\n",
452
  " <td>16.11</td>\n",
453
  " <td>6866900.0</td>\n",
454
+ " <td>TSLA</td>\n",
455
  " </tr>\n",
456
  " <tr>\n",
457
+ " <th>3481</th>\n",
458
  " <td>2010-07-02</td>\n",
459
  " <td>23.00</td>\n",
460
  " <td>23.1000</td>\n",
461
+ " <td>18.7100</td>\n",
462
  " <td>19.20</td>\n",
463
  " <td>5139800.0</td>\n",
464
+ " <td>TSLA</td>\n",
465
  " </tr>\n",
466
  " <tr>\n",
467
+ " <th>3482</th>\n",
468
  " <td>2010-07-01</td>\n",
469
  " <td>25.00</td>\n",
470
  " <td>25.9200</td>\n",
471
+ " <td>20.2700</td>\n",
472
  " <td>21.96</td>\n",
473
  " <td>8218800.0</td>\n",
474
+ " <td>TSLA</td>\n",
475
  " </tr>\n",
476
  " <tr>\n",
477
+ " <th>3483</th>\n",
478
  " <td>2010-06-30</td>\n",
479
  " <td>25.79</td>\n",
480
  " <td>30.4192</td>\n",
481
+ " <td>23.3000</td>\n",
482
  " <td>23.83</td>\n",
483
  " <td>17187100.0</td>\n",
484
+ " <td>TSLA</td>\n",
485
  " </tr>\n",
486
  " <tr>\n",
487
+ " <th>3484</th>\n",
488
  " <td>2010-06-29</td>\n",
489
  " <td>19.00</td>\n",
490
  " <td>25.0000</td>\n",
491
+ " <td>17.5400</td>\n",
492
  " <td>23.89</td>\n",
493
  " <td>18766300.0</td>\n",
494
+ " <td>TSLA</td>\n",
495
  " </tr>\n",
496
  " </tbody>\n",
497
  "</table>\n",
498
+ "<p>3485 rows × 7 columns</p>\n",
499
  "</div>"
500
  ],
501
  "text/plain": [
502
+ " date 1. open 2. high 3. low 4. close 5. volume ticker\n",
503
+ "0 2024-05-02 182.86 184.6000 176.0200 180.01 89148041.0 TSLA\n",
504
+ "1 2024-05-01 182.00 185.8600 179.0100 179.99 92829719.0 TSLA\n",
505
+ "2 2024-04-30 186.98 190.9500 182.8401 183.28 127031787.0 TSLA\n",
506
+ "3 2024-04-29 188.42 198.8700 184.5400 194.05 243869678.0 TSLA\n",
507
+ "4 2024-04-26 168.85 172.1200 166.3700 168.29 109815725.0 TSLA\n",
508
+ "... ... ... ... ... ... ... ...\n",
509
+ "3480 2010-07-06 20.00 20.0000 15.8300 16.11 6866900.0 TSLA\n",
510
+ "3481 2010-07-02 23.00 23.1000 18.7100 19.20 5139800.0 TSLA\n",
511
+ "3482 2010-07-01 25.00 25.9200 20.2700 21.96 8218800.0 TSLA\n",
512
+ "3483 2010-06-30 25.79 30.4192 23.3000 23.83 17187100.0 TSLA\n",
513
+ "3484 2010-06-29 19.00 25.0000 17.5400 23.89 18766300.0 TSLA\n",
514
  "\n",
515
+ "[3485 rows x 7 columns]"
516
  ]
517
  },
518
+ "execution_count": 13,
519
  "metadata": {},
520
  "output_type": "execute_result"
521
  }
522
  ],
523
  "source": [
524
+ "tsla_df"
525
  ]
526
  },
527
  {
528
  "cell_type": "code",
529
+ "execution_count": 14,
530
  "metadata": {},
531
  "outputs": [],
532
  "source": [
533
  "# Assuming 'tsla_df' is your DataFrame\n",
534
+ "tsla_df.columns = [clean_column_name(col) for col in tsla_df.columns]\n"
535
  ]
536
  },
537
  {
538
  "cell_type": "code",
539
+ "execution_count": 15,
540
  "metadata": {},
541
  "outputs": [
542
  {
543
  "name": "stdout",
544
  "output_type": "stream",
545
  "text": [
546
+ "Index(['date', 'open', 'high', 'low', 'close', 'volume', 'ticker'], dtype='object')\n"
547
  ]
548
  }
549
  ],
550
  "source": [
551
+ "print(tsla_df.columns)\n"
552
  ]
553
  },
554
  {
555
  "cell_type": "code",
556
+ "execution_count": 39,
557
  "metadata": {},
558
  "outputs": [],
559
+ "source": [
560
+ "import pandas as pd\n",
561
+ "\n",
562
+ "# Assuming tsla_df is your pandas DataFrame\n",
563
+ "# Convert the \"date\" column to timestamp\n",
564
+ "tsla_df['date'] = pd.to_datetime(tsla_df['date'])\n"
565
+ ]
566
+ },
567
+ {
568
+ "cell_type": "code",
569
+ "execution_count": 43,
570
+ "metadata": {},
571
+ "outputs": [
572
+ {
573
+ "name": "stdout",
574
+ "output_type": "stream",
575
+ "text": [
576
+ "2024-05-04 20:50:09,340 WARNING: DeprecationWarning: Providing event_time as a single-element list is deprecated and will be dropped in future versions. Provide the feature_name string instead.\n",
577
+ "\n"
578
+ ]
579
+ }
580
+ ],
581
  "source": [
582
  "# Define a feature group\n",
583
  "tesla_fg = fs.get_or_create_feature_group(\n",
584
+ " name=\"tesla_stock\",\n",
585
  " description=\"Tesla stock dataset from alpha vantage\",\n",
586
+ " version=3,\n",
587
+ " primary_key=[\"ticker\"],\n",
588
+ " event_time=['date'],\n",
589
+ " online_enabled=False,\n",
590
  ")"
591
  ]
592
  },
593
  {
594
  "cell_type": "code",
595
+ "execution_count": 45,
596
  "metadata": {},
597
  "outputs": [
598
+ {
599
+ "name": "stdout",
600
+ "output_type": "stream",
601
+ "text": [
602
+ "Feature Group created successfully, explore it at \n",
603
+ "https://c.app.hopsworks.ai:443/p/549015/fs/544840/fg/782673\n"
604
+ ]
605
+ },
606
  {
607
  "data": {
608
  "application/vnd.jupyter.widget-view+json": {
609
+ "model_id": "4971a5b3b0854f14ad95992b88f71a16",
610
  "version_major": 2,
611
  "version_minor": 0
612
  },
613
  "text/plain": [
614
+ "Uploading Dataframe: 0.00% | | Rows 0/3485 | Elapsed Time: 00:00 | Remaining Time: ?"
615
  ]
616
  },
617
  "metadata": {},
 
621
  "name": "stdout",
622
  "output_type": "stream",
623
  "text": [
624
+ "Launching job: tesla_stock_3_offline_fg_materialization\n",
625
  "Job started successfully, you can follow the progress at \n",
626
+ "https://c.app.hopsworks.ai/p/549015/jobs/named/tesla_stock_3_offline_fg_materialization/executions\n"
627
  ]
628
  },
629
  {
630
  "data": {
631
  "text/plain": [
632
+ "(<hsfs.core.job.Job at 0x21d84b9dc10>, None)"
633
  ]
634
  },
635
+ "execution_count": 45,
636
  "metadata": {},
637
  "output_type": "execute_result"
638
  }
 
643
  },
644
  {
645
  "cell_type": "code",
646
+ "execution_count": 19,
647
+ "metadata": {},
648
+ "outputs": [],
649
+ "source": [
650
+ "news_df = pd.read_csv('news_articles_ema.csv')\n"
651
+ ]
652
+ },
653
+ {
654
+ "cell_type": "code",
655
+ "execution_count": 29,
656
+ "metadata": {},
657
+ "outputs": [],
658
+ "source": [
659
+ "news_df_updated = news_df.drop(columns=['exp_mean_7_days'])"
660
+ ]
661
+ },
662
+ {
663
+ "cell_type": "code",
664
+ "execution_count": 46,
665
+ "metadata": {},
666
+ "outputs": [],
667
+ "source": [
668
+ "news_df_updated['date'] = pd.to_datetime(news_df_updated['date'])"
669
+ ]
670
+ },
671
+ {
672
+ "cell_type": "code",
673
+ "execution_count": 49,
674
+ "metadata": {},
675
+ "outputs": [
676
+ {
677
+ "name": "stdout",
678
+ "output_type": "stream",
679
+ "text": [
680
+ "2024-05-04 20:53:43,335 WARNING: DeprecationWarning: Providing event_time as a single-element list is deprecated and will be dropped in future versions. Provide the feature_name string instead.\n",
681
+ "\n"
682
+ ]
683
+ }
684
+ ],
685
+ "source": [
686
+ "news_sentiment_fg = fs.get_or_create_feature_group(\n",
687
+ " name='news_sentiment_updated',\n",
688
+ " description='News sentiment from Polygon',\n",
689
+ " version=2,\n",
690
+ " primary_key=['ticker'],\n",
691
+ " event_time=['date'],\n",
692
+ " online_enabled=False,\n",
693
+ ")"
694
+ ]
695
+ },
696
+ {
697
+ "cell_type": "code",
698
+ "execution_count": 50,
699
  "metadata": {},
700
  "outputs": [
701
  {
 
703
  "output_type": "stream",
704
  "text": [
705
  "Feature Group created successfully, explore it at \n",
706
+ "https://c.app.hopsworks.ai:443/p/549015/fs/544840/fg/780662\n"
707
  ]
708
  },
709
  {
710
  "data": {
711
  "application/vnd.jupyter.widget-view+json": {
712
+ "model_id": "868940acc1d74ad987a98daee01a2b75",
713
  "version_major": 2,
714
  "version_minor": 0
715
  },
716
  "text/plain": [
717
+ "Uploading Dataframe: 0.00% | | Rows 0/720 | Elapsed Time: 00:00 | Remaining Time: ?"
718
  ]
719
  },
720
  "metadata": {},
 
724
  "name": "stdout",
725
  "output_type": "stream",
726
  "text": [
727
+ "Launching job: news_sentiment_updated_2_offline_fg_materialization\n",
728
  "Job started successfully, you can follow the progress at \n",
729
+ "https://c.app.hopsworks.ai/p/549015/jobs/named/news_sentiment_updated_2_offline_fg_materialization/executions\n"
730
  ]
731
  },
732
  {
733
  "data": {
734
  "text/plain": [
735
+ "(<hsfs.core.job.Job at 0x21d85223610>, None)"
736
  ]
737
  },
738
+ "execution_count": 50,
739
  "metadata": {},
740
  "output_type": "execute_result"
741
  }
742
  ],
743
  "source": [
744
+ "news_sentiment_fg.insert(news_df_updated)"
745
  ]
746
  }
747
  ],
 
761
  "name": "python",
762
  "nbconvert_exporter": "python",
763
  "pygments_lexer": "ipython3",
764
+ "version": "3.11.9"
765
  },
766
  "orig_nbformat": 4
767
  },
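Note: the notebook cells added above clean the Alpha Vantage column headers and convert the date column before the data is written to Hopsworks. A minimal, self-contained sketch of that step (the sample row below is illustrative, not taken from this commit):

import re
import pandas as pd

def clean_column_name(name):
    # Keep letters only, e.g. "1. open" -> "open", "5. volume" -> "volume"
    return re.sub(r'[^a-zA-Z]', '', name)

# Illustrative row in the Alpha Vantage column layout used by this repo
tsla_df = pd.DataFrame(
    [["2024-05-02", 182.86, 184.60, 176.02, 180.01, 89148041.0, "TSLA"]],
    columns=["date", "1. open", "2. high", "3. low", "4. close", "5. volume", "ticker"],
)
tsla_df.columns = [clean_column_name(col) for col in tsla_df.columns]
tsla_df["date"] = pd.to_datetime(tsla_df["date"])
print(tsla_df.columns)
# Index(['date', 'open', 'high', 'low', 'close', 'volume', 'ticker'], dtype='object')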
feature_pipeline.py CHANGED
@@ -3,51 +3,64 @@ from dotenv import load_dotenv
3
  import os
4
 
5
  # %%
6
- from alpha_vantage.timeseries import TimeSeries
7
- import pandas as pd
8
 
9
- load_dotenv()
 
 
 
 
 
 
 
10
 
11
- api_key = os.environ.get('stocks_api') # Replace this with your actual API key
12
- ts = TimeSeries(key=api_key, output_format='pandas')
13
 
14
- # Fetch daily adjusted stock prices; adjust the symbol as needed
15
- data, meta_data = ts.get_daily(symbol='TSLA', outputsize='full')
16
 
17
- print(data.head())
 
 
18
 
19
  # %%
 
 
20
 
 
 
 
 
 
 
 
 
 
21
 
22
  # %%
23
- data.info()
24
 
25
  # %%
26
- meta_data
27
 
28
  # %%
29
  # Define your file path and name
30
- file_path = 'TSLA_stock_price.csv' # Customize the path and filename
31
 
32
  # Save the DataFrame to CSV
33
- data.to_csv(file_path)
34
 
35
- print(f"Data saved to {file_path}")
36
 
37
 
38
  # %%
39
  # Load and display the data from CSV to confirm
40
- tsla_df = pd.read_csv(file_path)
41
  print(tsla_df.head())
42
-
43
 
44
  # %%
45
- import hopsworks
46
-
47
- project = hopsworks.login()
48
  fs = project.get_feature_store()
49
 
50
-
51
  # %%
52
  import re
53
 
@@ -70,31 +83,49 @@ tsla_df.columns = [clean_column_name(col) for col in tsla_df.columns]
70
  print(tsla_df.columns)
71
 
72
 
 
 
 
 
 
 
 
 
73
  # %%
74
  # Define a feature group
75
  tesla_fg = fs.get_or_create_feature_group(
76
- name="tsla_stock",
77
  description="Tesla stock dataset from alpha vantage",
78
- version=1,
79
- primary_key=["date"],
80
- online_enabled=True,
 
81
  )
82
 
83
  # %%
84
  tesla_fg.insert(tsla_df, write_options={"wait_for_job" : False})
85
 
86
  # %%
87
- # Create feature group for historical news data
88
- news_df = pd.read_csv('news_articles.csv')
89
 
 
 
 
 
 
 
 
90
  news_sentiment_fg = fs.get_or_create_feature_group(
91
- name='news_sentiment',
92
  description='News sentiment from Polygon',
93
- version=1,
94
- primary_key=['date'],
95
- online_enabled=True,
 
96
  )
97
 
98
- news_sentiment_fg.insert(news_df)
 
99
 
100
 
 
3
  import os
4
 
5
  # %%
6
+ #!pip install great_expectations==0.18.12
 
7
 
8
+ # %%
9
+ # Import necessary libraries
10
+ import pandas as pd # For data manipulation using DataFrames
11
+ import numpy as np # For numerical operations
12
+ import matplotlib.pyplot as plt # For data visualization
13
+ import os # For operating system-related tasks
14
+ import joblib # For saving and loading models
15
+ import hopsworks # For getting access to hopsworks
16
 
 
 
17
 
 
 
18
 
19
+ # Import specific modules from scikit-learn
20
+ from sklearn.preprocessing import StandardScaler, OneHotEncoder # For data preprocessing
21
+ from sklearn.metrics import accuracy_score # For evaluating model accuracy
22
 
23
  # %%
24
+ #from alpha_vantage.timeseries import TimeSeries
25
+ #import pandas as pd
26
 
27
+ #load_dotenv()
28
+
29
+ #api_key = os.environ.get('stocks_api') # Replace this with your actual API key
30
+ #ts = TimeSeries(key=api_key, output_format='pandas')
31
+
32
+ # Fetch daily adjusted stock prices; adjust the symbol as needed
33
+ #data, meta_data = ts.get_daily(symbol='TSLA', outputsize='full')
34
+
35
+ #print(data.head())
36
 
37
  # %%
38
+ #data.info()
39
 
40
  # %%
41
+ #meta_data
42
 
43
  # %%
44
  # Define your file path and name
45
+ #file_path = 'TSLA_stock_price.csv' # Customize the path and filename
46
 
47
  # Save the DataFrame to CSV
48
+ #stock_data.to_csv(file_path)
49
 
50
+ #print(f"Data saved to {file_path}")
51
 
52
 
53
  # %%
54
  # Load and display the data from CSV to confirm
55
+ tsla_df = pd.read_csv('TSLA_stock_price.csv')
56
  print(tsla_df.head())
57
+
58
 
59
  # %%
60
+ api_key = os.environ.get('hopsworks_api')
61
+ project = hopsworks.login(api_key_value=api_key)
 
62
  fs = project.get_feature_store()
63
 
 
64
  # %%
65
  import re
66
 
 
83
  print(tsla_df.columns)
84
 
85
 
86
+ # %%
87
+ import pandas as pd
88
+
89
+ # Assuming tsla_df is your pandas DataFrame
90
+ # Convert the "date" column to timestamp
91
+ tsla_df['date'] = pd.to_datetime(tsla_df['date'])
92
+
93
+
94
  # %%
95
  # Define a feature group
96
  tesla_fg = fs.get_or_create_feature_group(
97
+ name="tesla_stock",
98
  description="Tesla stock dataset from alpha vantage",
99
+ version=3,
100
+ primary_key=["ticker"],
101
+ event_time=['date'],
102
+ online_enabled=False,
103
  )
104
 
105
  # %%
106
  tesla_fg.insert(tsla_df, write_options={"wait_for_job" : False})
107
 
108
  # %%
109
+ news_df = pd.read_csv('news_articles_ema.csv')
110
+
111
 
112
+ # %%
113
+ news_df_updated = news_df.drop(columns=['exp_mean_7_days'])
114
+
115
+ # %%
116
+ news_df_updated['date'] = pd.to_datetime(news_df_updated['date'])
117
+
118
+ # %%
119
  news_sentiment_fg = fs.get_or_create_feature_group(
120
+ name='news_sentiment_updated',
121
  description='News sentiment from Polygon',
122
+ version=2,
123
+ primary_key=['ticker'],
124
+ event_time=['date'],
125
+ online_enabled=False,
126
  )
127
 
128
+ # %%
129
+ news_sentiment_fg.insert(news_df_updated)
130
 
131
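The next file, feature_view_freddie.ipynb, builds a feature view on top of the two feature groups defined above; in this commit its run still fails server-side with "HopsFs Connector not found". A condensed sketch of the intended get-or-create flow, assuming both groups have been materialized under the names and versions used here (feature-group names, selected columns, and label come from the notebook; the helper name is illustrative):

def get_or_create_stocks_feature_view(fs, version=1):
    """Return the tsla_stocks_fv feature view, creating it on first use."""
    try:
        # Reuse the view if an earlier run already registered it.
        return fs.get_feature_view("tsla_stocks_fv", version=version)
    except Exception:
        tesla_fg = fs.get_feature_group("tesla_stock", version=3)
        news_sentiment_fg = fs.get_feature_group("news_sentiment_updated", version=2)

        # Join stock prices with the news sentiment features.
        ds_query = tesla_fg.select(["date", "open", "ticker"]) \
            .join(news_sentiment_fg.select_except(["ticker", "time", "amp_url", "image_url"]))

        return fs.create_feature_view(
            name="tsla_stocks_fv",
            version=version,
            query=ds_query,
            labels=["ticker"],
        )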
 
feature_view_freddie.ipynb CHANGED
@@ -2,7 +2,7 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 17,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
@@ -23,9 +23,73 @@
23
  },
24
  {
25
  "cell_type": "code",
26
- "execution_count": 18,
27
  "metadata": {},
28
- "outputs": [],
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
29
  "source": [
30
  "from feature_pipeline import tesla_fg\n",
31
  "from feature_pipeline import news_sentiment_fg"
@@ -33,7 +97,7 @@
33
  },
34
  {
35
  "cell_type": "code",
36
- "execution_count": 19,
37
  "metadata": {},
38
  "outputs": [
39
  {
@@ -42,7 +106,7 @@
42
  "True"
43
  ]
44
  },
45
- "execution_count": 19,
46
  "metadata": {},
47
  "output_type": "execute_result"
48
  }
@@ -56,7 +120,7 @@
56
  },
57
  {
58
  "cell_type": "code",
59
- "execution_count": 20,
60
  "metadata": {},
61
  "outputs": [
62
  {
@@ -79,86 +143,133 @@
79
  },
80
  {
81
  "cell_type": "code",
82
- "execution_count": 28,
83
  "metadata": {},
84
  "outputs": [],
85
  "source": [
86
  "def create_stocks_feature_view(fs, version):\n",
87
  "\n",
88
- " #Loading in the feature groups\n",
89
- " tesla_fg = fs.get_feature_group('tsla_stock', version = 1)\n",
90
- " news_sentiment_fg = fs.get_feature_group('news_sentiment', version = 1)\n",
91
  "\n",
92
- " ds_query = tesla_fg.select(['date','open', 'high', 'low', 'closed', 'volume'])\\\n",
93
- " .join(news_sentiment_fg.select_except(['time', 'amp_url', 'image_url']))\n",
94
- " \n",
95
- " transformation_functions = {\n",
96
- " 'open': fs.get_transformation_function(name='min_max_scaler'),\n",
97
- " 'high': fs.get_transformation_function(name='min_max_scaler'),\n",
98
- " 'low' : fs.get_transformation_function(name='min_max_scaler'),\n",
99
- " 'closed' : fs.get_transformation_function(name='min_max_scaler'),\n",
100
- " 'volume' : fs.get_transformation_function(name='min_max_scaler'),\n",
101
- " 'sentiment' : fs.get_transformation_function(name='min_max_scaler'),\n",
102
- " }\n",
103
  "\n",
104
- " return (fs.create_tesla_feature_view(\n",
105
- " name = 'tsla_stocks_fv',\n",
106
- " query = ds_query,\n",
107
- " labels=['date']\n",
108
- " ), tesla_fg)"
109
  ]
110
  },
111
  {
112
  "cell_type": "code",
113
- "execution_count": 29,
114
  "metadata": {},
115
  "outputs": [
116
- {
117
- "name": "stdout",
118
- "output_type": "stream",
119
- "text": [
120
- "[<hsfs.feature_group.FeatureGroup object at 0x000001A5AA2B98D0>]\n",
121
- "[<hsfs.feature_group.FeatureGroup object at 0x000001A5A9D90F90>]\n"
122
- ]
123
- },
124
  {
125
  "ename": "RestAPIError",
126
- "evalue": "Metadata operation error: (url: https://c.app.hopsworks.ai/hopsworks-api/api/project/549015/featurestores/544840/transformationfunctions). Server response: \nHTTP code: 404, HTTP reason: Not Found, body: b'{\"errorCode\":270046,\"usrMsg\":\"HOPSFS Connector: HOPSFS\",\"errorMsg\":\"HopsFs Connector not found\"}', error code: 270046, error msg: HopsFs Connector not found, user msg: HOPSFS Connector: HOPSFS",
127
  "output_type": "error",
128
  "traceback": [
129
  "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
130
  "\u001b[1;31mRestAPIError\u001b[0m Traceback (most recent call last)",
131
- "Cell \u001b[1;32mIn[29], line 2\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m----> 2\u001b[0m feature_view \u001b[38;5;241m=\u001b[39m \u001b[43mfs\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_feature_view\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtsla_stocks_fv\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mversion\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 3\u001b[0m tesla_fg \u001b[38;5;241m=\u001b[39m fs\u001b[38;5;241m.\u001b[39mget_feature_group(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtsla_stock\u001b[39m\u001b[38;5;124m'\u001b[39m, version\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m1\u001b[39m)\n",
132
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\usage.py:212\u001b[0m, in \u001b[0;36mmethod_logger.<locals>.wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 211\u001b[0m exception \u001b[38;5;241m=\u001b[39m e\n\u001b[1;32m--> 212\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[0;32m 213\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n",
133
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\usage.py:208\u001b[0m, in \u001b[0;36mmethod_logger.<locals>.wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 206\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 207\u001b[0m \u001b[38;5;66;03m# Call the original method\u001b[39;00m\n\u001b[1;32m--> 208\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 209\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m result\n",
134
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\feature_store.py:1661\u001b[0m, in \u001b[0;36mFeatureStore.get_feature_view\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 1660\u001b[0m version \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mDEFAULT_VERSION\n\u001b[1;32m-> 1661\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_feature_view_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mversion\u001b[49m\u001b[43m)\u001b[49m\n",
135
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\core\\feature_view_engine.py:127\u001b[0m, in \u001b[0;36mFeatureViewEngine.get\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 126\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m version:\n\u001b[1;32m--> 127\u001b[0m fv \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_feature_view_api\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_by_name_version\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mversion\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 128\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mattach_transformation_function(fv)\n",
136
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\core\\feature_view_api.py:113\u001b[0m, in \u001b[0;36mFeatureViewApi.get_by_name_version\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 112\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 113\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m e\n",
137
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\core\\feature_view_api.py:101\u001b[0m, in \u001b[0;36mFeatureViewApi.get_by_name_version\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 99\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 100\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m feature_view\u001b[38;5;241m.\u001b[39mFeatureView\u001b[38;5;241m.\u001b[39mfrom_response_json(\n\u001b[1;32m--> 101\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_client\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_send_request\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 102\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_GET\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpath\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m{\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mexpand\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mquery\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfeatures\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m}\u001b[49m\n\u001b[0;32m 103\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 104\u001b[0m )\n\u001b[0;32m 105\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m RestAPIError \u001b[38;5;28;01mas\u001b[39;00m e:\n",
138
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\decorators.py:35\u001b[0m, in \u001b[0;36mconnected.<locals>.if_connected\u001b[1;34m(inst, *args, **kwargs)\u001b[0m\n\u001b[0;32m 34\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m NoHopsworksConnectionError\n\u001b[1;32m---> 35\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[43minst\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
139
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\client\\base.py:179\u001b[0m, in \u001b[0;36mClient._send_request\u001b[1;34m(self, method, path_params, query_params, headers, data, stream, files)\u001b[0m\n\u001b[0;32m 178\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m response\u001b[38;5;241m.\u001b[39mstatus_code \u001b[38;5;241m/\u001b[39m\u001b[38;5;241m/\u001b[39m \u001b[38;5;241m100\u001b[39m \u001b[38;5;241m!=\u001b[39m \u001b[38;5;241m2\u001b[39m:\n\u001b[1;32m--> 179\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m exceptions\u001b[38;5;241m.\u001b[39mRestAPIError(url, response)\n\u001b[0;32m 181\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m stream:\n",
140
  "\u001b[1;31mRestAPIError\u001b[0m: Metadata operation error: (url: https://c.app.hopsworks.ai/hopsworks-api/api/project/549015/featurestores/544840/featureview/tsla_stocks_fv/version/1). Server response: \nHTTP code: 404, HTTP reason: Not Found, body: b'{\"errorCode\":270181,\"usrMsg\":\"There exists no feature view with the name tsla_stocks_fv and version 1.\",\"errorMsg\":\"Feature view wasn\\'t found.\"}', error code: 270181, error msg: Feature view wasn't found., user msg: There exists no feature view with the name tsla_stocks_fv and version 1.",
141
  "\nDuring handling of the above exception, another exception occurred:\n",
142
  "\u001b[1;31mRestAPIError\u001b[0m Traceback (most recent call last)",
143
- "Cell \u001b[1;32mIn[29], line 5\u001b[0m\n\u001b[0;32m 3\u001b[0m tesla_fg \u001b[38;5;241m=\u001b[39m fs\u001b[38;5;241m.\u001b[39mget_feature_group(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtsla_stock\u001b[39m\u001b[38;5;124m'\u001b[39m, version\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m1\u001b[39m)\n\u001b[0;32m 4\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m:\n\u001b[1;32m----> 5\u001b[0m feature_view, trans_fg \u001b[38;5;241m=\u001b[39m \u001b[43mcreate_stocks_feature_view\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m)\u001b[49m\n",
144
- "Cell \u001b[1;32mIn[28], line 11\u001b[0m, in \u001b[0;36mcreate_stocks_feature_view\u001b[1;34m(fs, version)\u001b[0m\n\u001b[0;32m 5\u001b[0m news_sentiment_fg \u001b[38;5;241m=\u001b[39m fs\u001b[38;5;241m.\u001b[39mget_feature_group(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mnews_sentiment\u001b[39m\u001b[38;5;124m'\u001b[39m, version \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m)\n\u001b[0;32m 7\u001b[0m ds_query \u001b[38;5;241m=\u001b[39m tesla_fg\u001b[38;5;241m.\u001b[39mselect([\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mdate\u001b[39m\u001b[38;5;124m'\u001b[39m,\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mopen\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mhigh\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mlow\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mclosed\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mvolume\u001b[39m\u001b[38;5;124m'\u001b[39m])\\\n\u001b[0;32m 8\u001b[0m \u001b[38;5;241m.\u001b[39mjoin(news_sentiment_fg\u001b[38;5;241m.\u001b[39mselect_except([\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtime\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mamp_url\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mimage_url\u001b[39m\u001b[38;5;124m'\u001b[39m]))\n\u001b[0;32m 10\u001b[0m transformation_functions \u001b[38;5;241m=\u001b[39m {\n\u001b[1;32m---> 11\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mopen\u001b[39m\u001b[38;5;124m'\u001b[39m: \u001b[43mfs\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_transformation_function\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mmin_max_scaler\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m,\n\u001b[0;32m 12\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mhigh\u001b[39m\u001b[38;5;124m'\u001b[39m: fs\u001b[38;5;241m.\u001b[39mget_transformation_function(name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mmin_max_scaler\u001b[39m\u001b[38;5;124m'\u001b[39m),\n\u001b[0;32m 13\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mlow\u001b[39m\u001b[38;5;124m'\u001b[39m : fs\u001b[38;5;241m.\u001b[39mget_transformation_function(name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mmin_max_scaler\u001b[39m\u001b[38;5;124m'\u001b[39m),\n\u001b[0;32m 14\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mclosed\u001b[39m\u001b[38;5;124m'\u001b[39m : fs\u001b[38;5;241m.\u001b[39mget_transformation_function(name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mmin_max_scaler\u001b[39m\u001b[38;5;124m'\u001b[39m),\n\u001b[0;32m 15\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mvolume\u001b[39m\u001b[38;5;124m'\u001b[39m : fs\u001b[38;5;241m.\u001b[39mget_transformation_function(name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mmin_max_scaler\u001b[39m\u001b[38;5;124m'\u001b[39m),\n\u001b[0;32m 16\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124msentiment\u001b[39m\u001b[38;5;124m'\u001b[39m : fs\u001b[38;5;241m.\u001b[39mget_transformation_function(name\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mmin_max_scaler\u001b[39m\u001b[38;5;124m'\u001b[39m),\n\u001b[0;32m 17\u001b[0m }\n\u001b[0;32m 19\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
(fs\u001b[38;5;241m.\u001b[39mcreate_tesla_feature_view(\n\u001b[0;32m 20\u001b[0m name \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtsla_stocks_fv\u001b[39m\u001b[38;5;124m'\u001b[39m,\n\u001b[0;32m 21\u001b[0m query \u001b[38;5;241m=\u001b[39m ds_query,\n\u001b[0;32m 22\u001b[0m labels\u001b[38;5;241m=\u001b[39m[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mdate\u001b[39m\u001b[38;5;124m'\u001b[39m]\n\u001b[0;32m 23\u001b[0m ), tesla_fg)\n",
145
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\usage.py:212\u001b[0m, in \u001b[0;36mmethod_logger.<locals>.wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 210\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m 211\u001b[0m exception \u001b[38;5;241m=\u001b[39m e\n\u001b[1;32m--> 212\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[0;32m 213\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 214\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n",
146
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\usage.py:208\u001b[0m, in \u001b[0;36mmethod_logger.<locals>.wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 205\u001b[0m exception \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 206\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 207\u001b[0m \u001b[38;5;66;03m# Call the original method\u001b[39;00m\n\u001b[1;32m--> 208\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 209\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m result\n\u001b[0;32m 210\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n",
147
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\feature_store.py:1404\u001b[0m, in \u001b[0;36mFeatureStore.get_transformation_function\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 1304\u001b[0m \u001b[38;5;129m@usage\u001b[39m\u001b[38;5;241m.\u001b[39mmethod_logger\n\u001b[0;32m 1305\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mget_transformation_function\u001b[39m(\n\u001b[0;32m 1306\u001b[0m \u001b[38;5;28mself\u001b[39m,\n\u001b[0;32m 1307\u001b[0m name: \u001b[38;5;28mstr\u001b[39m,\n\u001b[0;32m 1308\u001b[0m version: Optional[\u001b[38;5;28mint\u001b[39m] \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m,\n\u001b[0;32m 1309\u001b[0m ):\n\u001b[0;32m 1310\u001b[0m \u001b[38;5;250m \u001b[39m\u001b[38;5;124;03m\"\"\"Get transformation function metadata object.\u001b[39;00m\n\u001b[0;32m 1311\u001b[0m \n\u001b[0;32m 1312\u001b[0m \u001b[38;5;124;03m !!! example \"Get transformation function by name. This will default to version 1\"\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 1402\u001b[0m \u001b[38;5;124;03m `TransformationFunction`: The TransformationFunction metadata object.\u001b[39;00m\n\u001b[0;32m 1403\u001b[0m \u001b[38;5;124;03m \"\"\"\u001b[39;00m\n\u001b[1;32m-> 1404\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_transformation_function_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_transformation_fn\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mversion\u001b[49m\u001b[43m)\u001b[49m\n",
148
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\core\\transformation_function_engine.py:65\u001b[0m, in \u001b[0;36mTransformationFunctionEngine.get_transformation_fn\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 63\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mget_transformation_fn\u001b[39m(\u001b[38;5;28mself\u001b[39m, name, version\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m):\n\u001b[0;32m 64\u001b[0m transformation_fn_instances \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m---> 65\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_transformation_function_api\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_transformation_fn\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mversion\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 66\u001b[0m )\n\u001b[0;32m 67\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m transformation_fn_instances[\u001b[38;5;241m0\u001b[39m]\n",
149
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\core\\transformation_function_api.py:72\u001b[0m, in \u001b[0;36mTransformationFunctionApi.get_transformation_fn\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 69\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m version:\n\u001b[0;32m 70\u001b[0m query_params[\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mversion\u001b[39m\u001b[38;5;124m\"\u001b[39m] \u001b[38;5;241m=\u001b[39m version\n\u001b[0;32m 71\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m transformation_function\u001b[38;5;241m.\u001b[39mTransformationFunction\u001b[38;5;241m.\u001b[39mfrom_response_json(\n\u001b[1;32m---> 72\u001b[0m \u001b[43m_client\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_send_request\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mGET\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpath_params\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mquery_params\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 73\u001b[0m )\n\u001b[0;32m 74\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m 75\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m transformation_function\u001b[38;5;241m.\u001b[39mTransformationFunction\u001b[38;5;241m.\u001b[39mfrom_response_json(\n\u001b[0;32m 76\u001b[0m _client\u001b[38;5;241m.\u001b[39m_send_request(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mGET\u001b[39m\u001b[38;5;124m\"\u001b[39m, path_params)\n\u001b[0;32m 77\u001b[0m )\n",
150
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\decorators.py:35\u001b[0m, in \u001b[0;36mconnected.<locals>.if_connected\u001b[1;34m(inst, *args, **kwargs)\u001b[0m\n\u001b[0;32m 33\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m inst\u001b[38;5;241m.\u001b[39m_connected:\n\u001b[0;32m 34\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m NoHopsworksConnectionError\n\u001b[1;32m---> 35\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[43minst\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
151
- "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod\\.conda\\Lib\\site-packages\\hsfs\\client\\base.py:179\u001b[0m, in \u001b[0;36mClient._send_request\u001b[1;34m(self, method, path_params, query_params, headers, data, stream, files)\u001b[0m\n\u001b[0;32m 176\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_session\u001b[38;5;241m.\u001b[39msend(prepped, verify\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_verify, stream\u001b[38;5;241m=\u001b[39mstream)\n\u001b[0;32m 178\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m response\u001b[38;5;241m.\u001b[39mstatus_code \u001b[38;5;241m/\u001b[39m\u001b[38;5;241m/\u001b[39m \u001b[38;5;241m100\u001b[39m \u001b[38;5;241m!=\u001b[39m \u001b[38;5;241m2\u001b[39m:\n\u001b[1;32m--> 179\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m exceptions\u001b[38;5;241m.\u001b[39mRestAPIError(url, response)\n\u001b[0;32m 181\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m stream:\n\u001b[0;32m 182\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m response\n",
152
- "\u001b[1;31mRestAPIError\u001b[0m: Metadata operation error: (url: https://c.app.hopsworks.ai/hopsworks-api/api/project/549015/featurestores/544840/transformationfunctions). Server response: \nHTTP code: 404, HTTP reason: Not Found, body: b'{\"errorCode\":270046,\"usrMsg\":\"HOPSFS Connector: HOPSFS\",\"errorMsg\":\"HopsFs Connector not found\"}', error code: 270046, error msg: HopsFs Connector not found, user msg: HOPSFS Connector: HOPSFS"
153
  ]
154
  }
155
  ],
156
  "source": [
157
  "try:\n",
158
  " feature_view = fs.get_feature_view(\"tsla_stocks_fv\", version=1)\n",
159
- " tesla_fg = fs.get_feature_group('tsla_stock', version=1)\n",
160
  "except:\n",
161
- " feature_view, trans_fg = create_stocks_feature_view(fs, 1)"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
162
  ]
163
  }
164
  ],
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": 2,
6
  "metadata": {},
7
  "outputs": [],
8
  "source": [
 
23
  },
24
  {
25
  "cell_type": "code",
26
+ "execution_count": 4,
27
  "metadata": {},
28
+ "outputs": [
29
+ {
30
+ "name": "stdout",
31
+ "output_type": "stream",
32
+ "text": [
33
+ " date 1. open 2. high 3. low 4. close 5. volume ticker\n",
34
+ "0 2024-05-02 182.86 184.60 176.0200 180.01 89148041.0 TSLA\n",
35
+ "1 2024-05-01 182.00 185.86 179.0100 179.99 92829719.0 TSLA\n",
36
+ "2 2024-04-30 186.98 190.95 182.8401 183.28 127031787.0 TSLA\n",
37
+ "3 2024-04-29 188.42 198.87 184.5400 194.05 243869678.0 TSLA\n",
38
+ "4 2024-04-26 168.85 172.12 166.3700 168.29 109815725.0 TSLA\n",
39
+ "Connected. Call `.close()` to terminate connection gracefully.\n",
40
+ "\n",
41
+ "Logged in to project, explore it here https://c.app.hopsworks.ai:443/p/549015\n",
42
+ "Connected. Call `.close()` to terminate connection gracefully.\n",
43
+ "Index(['date', 'open', 'high', 'low', 'close', 'volume', 'ticker'], dtype='object')\n"
44
+ ]
45
+ },
46
+ {
47
+ "data": {
48
+ "application/vnd.jupyter.widget-view+json": {
49
+ "model_id": "baf52b6f47c74767b5aecb1df6f17c0c",
50
+ "version_major": 2,
51
+ "version_minor": 0
52
+ },
53
+ "text/plain": [
54
+ "Uploading Dataframe: 0.00% | | Rows 0/3485 | Elapsed Time: 00:00 | Remaining Time: ?"
55
+ ]
56
+ },
57
+ "metadata": {},
58
+ "output_type": "display_data"
59
+ },
60
+ {
61
+ "name": "stdout",
62
+ "output_type": "stream",
63
+ "text": [
64
+ "Launching job: tesla_stock_3_offline_fg_materialization\n",
65
+ "Job started successfully, you can follow the progress at \n",
66
+ "https://c.app.hopsworks.ai/p/549015/jobs/named/tesla_stock_3_offline_fg_materialization/executions\n"
67
+ ]
68
+ },
69
+ {
70
+ "data": {
71
+ "application/vnd.jupyter.widget-view+json": {
72
+ "model_id": "3deb8c10be3a44a2b91c63ffe8762725",
73
+ "version_major": 2,
74
+ "version_minor": 0
75
+ },
76
+ "text/plain": [
77
+ "Uploading Dataframe: 0.00% | | Rows 0/720 | Elapsed Time: 00:00 | Remaining Time: ?"
78
+ ]
79
+ },
80
+ "metadata": {},
81
+ "output_type": "display_data"
82
+ },
83
+ {
84
+ "name": "stdout",
85
+ "output_type": "stream",
86
+ "text": [
87
+ "Launching job: news_sentiment_updated_2_offline_fg_materialization\n",
88
+ "Job started successfully, you can follow the progress at \n",
89
+ "https://c.app.hopsworks.ai/p/549015/jobs/named/news_sentiment_updated_2_offline_fg_materialization/executions\n"
90
+ ]
91
+ }
92
+ ],
93
  "source": [
94
  "from feature_pipeline import tesla_fg\n",
95
  "from feature_pipeline import news_sentiment_fg"
 
97
  },
98
  {
99
  "cell_type": "code",
100
+ "execution_count": 5,
101
  "metadata": {},
102
  "outputs": [
103
  {
 
106
  "True"
107
  ]
108
  },
109
+ "execution_count": 5,
110
  "metadata": {},
111
  "output_type": "execute_result"
112
  }
 
120
  },
121
  {
122
  "cell_type": "code",
123
+ "execution_count": 6,
124
  "metadata": {},
125
  "outputs": [
126
  {
 
143
  },
144
  {
145
  "cell_type": "code",
146
+ "execution_count": 18,
147
  "metadata": {},
148
  "outputs": [],
149
  "source": [
150
  "def create_stocks_feature_view(fs, version):\n",
151
  "\n",
152
+ " # Loading in the feature groups\n",
153
+ " tesla_fg = fs.get_feature_group('tesla_stock', version=3)\n",
154
+ " news_sentiment_fg = fs.get_feature_group('news_sentiment_updated', version=2)\n",
155
  "\n",
156
+ " # Define the query\n",
157
+ " ds_query = tesla_fg.select(['date', 'open', 'ticker'])\\\n",
158
+ " .join(news_sentiment_fg.select_except(['ticker', 'time', 'amp_url', 'image_url']))\n",
159
+ "\n",
160
+ " # Create the feature view\n",
161
+ " feature_view = fs.create_feature_view(\n",
162
+ " name='tsla_stocks_fv',\n",
163
+ " query=ds_query,\n",
164
+ " labels=['ticker']\n",
165
+ " )\n",
 
166
  "\n",
167
+ " return feature_view, tesla_fg"
 
 
 
 
168
  ]
169
  },
170
  {
171
  "cell_type": "code",
172
+ "execution_count": 19,
173
  "metadata": {},
174
  "outputs": [
 
 
 
 
 
 
 
 
175
  {
176
  "ename": "RestAPIError",
177
+ "evalue": "Metadata operation error: (url: https://c.app.hopsworks.ai/hopsworks-api/api/project/549015/featurestores/544840/featureview). Server response: \nHTTP code: 404, HTTP reason: Not Found, body: b'{\"errorCode\":270046,\"usrMsg\":\"HOPSFS Connector: fklitte_Training_Datasets\",\"errorMsg\":\"HopsFs Connector not found\"}', error code: 270046, error msg: HopsFs Connector not found, user msg: HOPSFS Connector: fklitte_Training_Datasets",
178
  "output_type": "error",
179
  "traceback": [
180
  "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
181
  "\u001b[1;31mRestAPIError\u001b[0m Traceback (most recent call last)",
182
+ "Cell \u001b[1;32mIn[19], line 2\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m----> 2\u001b[0m feature_view \u001b[38;5;241m=\u001b[39m \u001b[43mfs\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_feature_view\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtsla_stocks_fv\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mversion\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 3\u001b[0m tesla_fg \u001b[38;5;241m=\u001b[39m fs\u001b[38;5;241m.\u001b[39mget_feature_group(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtesla_stock\u001b[39m\u001b[38;5;124m'\u001b[39m, version\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m3\u001b[39m)\n",
183
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\usage.py:212\u001b[0m, in \u001b[0;36mmethod_logger.<locals>.wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 211\u001b[0m exception \u001b[38;5;241m=\u001b[39m e\n\u001b[1;32m--> 212\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[0;32m 213\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n",
184
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\usage.py:208\u001b[0m, in \u001b[0;36mmethod_logger.<locals>.wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 206\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 207\u001b[0m \u001b[38;5;66;03m# Call the original method\u001b[39;00m\n\u001b[1;32m--> 208\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 209\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m result\n",
185
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\feature_store.py:1661\u001b[0m, in \u001b[0;36mFeatureStore.get_feature_view\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 1660\u001b[0m version \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mDEFAULT_VERSION\n\u001b[1;32m-> 1661\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_feature_view_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mversion\u001b[49m\u001b[43m)\u001b[49m\n",
186
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\core\\feature_view_engine.py:127\u001b[0m, in \u001b[0;36mFeatureViewEngine.get\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 126\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m version:\n\u001b[1;32m--> 127\u001b[0m fv \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_feature_view_api\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_by_name_version\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mversion\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 128\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mattach_transformation_function(fv)\n",
187
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\core\\feature_view_api.py:113\u001b[0m, in \u001b[0;36mFeatureViewApi.get_by_name_version\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 112\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 113\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m e\n",
188
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\core\\feature_view_api.py:101\u001b[0m, in \u001b[0;36mFeatureViewApi.get_by_name_version\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 99\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 100\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m feature_view\u001b[38;5;241m.\u001b[39mFeatureView\u001b[38;5;241m.\u001b[39mfrom_response_json(\n\u001b[1;32m--> 101\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_client\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_send_request\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 102\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_GET\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpath\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m{\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mexpand\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mquery\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfeatures\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m}\u001b[49m\n\u001b[0;32m 103\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 104\u001b[0m )\n\u001b[0;32m 105\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m RestAPIError \u001b[38;5;28;01mas\u001b[39;00m e:\n",
189
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\decorators.py:35\u001b[0m, in \u001b[0;36mconnected.<locals>.if_connected\u001b[1;34m(inst, *args, **kwargs)\u001b[0m\n\u001b[0;32m 34\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m NoHopsworksConnectionError\n\u001b[1;32m---> 35\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[43minst\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
190
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\client\\base.py:176\u001b[0m, in \u001b[0;36mClient._send_request\u001b[1;34m(self, method, path_params, query_params, headers, data, stream, files)\u001b[0m\n\u001b[0;32m 175\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m response\u001b[38;5;241m.\u001b[39mstatus_code \u001b[38;5;241m/\u001b[39m\u001b[38;5;241m/\u001b[39m \u001b[38;5;241m100\u001b[39m \u001b[38;5;241m!=\u001b[39m \u001b[38;5;241m2\u001b[39m:\n\u001b[1;32m--> 176\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m exceptions\u001b[38;5;241m.\u001b[39mRestAPIError(url, response)\n\u001b[0;32m 178\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m stream:\n",
191
  "\u001b[1;31mRestAPIError\u001b[0m: Metadata operation error: (url: https://c.app.hopsworks.ai/hopsworks-api/api/project/549015/featurestores/544840/featureview/tsla_stocks_fv/version/1). Server response: \nHTTP code: 404, HTTP reason: Not Found, body: b'{\"errorCode\":270181,\"usrMsg\":\"There exists no feature view with the name tsla_stocks_fv and version 1.\",\"errorMsg\":\"Feature view wasn\\'t found.\"}', error code: 270181, error msg: Feature view wasn't found., user msg: There exists no feature view with the name tsla_stocks_fv and version 1.",
192
  "\nDuring handling of the above exception, another exception occurred:\n",
193
  "\u001b[1;31mRestAPIError\u001b[0m Traceback (most recent call last)",
194
+ "Cell \u001b[1;32mIn[19], line 5\u001b[0m\n\u001b[0;32m 3\u001b[0m tesla_fg \u001b[38;5;241m=\u001b[39m fs\u001b[38;5;241m.\u001b[39mget_feature_group(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtesla_stock\u001b[39m\u001b[38;5;124m'\u001b[39m, version\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m3\u001b[39m)\n\u001b[0;32m 4\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m:\n\u001b[1;32m----> 5\u001b[0m feature_view, tesla_fg \u001b[38;5;241m=\u001b[39m \u001b[43mcreate_stocks_feature_view\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m)\u001b[49m\n",
195
+ "Cell \u001b[1;32mIn[18], line 12\u001b[0m, in \u001b[0;36mcreate_stocks_feature_view\u001b[1;34m(fs, version)\u001b[0m\n\u001b[0;32m 8\u001b[0m ds_query \u001b[38;5;241m=\u001b[39m tesla_fg\u001b[38;5;241m.\u001b[39mselect([\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mdate\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mopen\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mticker\u001b[39m\u001b[38;5;124m'\u001b[39m])\\\n\u001b[0;32m 9\u001b[0m \u001b[38;5;241m.\u001b[39mjoin(news_sentiment_fg\u001b[38;5;241m.\u001b[39mselect_except([\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mticker\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtime\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mamp_url\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mimage_url\u001b[39m\u001b[38;5;124m'\u001b[39m]))\n\u001b[0;32m 11\u001b[0m \u001b[38;5;66;03m# Create the feature view\u001b[39;00m\n\u001b[1;32m---> 12\u001b[0m feature_view \u001b[38;5;241m=\u001b[39m \u001b[43mfs\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcreate_feature_view\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 13\u001b[0m \u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mtsla_stocks_fv\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[0;32m 14\u001b[0m \u001b[43m \u001b[49m\u001b[43mquery\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mds_query\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 15\u001b[0m \u001b[43m \u001b[49m\u001b[43mlabels\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mticker\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m]\u001b[49m\n\u001b[0;32m 16\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 18\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m feature_view, tesla_fg\n",
196
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\usage.py:212\u001b[0m, in \u001b[0;36mmethod_logger.<locals>.wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 210\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m 211\u001b[0m exception \u001b[38;5;241m=\u001b[39m e\n\u001b[1;32m--> 212\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[0;32m 213\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[0;32m 214\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n",
197
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\usage.py:208\u001b[0m, in \u001b[0;36mmethod_logger.<locals>.wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 205\u001b[0m exception \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[0;32m 206\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 207\u001b[0m \u001b[38;5;66;03m# Call the original method\u001b[39;00m\n\u001b[1;32m--> 208\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 209\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m result\n\u001b[0;32m 210\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n",
198
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\feature_store.py:1538\u001b[0m, in \u001b[0;36mFeatureStore.create_feature_view\u001b[1;34m(self, name, query, version, description, labels, inference_helper_columns, training_helper_columns, transformation_functions)\u001b[0m\n\u001b[0;32m 1436\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"Create a feature view metadata object and saved it to hopsworks.\u001b[39;00m\n\u001b[0;32m 1437\u001b[0m \n\u001b[0;32m 1438\u001b[0m \u001b[38;5;124;03m!!! example\u001b[39;00m\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 1524\u001b[0m \u001b[38;5;124;03m `FeatureView`: The feature view metadata object.\u001b[39;00m\n\u001b[0;32m 1525\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[0;32m 1526\u001b[0m feat_view \u001b[38;5;241m=\u001b[39m feature_view\u001b[38;5;241m.\u001b[39mFeatureView(\n\u001b[0;32m 1527\u001b[0m name\u001b[38;5;241m=\u001b[39mname,\n\u001b[0;32m 1528\u001b[0m query\u001b[38;5;241m=\u001b[39mquery,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 1536\u001b[0m transformation_functions\u001b[38;5;241m=\u001b[39mtransformation_functions,\n\u001b[0;32m 1537\u001b[0m )\n\u001b[1;32m-> 1538\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_feature_view_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msave\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfeat_view\u001b[49m\u001b[43m)\u001b[49m\n",
199
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\core\\feature_view_engine.py:113\u001b[0m, in \u001b[0;36mFeatureViewEngine.save\u001b[1;34m(self, feature_view_obj)\u001b[0m\n\u001b[0;32m 104\u001b[0m feature_view_obj\u001b[38;5;241m.\u001b[39m_features\u001b[38;5;241m.\u001b[39mappend(\n\u001b[0;32m 105\u001b[0m training_dataset_feature\u001b[38;5;241m.\u001b[39mTrainingDatasetFeature(\n\u001b[0;32m 106\u001b[0m name\u001b[38;5;241m=\u001b[39mfeature\u001b[38;5;241m.\u001b[39mname,\n\u001b[1;32m (...)\u001b[0m\n\u001b[0;32m 109\u001b[0m )\n\u001b[0;32m 110\u001b[0m )\n\u001b[0;32m 112\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_transformation_function_engine\u001b[38;5;241m.\u001b[39mattach_transformation_fn(feature_view_obj)\n\u001b[1;32m--> 113\u001b[0m updated_fv \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_feature_view_api\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpost\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfeature_view_obj\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 114\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mattach_transformation_function(updated_fv)\n\u001b[0;32m 115\u001b[0m \u001b[38;5;28mprint\u001b[39m(\n\u001b[0;32m 116\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mFeature view created successfully, explore it at \u001b[39m\u001b[38;5;130;01m\\n\u001b[39;00m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m 117\u001b[0m \u001b[38;5;241m+\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_get_feature_view_url(updated_fv)\n\u001b[0;32m 118\u001b[0m )\n",
200
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\core\\feature_view_api.py:60\u001b[0m, in \u001b[0;36mFeatureViewApi.post\u001b[1;34m(self, feature_view_obj)\u001b[0m\n\u001b[0;32m 57\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mpost\u001b[39m(\u001b[38;5;28mself\u001b[39m, feature_view_obj):\n\u001b[0;32m 58\u001b[0m headers \u001b[38;5;241m=\u001b[39m {\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcontent-type\u001b[39m\u001b[38;5;124m\"\u001b[39m: \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mapplication/json\u001b[39m\u001b[38;5;124m\"\u001b[39m}\n\u001b[0;32m 59\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m feature_view_obj\u001b[38;5;241m.\u001b[39mupdate_from_response_json(\n\u001b[1;32m---> 60\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_client\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_send_request\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 61\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_POST\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 62\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_base_path\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 63\u001b[0m \u001b[43m \u001b[49m\u001b[43mheaders\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mheaders\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 64\u001b[0m \u001b[43m \u001b[49m\u001b[43mdata\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfeature_view_obj\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mjson\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\n\u001b[0;32m 65\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 66\u001b[0m )\n",
201
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\decorators.py:35\u001b[0m, in \u001b[0;36mconnected.<locals>.if_connected\u001b[1;34m(inst, *args, **kwargs)\u001b[0m\n\u001b[0;32m 33\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m inst\u001b[38;5;241m.\u001b[39m_connected:\n\u001b[0;32m 34\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m NoHopsworksConnectionError\n\u001b[1;32m---> 35\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[43minst\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
202
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\client\\base.py:176\u001b[0m, in \u001b[0;36mClient._send_request\u001b[1;34m(self, method, path_params, query_params, headers, data, stream, files)\u001b[0m\n\u001b[0;32m 171\u001b[0m response \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_retry_token_expired(\n\u001b[0;32m 172\u001b[0m request, stream, \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mTOKEN_EXPIRED_RETRY_INTERVAL, \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m 173\u001b[0m )\n\u001b[0;32m 175\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m response\u001b[38;5;241m.\u001b[39mstatus_code \u001b[38;5;241m/\u001b[39m\u001b[38;5;241m/\u001b[39m \u001b[38;5;241m100\u001b[39m \u001b[38;5;241m!=\u001b[39m \u001b[38;5;241m2\u001b[39m:\n\u001b[1;32m--> 176\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m exceptions\u001b[38;5;241m.\u001b[39mRestAPIError(url, response)\n\u001b[0;32m 178\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m stream:\n\u001b[0;32m 179\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m response\n",
203
+ "\u001b[1;31mRestAPIError\u001b[0m: Metadata operation error: (url: https://c.app.hopsworks.ai/hopsworks-api/api/project/549015/featurestores/544840/featureview). Server response: \nHTTP code: 404, HTTP reason: Not Found, body: b'{\"errorCode\":270046,\"usrMsg\":\"HOPSFS Connector: fklitte_Training_Datasets\",\"errorMsg\":\"HopsFs Connector not found\"}', error code: 270046, error msg: HopsFs Connector not found, user msg: HOPSFS Connector: fklitte_Training_Datasets"
204
  ]
205
  }
206
  ],
207
  "source": [
208
  "try:\n",
209
  " feature_view = fs.get_feature_view(\"tsla_stocks_fv\", version=1)\n",
210
+ " tesla_fg = fs.get_feature_group('tesla_stock', version=3)\n",
211
  "except:\n",
212
+ " feature_view, tesla_fg = create_stocks_feature_view(fs, 1)"
213
+ ]
214
+ },
215
+ {
216
+ "cell_type": "code",
217
+ "execution_count": 14,
218
+ "metadata": {},
219
+ "outputs": [],
220
+ "source": [
221
+ "#def create_stocks_feature_view(fs, version):\n",
222
+ "\n",
223
+ " #Loading in the feature groups\n",
224
+ "# tesla_fg = fs.get_feature_group('tesla_stock', version = 3)\n",
225
+ "# news_sentiment_fg = fs.get_feature_group('news_sentiment_updated', version = 2)\n",
226
+ "\n",
227
+ "# ds_query = tesla_fg.select(['date','open', 'ticker'])\\\n",
228
+ "# .join(news_sentiment_fg.select_except(['ticker','time', 'amp_url', 'image_url']))\n",
229
+ " \n",
230
+ "# return (fs.create_tesla_feature_view(\n",
231
+ "# name = 'tsla_stocks_fv',\n",
232
+ "# query = ds_query,\n",
233
+ "# labels=['ticker']\n",
234
+ "# ), tesla_fg)"
235
+ ]
236
+ },
237
+ {
238
+ "cell_type": "code",
239
+ "execution_count": 15,
240
+ "metadata": {},
241
+ "outputs": [
242
+ {
243
+ "ename": "AttributeError",
244
+ "evalue": "'FeatureStore' object has no attribute 'create_tesla_feature_view'",
245
+ "output_type": "error",
246
+ "traceback": [
247
+ "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
248
+ "\u001b[1;31mRestAPIError\u001b[0m Traceback (most recent call last)",
249
+ "Cell \u001b[1;32mIn[15], line 2\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m----> 2\u001b[0m feature_view \u001b[38;5;241m=\u001b[39m \u001b[43mfs\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_feature_view\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mtsla_stocks_fv\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mversion\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[0;32m 3\u001b[0m tesla_fg \u001b[38;5;241m=\u001b[39m fs\u001b[38;5;241m.\u001b[39mget_feature_group(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtesla_stock\u001b[39m\u001b[38;5;124m'\u001b[39m, version\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m3\u001b[39m)\n",
250
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\usage.py:212\u001b[0m, in \u001b[0;36mmethod_logger.<locals>.wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 211\u001b[0m exception \u001b[38;5;241m=\u001b[39m e\n\u001b[1;32m--> 212\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m e\n\u001b[0;32m 213\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n",
251
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\usage.py:208\u001b[0m, in \u001b[0;36mmethod_logger.<locals>.wrapper\u001b[1;34m(*args, **kwargs)\u001b[0m\n\u001b[0;32m 206\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 207\u001b[0m \u001b[38;5;66;03m# Call the original method\u001b[39;00m\n\u001b[1;32m--> 208\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mfunc\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 209\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m result\n",
252
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\feature_store.py:1661\u001b[0m, in \u001b[0;36mFeatureStore.get_feature_view\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 1660\u001b[0m version \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mDEFAULT_VERSION\n\u001b[1;32m-> 1661\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_feature_view_engine\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mversion\u001b[49m\u001b[43m)\u001b[49m\n",
253
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\core\\feature_view_engine.py:127\u001b[0m, in \u001b[0;36mFeatureViewEngine.get\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 126\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m version:\n\u001b[1;32m--> 127\u001b[0m fv \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_feature_view_api\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mget_by_name_version\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mversion\u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 128\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mattach_transformation_function(fv)\n",
254
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\core\\feature_view_api.py:113\u001b[0m, in \u001b[0;36mFeatureViewApi.get_by_name_version\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 112\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m--> 113\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m e\n",
255
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\core\\feature_view_api.py:101\u001b[0m, in \u001b[0;36mFeatureViewApi.get_by_name_version\u001b[1;34m(self, name, version)\u001b[0m\n\u001b[0;32m 99\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m 100\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m feature_view\u001b[38;5;241m.\u001b[39mFeatureView\u001b[38;5;241m.\u001b[39mfrom_response_json(\n\u001b[1;32m--> 101\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_client\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_send_request\u001b[49m\u001b[43m(\u001b[49m\n\u001b[0;32m 102\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_GET\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpath\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43m{\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mexpand\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mquery\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mfeatures\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m}\u001b[49m\n\u001b[0;32m 103\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[0;32m 104\u001b[0m )\n\u001b[0;32m 105\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m RestAPIError \u001b[38;5;28;01mas\u001b[39;00m e:\n",
256
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\decorators.py:35\u001b[0m, in \u001b[0;36mconnected.<locals>.if_connected\u001b[1;34m(inst, *args, **kwargs)\u001b[0m\n\u001b[0;32m 34\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m NoHopsworksConnectionError\n\u001b[1;32m---> 35\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfn\u001b[49m\u001b[43m(\u001b[49m\u001b[43minst\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n",
257
+ "File \u001b[1;32mc:\\Users\\frede\\OneDrive\\Dokumenter\\Master\\MLops\\MLops_mod-2\\.conda\\Lib\\site-packages\\hsfs\\client\\base.py:176\u001b[0m, in \u001b[0;36mClient._send_request\u001b[1;34m(self, method, path_params, query_params, headers, data, stream, files)\u001b[0m\n\u001b[0;32m 175\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m response\u001b[38;5;241m.\u001b[39mstatus_code \u001b[38;5;241m/\u001b[39m\u001b[38;5;241m/\u001b[39m \u001b[38;5;241m100\u001b[39m \u001b[38;5;241m!=\u001b[39m \u001b[38;5;241m2\u001b[39m:\n\u001b[1;32m--> 176\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m exceptions\u001b[38;5;241m.\u001b[39mRestAPIError(url, response)\n\u001b[0;32m 178\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m stream:\n",
258
+ "\u001b[1;31mRestAPIError\u001b[0m: Metadata operation error: (url: https://c.app.hopsworks.ai/hopsworks-api/api/project/549015/featurestores/544840/featureview/tsla_stocks_fv/version/1). Server response: \nHTTP code: 404, HTTP reason: Not Found, body: b'{\"errorCode\":270181,\"usrMsg\":\"There exists no feature view with the name tsla_stocks_fv and version 1.\",\"errorMsg\":\"Feature view wasn\\'t found.\"}', error code: 270181, error msg: Feature view wasn't found., user msg: There exists no feature view with the name tsla_stocks_fv and version 1.",
259
+ "\nDuring handling of the above exception, another exception occurred:\n",
260
+ "\u001b[1;31mAttributeError\u001b[0m Traceback (most recent call last)",
261
+ "Cell \u001b[1;32mIn[15], line 5\u001b[0m\n\u001b[0;32m 3\u001b[0m tesla_fg \u001b[38;5;241m=\u001b[39m fs\u001b[38;5;241m.\u001b[39mget_feature_group(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtesla_stock\u001b[39m\u001b[38;5;124m'\u001b[39m, version\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m3\u001b[39m)\n\u001b[0;32m 4\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m:\n\u001b[1;32m----> 5\u001b[0m feature_view, tesla_fg \u001b[38;5;241m=\u001b[39m \u001b[43mcreate_stocks_feature_view\u001b[49m\u001b[43m(\u001b[49m\u001b[43mfs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m1\u001b[39;49m\u001b[43m)\u001b[49m\n",
262
+ "Cell \u001b[1;32mIn[14], line 10\u001b[0m, in \u001b[0;36mcreate_stocks_feature_view\u001b[1;34m(fs, version)\u001b[0m\n\u001b[0;32m 5\u001b[0m news_sentiment_fg \u001b[38;5;241m=\u001b[39m fs\u001b[38;5;241m.\u001b[39mget_feature_group(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mnews_sentiment_updated\u001b[39m\u001b[38;5;124m'\u001b[39m, version \u001b[38;5;241m=\u001b[39m \u001b[38;5;241m2\u001b[39m)\n\u001b[0;32m 7\u001b[0m ds_query \u001b[38;5;241m=\u001b[39m tesla_fg\u001b[38;5;241m.\u001b[39mselect([\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mdate\u001b[39m\u001b[38;5;124m'\u001b[39m,\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mopen\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mticker\u001b[39m\u001b[38;5;124m'\u001b[39m])\\\n\u001b[0;32m 8\u001b[0m \u001b[38;5;241m.\u001b[39mjoin(news_sentiment_fg\u001b[38;5;241m.\u001b[39mselect_except([\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mticker\u001b[39m\u001b[38;5;124m'\u001b[39m,\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtime\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mamp_url\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mimage_url\u001b[39m\u001b[38;5;124m'\u001b[39m]))\n\u001b[1;32m---> 10\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m (\u001b[43mfs\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcreate_tesla_feature_view\u001b[49m(\n\u001b[0;32m 11\u001b[0m name \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtsla_stocks_fv\u001b[39m\u001b[38;5;124m'\u001b[39m,\n\u001b[0;32m 12\u001b[0m query \u001b[38;5;241m=\u001b[39m ds_query,\n\u001b[0;32m 13\u001b[0m labels\u001b[38;5;241m=\u001b[39m[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mticker\u001b[39m\u001b[38;5;124m'\u001b[39m]\n\u001b[0;32m 14\u001b[0m ), tesla_fg)\n",
263
+ "\u001b[1;31mAttributeError\u001b[0m: 'FeatureStore' object has no attribute 'create_tesla_feature_view'"
264
+ ]
265
+ }
266
+ ],
267
+ "source": [
268
+ "#try:\n",
269
+ "# feature_view = fs.get_feature_view(\"tsla_stocks_fv\", version=1)\n",
270
+ "# tesla_fg = fs.get_feature_group('tesla_stock', version=3)\n",
271
+ "#except:\n",
272
+ "# feature_view, tesla_fg = create_stocks_feature_view(fs, 1)"
273
  ]
274
  }
275
  ],
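Editor's note on the cell above: the committed output ends in `AttributeError: 'FeatureStore' object has no attribute 'create_tesla_feature_view'`. The hsfs `FeatureStore` exposes a generic `create_feature_view` method, not a Tesla-specific one, so the helper defined in `In[14]` fails before the feature view is ever created. The sketch below is a hedged illustration of how that helper could be written against the documented hsfs API; the feature group names and versions (`tesla_stock` v3, `news_sentiment_updated` v2) and the column selection are taken from the traceback, while `fs` is assumed to be the feature store handle obtained earlier in the notebook. It is not the author's final code.

```python
# Hedged sketch, not the author's final code: the only change of substance is
# calling fs.create_feature_view, which exists on hsfs.FeatureStore, instead of
# the non-existent create_tesla_feature_view shown in the traceback.

def create_stocks_feature_view(fs, version):
    # Feature group names/versions mirror the traceback above.
    tesla_fg = fs.get_feature_group('tesla_stock', version=3)
    news_sentiment_fg = fs.get_feature_group('news_sentiment_updated', version=2)

    # Join daily prices with news sentiment, dropping columns not needed downstream.
    ds_query = tesla_fg.select(['date', 'open', 'ticker']) \
        .join(news_sentiment_fg.select_except(['ticker', 'time', 'amp_url', 'image_url']))

    feature_view = fs.create_feature_view(
        name='tsla_stocks_fv',
        version=version,
        query=ds_query,
        labels=['ticker'],
    )
    return feature_view, tesla_fg


# Usage mirroring the commented-out cell: reuse the view if it exists,
# otherwise create it. `fs` is assumed to come from the Hopsworks login
# performed earlier in the notebook.
try:
    feature_view = fs.get_feature_view('tsla_stocks_fv', version=1)
    tesla_fg = fs.get_feature_group('tesla_stock', version=3)
except Exception:
    feature_view, tesla_fg = create_stocks_feature_view(fs, 1)
```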
requirements.txt CHANGED
@@ -16,3 +16,4 @@ python-dotenv
16
  requests
17
  alpha_vantage
18
  textblob
 
 
16
  requests
17
  alpha_vantage
18
  textblob
19
+ great_expectations==0.18.12
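Editor's note: the new `great_expectations==0.18.12` pin suggests data validation is being added to the pipeline, but this commit does not show how it is wired in. As a hedged illustration only, a minimal expectation suite for the Tesla price data could look like the sketch below; the suite name and the specific checks are assumptions, and only the `date` and `open` columns are confirmed by the feature-view query earlier in the diff.

```python
# Hedged sketch: a minimal Great Expectations (0.18.x) suite for the Tesla stock data.
# All names below are illustrative; adapt them to the actual feature group schema.
from great_expectations.core import ExpectationSuite, ExpectationConfiguration

suite = ExpectationSuite(expectation_suite_name="tesla_stock_suite")

# Trading dates must always be present.
suite.add_expectation(
    ExpectationConfiguration(
        expectation_type="expect_column_values_to_not_be_null",
        kwargs={"column": "date"},
    )
)

# Opening prices should be strictly positive.
suite.add_expectation(
    ExpectationConfiguration(
        expectation_type="expect_column_values_to_be_between",
        kwargs={"column": "open", "min_value": 0, "strict_min": True},
    )
)

# Assumed wiring (not shown in this commit): hsfs can attach a suite to a
# feature group, e.g. fs.create_feature_group(..., expectation_suite=suite),
# so rows are validated on insert.
```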