nandovallec committed
Commit db413b1
1 Parent(s): 1466cb4
README.md CHANGED
@@ -1,10 +1,10 @@
  ---
  title: Iris
- emoji: 📚
- colorFrom: green
- colorTo: pink
+ emoji: 🐢
+ colorFrom: purple
+ colorTo: green
  sdk: gradio
- sdk_version: 3.9
+ sdk_version: 3.5
  app_file: app.py
  pinned: false
  license: apache-2.0
app.py CHANGED
@@ -1,31 +1,47 @@
  import gradio as gr
+ import numpy as np
  from PIL import Image
+ import requests
+
  import hopsworks
+ import joblib

  project = hopsworks.login()
  fs = project.get_feature_store()

- dataset_api = project.get_dataset_api()
-
- dataset_api.download("Resources/images/latest_iris.png")
- dataset_api.download("Resources/images/actual_iris.png")
- dataset_api.download("Resources/images/df_recent.png")
- dataset_api.download("Resources/images/confusion_matrix.png")
-
- with gr.Blocks() as demo:
-     with gr.Row():
-         with gr.Column():
-             gr.Label("Today's Predicted Image")
-             input_img = gr.Image("latest_iris.png", elem_id="predicted-img")
-         with gr.Column():
-             gr.Label("Today's Actual Image")
-             input_img = gr.Image("actual_iris.png", elem_id="actual-img")
-     with gr.Row():
-         with gr.Column():
-             gr.Label("Recent Prediction History")
-             input_img = gr.Image("df_recent.png", elem_id="recent-predictions")
-         with gr.Column():
-             gr.Label("Confusion Matrix with Historical Prediction Performance")
-             input_img = gr.Image("confusion_matrix.png", elem_id="confusion-matrix")
+
+ mr = project.get_model_registry()
+ model = mr.get_model("iris_modal", version=1)
+ model_dir = model.download()
+ model = joblib.load(model_dir + "/iris_model.pkl")
+
+
+ def iris(sepal_length, sepal_width, petal_length, petal_width):
+     input_list = []
+     input_list.append(sepal_length)
+     input_list.append(sepal_width)
+     input_list.append(petal_length)
+     input_list.append(petal_width)
+     # 'res' is a list of predictions returned as the label.
+     res = model.predict(np.asarray(input_list).reshape(1, -1))
+     # We add '[0]' to the result of the transformed 'res', because 'res' is a list, and we only want
+     # the first element.
+     flower_url = "https://raw.githubusercontent.com/featurestoreorg/serverless-ml-course/main/src/01-module/assets/" + res[0] + ".png"
+     img = Image.open(requests.get(flower_url, stream=True).raw)
+     return img
+
+ demo = gr.Interface(
+     fn=iris,
+     title="Iris Flower Predictive Analytics",
+     description="Experiment with sepal/petal lengths/widths to predict which flower it is.",
+     allow_flagging="never",
+     inputs=[
+         gr.inputs.Number(default=1.0, label="sepal length (cm)"),
+         gr.inputs.Number(default=1.0, label="sepal width (cm)"),
+         gr.inputs.Number(default=1.0, label="petal length (cm)"),
+         gr.inputs.Number(default=1.0, label="petal width (cm)"),
+     ],
+     outputs=gr.Image(type="pil"))

  demo.launch()
+
huggingface-spaces-iris-monitor/app.py DELETED
@@ -1,31 +0,0 @@
- import gradio as gr
- from PIL import Image
- import hopsworks
-
- project = hopsworks.login()
- fs = project.get_feature_store()
-
- dataset_api = project.get_dataset_api()
-
- dataset_api.download("Resources/images/latest_iris.png")
- dataset_api.download("Resources/images/actual_iris.png")
- dataset_api.download("Resources/images/df_recent.png")
- dataset_api.download("Resources/images/confusion_matrix.png")
-
- with gr.Blocks() as demo:
-     with gr.Row():
-         with gr.Column():
-             gr.Label("Today's Predicted Image")
-             input_img = gr.Image("latest_iris.png", elem_id="predicted-img")
-         with gr.Column():
-             gr.Label("Today's Actual Image")
-             input_img = gr.Image("actual_iris.png", elem_id="actual-img")
-     with gr.Row():
-         with gr.Column():
-             gr.Label("Recent Prediction History")
-             input_img = gr.Image("df_recent.png", elem_id="recent-predictions")
-         with gr.Column():
-             gr.Label("Confusion Matrix with Historical Prediction Performance")
-             input_img = gr.Image("confusion_matrix.png", elem_id="confusion-matrix")
-
- demo.launch()
huggingface-spaces-iris-monitor/requirements.txt DELETED
@@ -1 +0,0 @@
- hopsworks
huggingface-spaces-iris/app.py DELETED
@@ -1,31 +0,0 @@
- import gradio as gr
- from PIL import Image
- import hopsworks
-
- project = hopsworks.login()
- fs = project.get_feature_store()
-
- dataset_api = project.get_dataset_api()
-
- dataset_api.download("Resources/images/latest_iris.png")
- dataset_api.download("Resources/images/actual_iris.png")
- dataset_api.download("Resources/images/df_recent.png")
- dataset_api.download("Resources/images/confusion_matrix.png")
-
- with gr.Blocks() as demo:
-     with gr.Row():
-         with gr.Column():
-             gr.Label("Today's Predicted Image")
-             input_img = gr.Image("latest_iris.png", elem_id="predicted-img")
-         with gr.Column():
-             gr.Label("Today's Actual Image")
-             input_img = gr.Image("actual_iris.png", elem_id="actual-img")
-     with gr.Row():
-         with gr.Column():
-             gr.Label("Recent Prediction History")
-             input_img = gr.Image("df_recent.png", elem_id="recent-predictions")
-         with gr.Column():
-             gr.Label("Confusion Matrix with Historical Prediction Performance")
-             input_img = gr.Image("confusion_matrix.png", elem_id="confusion-matrix")
-
- demo.launch()
huggingface-spaces-iris/requirements.txt DELETED
@@ -1 +0,0 @@
- hopsworks
iris-batch-inference-pipeline.py DELETED
@@ -1,107 +0,0 @@
- import os
- import modal
-
- LOCAL=True
-
- if LOCAL == False:
-     stub = modal.Stub()
-     hopsworks_image = modal.Image.debian_slim().pip_install(["hopsworks","joblib","seaborn","sklearn","dataframe-image"])
-     @stub.function(image=hopsworks_image, schedule=modal.Period(days=1), secret=modal.Secret.from_name("jim-hopsworks-ai"))
-     def f():
-         g()
-
- def g():
-     import pandas as pd
-     import hopsworks
-     import joblib
-     import datetime
-     from PIL import Image
-     from datetime import datetime
-     import dataframe_image as dfi
-     from sklearn.metrics import confusion_matrix
-     from matplotlib import pyplot
-     import seaborn as sns
-     import requests
-
-     project = hopsworks.login()
-     fs = project.get_feature_store()
-
-     mr = project.get_model_registry()
-     model = mr.get_model("iris_modal", version=1)
-     model_dir = model.download()
-     model = joblib.load(model_dir + "/iris_model.pkl")
-
-     feature_view = fs.get_feature_view(name="iris_modal", version=1)
-     batch_data = feature_view.get_batch_data()
-
-     y_pred = model.predict(batch_data)
-     # print(y_pred)
-     flower = y_pred[y_pred.size-1]
-     flower_url = "https://raw.githubusercontent.com/featurestoreorg/serverless-ml-course/main/src/01-module/assets/" + flower + ".png"
-     print("Flower predicted: " + flower)
-     img = Image.open(requests.get(flower_url, stream=True).raw)
-     img.save("./latest_iris.png")
-     dataset_api = project.get_dataset_api()
-     dataset_api.upload("./latest_iris.png", "Resources/images", overwrite=True)
-
-     iris_fg = fs.get_feature_group(name="iris_modal", version=1)
-     df = iris_fg.read()
-     # print(df["variety"])
-     label = df.iloc[-1]["variety"]
-     label_url = "https://raw.githubusercontent.com/featurestoreorg/serverless-ml-course/main/src/01-module/assets/" + label + ".png"
-     print("Flower actual: " + label)
-     img = Image.open(requests.get(label_url, stream=True).raw)
-     img.save("./actual_iris.png")
-     dataset_api.upload("./actual_iris.png", "Resources/images", overwrite=True)
-
-     monitor_fg = fs.get_or_create_feature_group(name="iris_predictions",
-                                                 version=1,
-                                                 primary_key=["datetime"],
-                                                 description="Iris flower Prediction/Outcome Monitoring"
-                                                 )
-
-     now = datetime.now().strftime("%m/%d/%Y, %H:%M:%S")
-     data = {
-         'prediction': [flower],
-         'label': [label],
-         'datetime': [now],
-     }
-     monitor_df = pd.DataFrame(data)
-     monitor_fg.insert(monitor_df, write_options={"wait_for_job" : False})
-
-     history_df = monitor_fg.read()
-     # Add our prediction to the history, as the history_df won't have it -
-     # the insertion was done asynchronously, so it will take ~1 min to land on App
-     history_df = pd.concat([history_df, monitor_df])
-
-
-     df_recent = history_df.tail(5)
-     dfi.export(df_recent, './df_recent.png', table_conversion = 'matplotlib')
-     dataset_api.upload("./df_recent.png", "Resources/images", overwrite=True)
-
-     predictions = history_df[['prediction']]
-     labels = history_df[['label']]
-
-     # Only create the confusion matrix when our iris_predictions feature group has examples of all 3 iris flowers
-     print("Number of different flower predictions to date: " + str(predictions.value_counts().count()))
-     if predictions.value_counts().count() == 3:
-         results = confusion_matrix(labels, predictions)
-
-         df_cm = pd.DataFrame(results, ['True Setosa', 'True Versicolor', 'True Virginica'],
-                              ['Pred Setosa', 'Pred Versicolor', 'Pred Virginica'])
-
-         cm = sns.heatmap(df_cm, annot=True)
-         fig = cm.get_figure()
-         fig.savefig("./confusion_matrix.png")
-         dataset_api.upload("./confusion_matrix.png", "Resources/images", overwrite=True)
-     else:
-         print("You need 3 different flower predictions to create the confusion matrix.")
-         print("Run the batch inference pipeline more times until you get 3 different iris flower predictions")
-
-
- if __name__ == "__main__":
-     if LOCAL == True :
-         g()
-     else:
-         with stub.run():
-             f()
iris-feature-pipeline-daily.py DELETED
@@ -1,84 +0,0 @@
- import os
- import modal
-
- BACKFILL=False
- LOCAL=False
-
- if LOCAL == False:
-     stub = modal.Stub()
-     image = modal.Image.debian_slim().pip_install(["hopsworks","joblib","seaborn","sklearn","dataframe-image"])
-
-     @stub.function(image=image, schedule=modal.Period(days=1), secret=modal.Secret.from_name("jim-hopsworks-ai"))
-     def f():
-         g()
-
-
- def generate_flower(name, sepal_len_max, sepal_len_min, sepal_width_max, sepal_width_min,
-                     petal_len_max, petal_len_min, petal_width_max, petal_width_min):
-     """
-     Returns a single iris flower as a single row in a DataFrame
-     """
-     import pandas as pd
-     import random
-
-     df = pd.DataFrame({ "sepal_length": [random.uniform(sepal_len_max, sepal_len_min)],
-                         "sepal_width": [random.uniform(sepal_width_max, sepal_width_min)],
-                         "petal_length": [random.uniform(petal_len_max, petal_len_min)],
-                         "petal_width": [random.uniform(petal_width_max, petal_width_min)]
-                       })
-     df['variety'] = name
-     return df
-
-
- def get_random_iris_flower():
-     """
-     Returns a DataFrame containing one random iris flower
-     """
-     import pandas as pd
-     import random
-
-     virginica_df = generate_flower("Virginica", 8, 5.5, 3.8, 2.2, 7, 4.5, 2.5, 1.4)
-     versicolor_df = generate_flower("Versicolor", 7.5, 4.5, 3.5, 2.1, 3.1, 5.5, 1.8, 1.0)
-     setosa_df = generate_flower("Setosa", 6, 4.5, 4.5, 2.3, 1.2, 2, 0.7, 0.3)
-
-     # randomly pick one of these 3 and write it to the featurestore
-     pick_random = random.uniform(0,3)
-     if pick_random >= 2:
-         iris_df = virginica_df
-         print("Virginica added")
-     elif pick_random >= 1:
-         iris_df = versicolor_df
-         print("Versicolor added")
-     else:
-         iris_df = setosa_df
-         print("Setosa added")
-
-     return iris_df
-
-
-
- def g():
-     import hopsworks
-     import pandas as pd
-
-     project = hopsworks.login()
-     fs = project.get_feature_store()
-
-     if BACKFILL == True:
-         iris_df = pd.read_csv("https://repo.hops.works/master/hopsworks-tutorials/data/iris.csv")
-     else:
-         iris_df = get_random_iris_flower()
-
-     iris_fg = fs.get_or_create_feature_group(
-         name="iris_modal",
-         version=1,
-         primary_key=["sepal_length","sepal_width","petal_length","petal_width"],
-         description="Iris flower dataset")
-     iris_fg.insert(iris_df, write_options={"wait_for_job" : False})
-
- if __name__ == "__main__":
-     if LOCAL == True :
-         g()
-     else:
-         with stub.run():
-             f()
iris-feature-pipeline.py DELETED
@@ -1,33 +0,0 @@
- import os
- import modal
-
- LOCAL=False
-
- if LOCAL == False:
-     stub = modal.Stub()
-     image = modal.Image.debian_slim().pip_install(["hopsworks","joblib","seaborn","sklearn","dataframe-image"])
-
-     @stub.function(image=image, schedule=modal.Period(days=1), secret=modal.Secret.from_name("jim-hopsworks-ai"))
-     def f():
-         g()
-
- def g():
-     import hopsworks
-     import pandas as pd
-
-     project = hopsworks.login()
-     fs = project.get_feature_store()
-     iris_df = pd.read_csv("https://repo.hops.works/master/hopsworks-tutorials/data/iris.csv")
-     iris_fg = fs.get_or_create_feature_group(
-         name="iris_modal",
-         version=1,
-         primary_key=["sepal_length","sepal_width","petal_length","petal_width"],
-         description="Iris flower dataset")
-     iris_fg.insert(iris_df, write_options={"wait_for_job" : False})
-
- if __name__ == "__main__":
-     if LOCAL == True :
-         g()
-     else:
-         with stub.run():
-             f()
iris-training-pipeline.py DELETED
@@ -1,100 +0,0 @@
- import os
- import modal
-
- LOCAL=True
-
- if LOCAL == False:
-     stub = modal.Stub()
-     image = modal.Image.debian_slim().apt_install(["libgomp1"]).pip_install(["hopsworks", "seaborn", "joblib", "scikit-learn"])
-
-     @stub.function(image=image, schedule=modal.Period(days=1), secret=modal.Secret.from_name("jim-hopsworks-ai"))
-     def f():
-         g()
-
-
- def g():
-     import hopsworks
-     import pandas as pd
-     from sklearn.neighbors import KNeighborsClassifier
-     from sklearn.metrics import accuracy_score
-     from sklearn.metrics import confusion_matrix
-     from sklearn.metrics import classification_report
-     import seaborn as sns
-     from matplotlib import pyplot
-     from hsml.schema import Schema
-     from hsml.model_schema import ModelSchema
-     import joblib
-
-     # You have to set the environment variable 'HOPSWORKS_API_KEY' for login to succeed
-     project = hopsworks.login()
-     # fs is a reference to the Hopsworks Feature Store
-     fs = project.get_feature_store()
-
-     # The feature view is the input set of features for your model. The features can come from different feature groups.
-     # You can select features from different feature groups and join them together to create a feature view
-     try:
-         feature_view = fs.get_feature_view(name="iris_modal", version=1)
-     except:
-         iris_fg = fs.get_feature_group(name="iris_modal", version=1)
-         query = iris_fg.select_all()
-         feature_view = fs.create_feature_view(name="iris_modal",
-                                               version=1,
-                                               description="Read from Iris flower dataset",
-                                               labels=["variety"],
-                                               query=query)
-
-     # You can read training data, randomly split into train/test sets of features (X) and labels (y)
-     X_train, X_test, y_train, y_test = feature_view.train_test_split(0.2)
-
-     # Train our model with the Scikit-learn K-nearest-neighbors algorithm using our features (X_train) and labels (y_train)
-     model = KNeighborsClassifier(n_neighbors=2)
-     model.fit(X_train, y_train.values.ravel())
-
-     # Evaluate model performance using the features from the test set (X_test)
-     y_pred = model.predict(X_test)
-
-     # Compare predictions (y_pred) with the labels in the test set (y_test)
-     metrics = classification_report(y_test, y_pred, output_dict=True)
-     results = confusion_matrix(y_test, y_pred)
-
-     # Create the confusion matrix as a figure, we will later store it as a PNG image file
-     df_cm = pd.DataFrame(results, ['True Setosa', 'True Versicolor', 'True Virginica'],
-                          ['Pred Setosa', 'Pred Versicolor', 'Pred Virginica'])
-     cm = sns.heatmap(df_cm, annot=True)
-     fig = cm.get_figure()
-
-     # We will now upload our model to the Hopsworks Model Registry. First get an object for the model registry.
-     mr = project.get_model_registry()
-
-     # The contents of the 'iris_model' directory will be saved to the model registry. Create the dir, first.
-     model_dir="iris_model"
-     if os.path.isdir(model_dir) == False:
-         os.mkdir(model_dir)
-
-     # Save both our model and the confusion matrix to 'model_dir', whose contents will be uploaded to the model registry
-     joblib.dump(model, model_dir + "/iris_model.pkl")
-     fig.savefig(model_dir + "/confusion_matrix.png")
-
-
-     # Specify the schema of the model's input/output using the features (X_train) and labels (y_train)
-     input_schema = Schema(X_train)
-     output_schema = Schema(y_train)
-     model_schema = ModelSchema(input_schema, output_schema)
-
-     # Create an entry in the model registry that includes the model's name, desc, metrics
-     iris_model = mr.python.create_model(
-         name="iris_modal",
-         metrics={"accuracy" : metrics['accuracy']},
-         model_schema=model_schema,
-         description="Iris Flower Predictor"
-     )
-
-     # Upload the model to the model registry, including all files in 'model_dir'
-     iris_model.save(model_dir)
-
- if __name__ == "__main__":
-     if LOCAL == True :
-         g()
-     else:
-         with stub.run():
-             f()
requirements.txt CHANGED
@@ -1,5 +1,3 @@
  hopsworks
  joblib
  scikit-learn
- seaborn
- dataframe-image