Jayabalambika committed on
Commit 7c95071
1 Parent(s): 68948f2

Update app.py


incorporated review comments

Files changed (1):
  1. app.py +52 -6
app.py CHANGED
@@ -43,19 +43,65 @@ def plot_loss_func():
     plt.ylabel("$L(y=1, f(x))$")
     return fig
 
+
+
+
 title = "SGD convex loss functions"
 
-# def greet(name):
-#     return "Hello " + name + "!"
+detail = "This plot shows the convex loss functions supported by SGDClassifier: linear classifiers (SVM, logistic regression, etc.) with SGD training."
+
+def explain(name):
+    # print("name=", name)
+    if name == "0-1 loss":
+        docstr = "Explanation for " + name + ": " +\
+            " This is the simplest loss function used in classification problems. It counts how many mistakes a hypothesis function makes on a training set. " +\
+            " A loss of 1 is counted for a misprediction and a loss of 0 for a correct prediction. " +\
+            " This function is non-differentiable and hence not used in optimization problems. "
+    elif name == "Hinge loss":
+        docstr = "Explanation for " + name + ": " +\
+            " This is the loss function used for maximum-margin classification in SVMs. " +\
+            " With Z_i = y_i*(w.T * x_i + b): if Z_i > 0 the point x_i is correctly classified, and if Z_i < 0, x_i is incorrectly classified. " +\
+            " If Z_i >= 1, hinge loss = 0; if Z_i < 1, hinge loss = 1 - Z_i. "
+    elif name == "Perceptron loss":
+        docstr = "Explanation for " + name + ": " +\
+            " This is the linear loss function used in the perceptron algorithm, " +\
+            " the binary classifier that decides whether an input, represented by a vector of numbers, belongs to a class or not. "
+
+    elif name == "Squared Hinge loss":
+        docstr = "Explanation for " + name + ": " +\
+            " This is the squared version of the hinge loss, used in classification algorithms where performance matters " +\
+            " and a finer decision boundary is wanted, punishing larger errors more significantly than smaller ones. "
+
+    elif name == "Modified Huber loss":
+        docstr = "Explanation for " + name + ": " +\
+            " The Huber loss balances the best of both Mean Squared Error and Mean Absolute Error. " +\
+            " It is a piecewise function; the hyperparameter delta is found first, followed by the loss optimization step. "
+
+    else:
+        docstr = " Log loss is the loss function used for logistic regression; see Wikipedia for the log loss equation. " +\
+            " L2 regularization is most important for logistic regression models. "
+
+
+    return docstr
+
+
+
 with gr.Blocks(title=title) as demo:
+
     gr.Markdown(f"# {title}")
-
+    gr.Markdown(f"# {detail}")
+
 
     gr.Markdown(" **[Demo is based on sklearn docs](https://scikit-learn.org/stable/auto_examples/linear_model/plot_sgd_loss_functions.html#sphx-glr-auto-examples-linear-model-plot-sgd-loss-functions-py)**")
 
-    btn = gr.Button(value="SGD convex loss functions")
-    btn.click(plot_loss_func, outputs=gr.Plot())
+    with gr.Column(variant="panel"):
+        btn = gr.Button(value="SGD convex loss functions")
+        btn.click(plot_loss_func, outputs=gr.Plot())
 
+    dd = gr.Dropdown(["0-1 loss", "Hinge loss", "Perceptron loss", "Squared Hinge loss", "Modified Huber loss", "Log Loss"], label="loss", info="Select a Loss from the dropdown for a detailed explanation")
+    # inp = gr.Textbox(placeholder="Select a Loss from the dropdown for a detailed explanation")
+    out = gr.Textbox(label="explanation of the loss function")
+    dd.change(explain, dd, out)
 
 
-demo.launch()
+demo.launch()
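
Note: only the tail of plot_loss_func (the ylabel and return fig lines) appears in this hunk's context. Based on the sklearn example the app links to, the full function presumably looks roughly like the sketch below; the exact body is not part of this diff, so the axis limits and curve set here are assumptions taken from that sklearn page.

```python
import numpy as np
import matplotlib.pyplot as plt

def plot_loss_func():
    # Sketch reconstructed from the linked sklearn example, not from this commit.
    xmin, xmax = -4, 4
    xx = np.linspace(xmin, xmax, 100)
    fig = plt.figure()
    plt.plot([xmin, 0, 0, xmax], [1, 1, 0, 0], label="Zero-one loss")
    plt.plot(xx, np.where(xx < 1, 1 - xx, 0), label="Hinge loss")
    plt.plot(xx, -np.minimum(xx, 0), label="Perceptron loss")
    plt.plot(xx, np.log2(1 + np.exp(-xx)), label="Log loss")
    plt.plot(xx, np.where(xx < 1, 1 - xx, 0) ** 2, label="Squared hinge loss")
    plt.plot(xx, np.where(xx < -1, -4 * xx, np.where(xx < 1, (1 - xx) ** 2, 0)),
             label="Modified Huber loss")
    plt.ylim((0, 8))
    plt.legend(loc="upper right")
    plt.xlabel(r"Decision function $f(x)$")
    plt.ylabel("$L(y=1, f(x))$")  # matches the context line in the hunk
    return fig
```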
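The piecewise rule quoted in the "Hinge loss" branch of explain() can be sanity-checked numerically. A minimal sketch (not part of the commit):

```python
import numpy as np

def hinge(z):
    # Piecewise rule from explain(): with margin z = y * (w.T @ x + b),
    # z >= 1 gives loss 0; z < 1 gives loss 1 - z.
    return np.maximum(0.0, 1.0 - z)

print(hinge(np.array([2.0, 1.0, 0.5, -1.0])))  # -> [0.  0.  0.5 2. ]
```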
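The dropdown-to-textbox wiring added in this commit follows the standard Gradio Blocks event pattern: dd.change(explain, dd, out) re-runs explain whenever the selection changes and routes the return value into the textbox. A stripped-down, self-contained sketch of the same pattern (component names mirror the commit; the function body here is illustrative only):

```python
import gradio as gr

def explain(name):
    # The committed version returns a multi-sentence explanation;
    # echoing the selection is enough to show the event wiring.
    return f"Explanation for {name}"

with gr.Blocks() as demo:
    dd = gr.Dropdown(["0-1 loss", "Hinge loss", "Log Loss"], label="loss")
    out = gr.Textbox(label="explanation of the loss function")
    # On every change of the dropdown value, call explain with the
    # current selection and write the result into the textbox.
    dd.change(explain, inputs=dd, outputs=out)

demo.launch()
```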