### (1) Generate simulated data

```python
import gradio as gr
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import sympy as sp
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures

np.random.seed(42)

# Training data: y = x + 0.5*x^2 + 4 + Gaussian noise
m = 100
X1 = 6 * np.random.rand(m, 1) - 3
X2 = X1 ** 2
y = X1 + 0.5 * X2 + 4 + np.random.randn(m, 1)

# Test data drawn from the same model
m = 50
X1_test = 6 * np.random.rand(m, 1) - 3
X2_test = X1_test ** 2
y_test = X1_test + 0.5 * X2_test + 4 + np.random.randn(m, 1)

data = pd.DataFrame({'y': y.flatten(), 'X1': X1.flatten(), 'X2': X2.flatten()},
                    columns=['y', 'X1', 'X2'])
data  # preview the simulated data (notebook-style display)


def draw_polynomial(degree=2):
    # Expand X1 into polynomial features and fit a linear model on them
    poly_features = PolynomialFeatures(degree=degree, include_bias=False)
    X_poly = poly_features.fit_transform(X1)
    lin_reg = LinearRegression()
    lin_reg.fit(X_poly, y)

    # Fitted polynomial coefficients (intercept first, then x, x^2, ...)
    coefficients = list(lin_reg.intercept_) + list(lin_reg.coef_[0])
    coefficients = [np.round(val, 3) for val in coefficients]
    print("coefficients: ", coefficients)

    # Create a symbolic variable for x
    x = sp.symbols('x')
    # Create the symbolic polynomial expression
    polynomial_expr = sum(coeff * x**power for power, coeff in enumerate(coefficients))
    # Convert the symbolic expression to a LaTeX string
    latex_expression = sp.latex(polynomial_expr, mode='inline')
    latex_expression = 'y = ' + latex_expression.replace('$', '')

    # Visualize the fitted curve on the training/test data
    X_new = np.linspace(-3, 3, 100).reshape(100, 1)
    X_new_poly = poly_features.transform(X_new)
    y_new = lin_reg.predict(X_new_poly)

    # Training error (mean squared error) of the polynomial model
    X_poly = poly_features.transform(X1)
    y_predict = lin_reg.predict(X_poly)
    train_error = np.mean((y_predict - y) ** 2)
    print("train_error: ", train_error)

    # Test error (mean squared error) of the polynomial model
    X_test_poly = poly_features.transform(X1_test)
    y_test_predict = lin_reg.predict(X_test_poly)
    test_error = np.mean((y_test_predict - y_test) ** 2)
    print("test_error: ", test_error)

    errors = np.array([[train_error, test_error]])
    results = pd.DataFrame(errors, columns=['Training Error', 'Test Error'])

    fig = plt.figure(figsize=(12, 10))

    # Top panel: training points (blue), test points (green), fitted curve (red)
    plt.subplot(2, 1, 1)
    plt.plot(X1, y, "b.")
    plt.plot(X1_test, y_test, "g.", markersize=12)
    plt.plot(X_new, y_new, "r-", linewidth=2, label="Predictions")
    plt.xlabel("$x_1$", fontsize=18)
    plt.ylabel("$y$", rotation=0, fontsize=18)
    plt.legend(loc="upper left", fontsize=14)
    plt.axis([-3, 3, 0, 10])

    # Bottom panel: value of each fitted coefficient
    plt.subplot(2, 1, 2)
    plt.plot([i + 1 for i in range(len(lin_reg.coef_[0]))], lin_reg.coef_[0], 'r-')
    plt.xlabel("Parameters", fontsize=14)
    plt.ylabel("Values", fontsize=14)
    plt.xticks([i + 1 for i in range(len(lin_reg.coef_[0]))],
               [i + 1 for i in range(len(lin_reg.coef_[0]))])
    plt.xlim(0, len(lin_reg.coef_[0]) + 1)
    fig.tight_layout()
    plt.savefig('plot_line.png', dpi=300)

    return latex_expression, results, 'plot_line.png'
```
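Before wiring the function into Gradio, it can be called directly to sanity-check its three return values (the LaTeX formula string, the one-row error table, and the saved plot path). A minimal check, using the `degree=2` default from the code above:

```python
# Quick sanity check of draw_polynomial outside of Gradio.
# Any degree in the slider's range (1-27) works; 2 matches the true data-generating model.
formula, errors_df, plot_path = draw_polynomial(degree=2)
print(formula)    # string of the form "y = ..."
print(errors_df)  # DataFrame with Training Error and Test Error columns
print(plot_path)  # 'plot_line.png', the figure saved by the function
```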
The function is then exposed through a Gradio interface, with a slider controlling the polynomial degree.

```python
#### Define input component
input_degree = gr.inputs.Slider(1, 27, step=1, default=2,
                                label='Degree of Polynomial Regression')

#### Define output components
set_formula = gr.outputs.Textbox(label='Polynomial Model')
set_output = gr.outputs.Dataframe(type='pandas', label='Evaluation Results')
output_plot1 = gr.outputs.Image(label="Regression plot", type='pil')

### Configure Gradio; details can be found at https://www.gradio.app/docs/#i_slider
interface = gr.Interface(fn=draw_polynomial,
                         inputs=[input_degree],
                         outputs=[set_formula, set_output, output_plot1],
                         title="CSCI4750/5750: Polynomial Regression models \n (Model Complexity)",
                         theme='huggingface',
                         layout='vertical')

interface.launch(debug=True)
```
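Note that `gr.inputs.*` and `gr.outputs.*` are the legacy Gradio 2.x component namespaces and were removed in Gradio 3. If you are on a newer Gradio release, a rough equivalent of the interface above would look like the sketch below (an assumption-laden sketch, not tied to a specific version; the `theme='huggingface'` and `layout='vertical'` options no longer exist and are simply dropped):

```python
import gradio as gr

# Gradio 3+ components live directly under the gr namespace.
input_degree = gr.Slider(minimum=1, maximum=27, step=1, value=2,
                         label='Degree of Polynomial Regression')
set_formula = gr.Textbox(label='Polynomial Model')
set_output = gr.Dataframe(label='Evaluation Results')
output_plot1 = gr.Image(label="Regression plot")  # the returned file path is rendered as an image

interface = gr.Interface(fn=draw_polynomial,
                         inputs=[input_degree],
                         outputs=[set_formula, set_output, output_plot1],
                         title="CSCI4750/5750: Polynomial Regression models \n (Model Complexity)")

interface.launch(debug=True)
```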