Kolpitor committed on
Commit
82d6f66
·
1 Parent(s): 4cba66e

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +174 -0
app.py ADDED
@@ -0,0 +1,174 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os

# Build XGBoost from source and install the Python packages this app needs.
# BUG (fixed): every os.system() call runs in its own subshell, so the
# original's bare "cd xgboost" / "cd python-package" had no effect and the
# subsequent build/install steps ran in the wrong directory. Commands that
# must share a working directory are chained with '&&' below.
os.system('git clone --recursive https://github.com/dmlc/xgboost')
os.system('cd xgboost && sudo cp make/minimum.mk ./config.mk && sudo make -j4')
os.system('cd xgboost && sh build.sh')
os.system('cd xgboost/python-package && python setup.py install')
os.system('pip install graphviz')
os.system('pip install python-pydot')
os.system('pip install python-pydot-ng')
os.system('pip install -U scikit-learn scipy matplotlib')
os.system('pip install wandb --upgrade')
os.system('pip install tensorboardX --upgrade')
os.system('pip install ipython --upgrade')
# SECURITY: a Weights & Biases API key is hard-coded and committed to the
# repository — revoke this key and read it from an environment variable or a
# secrets store instead of embedding it in source.
os.system('wandb login 5a0e81f39777351977ce52cf57ea09c4f48f3d93 --relogin')
# Third-party imports (installed by the setup commands above).
from collections import namedtuple
import altair as alt
import math
import streamlit as st
import pandas
import numpy
import xgboost
import graphviz
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
import matplotlib.pyplot
# BUG (fixed): the original ran os.system('load_ext tensorboard') here.
# '%load_ext' is an IPython magic, not a shell command, so that call could
# only ever fail ("load_ext: command not found"); it has been removed.
import os  # NOTE(review): duplicate of the earlier `import os`; harmless.
import datetime
from tensorboardX import SummaryWriter
import wandb
from wandb.xgboost import wandb_callback
# Start a Weights & Biases run for experiment tracking.
wandb.init(project="australian_rain", entity="epitech1")

# Streamlit "magic": a bare module-level string literal is rendered as
# markdown, so this displays a "# MLOPS" heading in the app.
"""
# MLOPS
"""


# Hyper-parameter widgets: slider(label, min, max, default). The selected
# values feed the XGBRegressor / xgboost.cv calls further down.
max_depth_input = st.slider("Max depth", 1, 100, 5)
colsample_bytree_input = st.slider("Colsample bytree", 0.0, 1.0, 0.5)
learning_rate_input = st.slider("Learning rate", 0.0, 1.0, 0.2)
alpha_input = st.slider("Alpha", 1, 100, 10)
n_estimators_input = st.slider("n estimators", 1, 100, 20)
# City whose rows are kept from the CSV (used as a filter below).
city_input = st.selectbox(
    'Which city do you want to predict rain ?',
    ("Canberra",
    "Albury",
    "Penrith",
    "Sydney",
    "MountGinini",
    "Bendigo",
    "Brisbane",
    "Portland"), index=0)
# Load the Australian weather dataset (expected alongside the app).
dataset = pandas.read_csv('weatherAUS.csv')

# Category vocabularies taken over the FULL dataset (before the city filter)
# so the integer codes below are stable regardless of the selected city.
location_dataset = dataset["Location"].unique()
wind_dataset = dataset["WindGustDir"].unique()
date_dataset = dataset["Date"].unique()

# Keep only the rows of the city chosen in the UI.
dataset.drop(dataset.loc[dataset['Location'] != city_input].index, inplace=True)

i_RainTomorrow = dataset.columns.get_loc("RainTomorrow")
#i_Location = dataset.columns.get_loc("Location")
i_WindGustDir = dataset.columns.get_loc("WindGustDir")
i_Date = dataset.columns.get_loc("Date")

# BUG (fixed): the original inferred the positive/negative label tokens by
# peeking at dataset.iat[8, ...] and dataset.iat[0, ...] of the *city-filtered*
# frame; if those rows did not happen to hold "Yes"/"No" the labels were
# silently corrupted. weatherAUS.csv encodes RainTomorrow as the literals
# "Yes"/"No" (confirm against the CSV), so compare against "Yes" directly.
for i in range(len(dataset)):
    # RainTomorrow: "Yes"/"No" -> True/False.
    if dataset.iat[i, i_RainTomorrow] == "Yes":
        dataset.iat[i, i_RainTomorrow] = True
    else:
        dataset.iat[i, i_RainTomorrow] = False
    #dataset.iat[i, i_Location] = numpy.where(location_dataset == dataset.iat[i, i_Location])[0][0]
    # WindGustDir: 0 for missing, otherwise 1-based index into wind_dataset.
    if pandas.isna(dataset.iat[i, i_WindGustDir]):
        dataset.iat[i, i_WindGustDir] = 0
    else:
        dataset.iat[i, i_WindGustDir] = numpy.where(wind_dataset == dataset.iat[i, i_WindGustDir])[0][0] + 1
    # Date: ordinal position of the date within the dataset-wide vocabulary.
    dataset.iat[i, i_Date] = numpy.where(date_dataset == dataset.iat[i, i_Date])[0][0]
# Cast the recoded columns to their final dtypes.
dataset = dataset.astype({'RainTomorrow': 'bool'})
#dataset = dataset.astype({'Location': 'int'})
dataset = dataset.astype({'WindGustDir': 'int'})
dataset = dataset.astype({'Date': 'int'})

# Drop unused raw columns before handling missing data.
dataset.drop(columns=["WindDir9am", "WindDir3pm", "WindSpeed9am", "WindSpeed3pm", "Temp9am", "Temp3pm", "RainToday"], inplace=True)
# Drop every row that still contains a missing value.
# BUG (fixed): the original used dataset.drop(<index>, 0, inplace=True);
# passing `axis` positionally was deprecated and removed in pandas 2.0.
# dropna(axis=0) is the idiomatic equivalent.
dataset.dropna(axis=0, inplace=True)

# Average the 9am/3pm readings into one daily value per measure.
# Vectorized column arithmetic — identical result to the original per-row
# loop, but one C-level pass per column instead of len(dataset) .at writes.
dataset["Humidity"] = (dataset["Humidity9am"] + dataset["Humidity3pm"]) / 2
dataset["Pressure"] = (dataset["Pressure9am"] + dataset["Pressure3pm"]) / 2
dataset["Cloud"] = (dataset["Cloud9am"] + dataset["Cloud3pm"]) / 2

# The raw half-day columns are no longer needed.
dataset.drop(columns=["Humidity9am", "Humidity3pm", "Pressure9am", "Pressure3pm", "Cloud9am", "Cloud3pm"], inplace=True)
# Feature/target split by column POSITION. NOTE(review): this assumes the
# frame has exactly 13 columns after the drops above; the boolean mask keeps
# 10 of them as features and excludes positions 0, 1 and 4. Position 4 is
# used as the regression target y — presumably "Rainfall"; confirm against
# weatherAUS.csv's column order, as any upstream schema change silently
# shifts both the features and the target.
x, y = dataset.iloc[:,[False, False, True, True, False, True, True, True, True, True, True, True, True]],dataset.iloc[:,4]

# DMatrix over the full (filtered) data; used later by xgboost.cv and
# xgboost.train.
data_dmatrix = xgboost.DMatrix(data=x,label=y)

# 80/20 train/test split with a fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=123)
class TensorBoardCallback(xgboost.callback.TrainingCallback):
    """XGBoost training callback that logs evaluation metrics to TensorBoard.

    Opens one tensorboardX SummaryWriter for the "train" eval split and one
    for the named test split under ``runs/<experiment>/<timestamp>/``, then
    writes the latest score of every (split, metric) pair after each boosting
    iteration.
    """

    def __init__(self, experiment=None, data_name=None):
        """Create the writers.

        experiment -- run folder name; defaults to "logs" when None/empty.
        data_name  -- name of the non-train eval split; defaults to "test".
        (The original annotated both parameters as `str` while defaulting
        them to None; the misleading annotations are removed.)
        """
        self.experiment = experiment or "logs"
        self.data_name = data_name or "test"
        self.datetime_ = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
        self.log_dir = f"runs/{self.experiment}/{self.datetime_}"
        self.train_writer = SummaryWriter(log_dir=os.path.join(self.log_dir, "train/"))
        # data_name is always truthy after the fallback above, so test_writer
        # is always created; the guard is kept for clarity.
        if self.data_name:
            self.test_writer = SummaryWriter(log_dir=os.path.join(self.log_dir, f"{self.data_name}/"))

    def after_iteration(
        self, model, epoch: int, evals_log: xgboost.callback.TrainingCallback.EvalsLog
    ) -> bool:
        """Log the newest score of each metric; return False to keep training."""
        if not evals_log:
            return False

        for data, metric in evals_log.items():
            for metric_name, log in metric.items():
                # CV runs record (score, stddev) tuples; plain training
                # records bare floats — keep just the score.
                score = log[-1][0] if isinstance(log[-1], tuple) else log[-1]
                # Route by split name: "train" goes to the train writer,
                # everything else to the test writer.
                writer = self.train_writer if data == "train" else self.test_writer
                writer.add_scalar(metric_name, score, epoch)

        return False  # never request early stop
# Train an XGBoost regressor with the slider-selected hyper-parameters,
# streaming metrics to TensorBoard through the custom callback above.
xg_reg = xgboost.XGBRegressor(colsample_bytree = colsample_bytree_input, learning_rate = learning_rate_input, max_depth = max_depth_input, alpha = alpha_input, n_estimators = n_estimators_input, eval_metric = ['rmse', 'error', 'logloss', 'map'],
                              callbacks=[TensorBoardCallback(experiment='exp_1', data_name='test')])

# NOTE(review): eval_set contains only the training split, so the callback's
# "test" writer never receives data — consider adding (X_test, y_test).
xg_reg.fit(X_train,y_train, eval_set=[(X_train, y_train)])

# Hold-out predictions for the 20% test split.
preds = xg_reg.predict(X_test)

# Root-mean-squared error shown in the Streamlit app.
rmse = numpy.sqrt(mean_squared_error(y_test, preds))
st.write("RMSE: %f" % (rmse))

# 3-fold cross-validation over the full DMatrix with the same parameters
# (n_estimators is replaced by num_boost_round/early stopping here).
params = {'colsample_bytree': colsample_bytree_input,'learning_rate': learning_rate_input,
          'max_depth': max_depth_input, 'alpha': alpha_input}

cv_results = xgboost.cv(dtrain=data_dmatrix, params=params, nfold=3,
                        num_boost_round=50,early_stopping_rounds=10,metrics="rmse", as_pandas=True, seed=123)

# Display the final mean test RMSE of the CV run.
st.write((cv_results["test-rmse-mean"]).tail(1))

# Re-train a booster on the full data (10 rounds) via the native API.
xg_reg = xgboost.train(params=params, dtrain=data_dmatrix, num_boost_round=10)

# NOTE(review): this blocks until the TensorBoard server process exits, so
# nothing after this line runs while the app is serving — confirm intended.
os.system('tensorboard --logdir runs')

#xgboost.plot_tree(xg_reg,num_trees=0)
#matplotlib.pyplot.rcParams['figure.figsize'] = [200, 200]
#matplotlib.pyplot.show()

#xgboost.plot_importance(xg_reg)
#matplotlib.pyplot.rcParams['figure.figsize'] = [5, 5]
#matplotlib.pyplot.show()

#xg_reg = xgboost.train(params=params, dtrain=data_dmatrix, num_boost_round=10, callbacks=[wandb_callback()])

# MLOPS - W&B analytics
# added the wandb to the callbacks