import gradio as gr
import pandas as pd
import numpy as np
import math
import matplotlib.pyplot as plt
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_squared_error
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import LSTM
import yfinance as yf
from statsmodels.tsa.seasonal import seasonal_decompose
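
# JavaScript hook passed to demo.load via _js; it only logs to the browser console on page load.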
on_load="""
async()=>{
console.log("HELLO");
}
"""
def get_ans():
    plt.close()
    inp = "NVDA"
    tickers = yf.Tickers(inp)
    df = tickers.tickers[inp].history(period="15y")
    df.reset_index(inplace=True)
    df['Date'] = pd.to_datetime(df['Date'])
    # scale the closing prices into [0, 1] for the LSTM
    df1 = df['Close']
    scaler = MinMaxScaler(feature_range=(0, 1))
    df1 = scaler.fit_transform(np.array(df1).reshape(-1, 1))
    # 65/35 chronological train/test split
    training_size = int(len(df1) * 0.65)
    test_size = len(df1) - training_size
    train_data, test_data = df1[0:training_size, :], df1[training_size:len(df1), :1]
    # slice the series into sliding windows: X holds time_step consecutive
    # values, y holds the value immediately after each window
    def create_dataset(dataset, time_step=1):
        dataX, dataY = [], []
        for i in range(len(dataset) - time_step - 1):
            a = dataset[i:(i + time_step), 0]
            dataX.append(a)
            dataY.append(dataset[i + time_step, 0])
        return np.array(dataX), np.array(dataY)
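
    # window length: predict the next close from the previous 100 closes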
    time_step = 100
    X_train, y_train = create_dataset(train_data, time_step)
    X_test, ytest = create_dataset(test_data, time_step)
    # reshape to (samples, time steps, features) as expected by LSTM layers
    X_train = X_train.reshape(X_train.shape[0], X_train.shape[1], 1)
    X_test = X_test.reshape(X_test.shape[0], X_test.shape[1], 1)
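
    # three stacked LSTM layers with a single-unit linear head for one-step regression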
    model = Sequential()
    model.add(LSTM(50, return_sequences=True, input_shape=(100, 1)))
    model.add(LSTM(50, return_sequences=True))
    model.add(LSTM(50))
    model.add(Dense(1))
    model.compile(loss='mean_squared_error', optimizer='adam')
    model.fit(X_train, y_train, validation_data=(X_test, ytest), epochs=2, batch_size=64, verbose=1)
    train_predict = model.predict(X_train)
    test_predict = model.predict(X_test)
    # map predictions back from [0, 1] to price scale
    train_predict = scaler.inverse_transform(train_predict)
    test_predict = scaler.inverse_transform(test_predict)
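
    # overlay the fitted values on the actual series, offset by the look-back window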
    look_back = 100
    trainPredictPlot = np.empty_like(df1)
    trainPredictPlot[:, :] = np.nan
    trainPredictPlot[look_back:len(train_predict) + look_back, :] = train_predict
    # shift test predictions for plotting
    testPredictPlot = np.empty_like(df1)
    testPredictPlot[:, :] = np.nan
    testPredictPlot[len(train_predict) + (look_back * 2) + 1:len(df1) - 1, :] = test_predict
    # plot baseline and predictions
    plt.plot(scaler.inverse_transform(df1))
    plt.plot(trainPredictPlot)
    plt.plot(testPredictPlot)
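
    # autoregressive 30-day forecast: each prediction is appended to the input
    # window and the oldest value is dropped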
    x_input = test_data[-time_step:].reshape(1, -1)
    temp_input = list(x_input)
    temp_input = temp_input[0].tolist()
    lst_output = []
    n_steps = 100
    i = 0
    while i < 30:
        if len(temp_input) > 100:
            x_input = np.array(temp_input[1:])
            x_input = x_input.reshape(1, -1)
            x_input = x_input.reshape((1, x_input.size, 1))
            yhat = model.predict(x_input, verbose=0)
            temp_input.extend(yhat[0].tolist())
            temp_input = temp_input[1:]
            lst_output.extend(yhat.tolist())
            i = i + 1
        else:
            x_input = x_input.reshape((1, n_steps, 1))
            yhat = model.predict(x_input, verbose=0)
            temp_input.extend(yhat[0].tolist())
            lst_output.extend(yhat.tolist())
            i = i + 1
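
    # append the forecast to the history and convert both back to price scale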
    df3 = df1.tolist()
    df3.extend(lst_output)
    df3 = scaler.inverse_transform(np.array(df3).reshape(-1, 1))
    df3 = pd.DataFrame(df3[:, 0], columns=['Values'])
    df3['index'] = range(1, len(df3) + 1)
    lst_output = scaler.inverse_transform(np.array(lst_output).reshape(-1, 1))
    lst_output = pd.DataFrame(lst_output[:, 0], columns=["Values"])
    lst_output['index'] = range(1, len(lst_output) + 1)
    opens = np.asarray(df['Open'])
    return (plt,
            gr.update(visible=True, value=df, x="Date", y="Open", height=500, width=800),
            gr.update(visible=True, value=df[-300:], x="Date", y="Open", height=500, width=800),
            gr.update(visible=True, value=df[-30:], x="Date", y="Open", height=500, width=800),
            max(opens), min(opens),
            max(opens[-300:]), min(opens[-300:]),
            max(opens[-30:]), min(opens[-30:]),
            lst_output["Values"][0],
            gr.update(visible=True, value=lst_output, x="index", y="Values", height=500, width=800),
            gr.update(visible=True, value=df3, x="index", y="Values", height=500, width=800),
            gr.update(visible=True, value=df3[-300:], x="index", y="Values", height=500, width=800))
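
# Decompose the scaled closing-price series into trend, seasonal and residual
# components for the "Regression Trends" tabs.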
def get_seo():
    plt.close()
    time_step = 100
    inp = "NVDA"
    tickers = yf.Tickers(inp)
    df = tickers.tickers[inp].history(period="15y")
    df.reset_index(inplace=True)
    df['Date'] = pd.to_datetime(df['Date'])
    df1 = df['Close']
    scaler = MinMaxScaler(feature_range=(0, 1))
    df1 = scaler.fit_transform(np.array(df1).reshape(-1, 1))

    def create_dataset(dataset, time_step=1):
        dataX, dataY = [], []
        for i in range(len(dataset) - time_step - 1):
            a = dataset[i:(i + time_step), 0]
            dataX.append(a)
            dataY.append(dataset[i + time_step, 0])
        return np.array(dataX), np.array(dataY)

    X_train, y_train = create_dataset(df1, time_step)
    # additive decomposition; column 0 of X_train is the original scaled series,
    # so take element 0 of each decomposed row
    decompose_result_mult = seasonal_decompose(X_train, model="additive", period=time_step)
    trend = decompose_result_mult.trend
    seasonal = decompose_result_mult.seasonal
    residual = decompose_result_mult.resid
    z = [i[0] for i in trend]
    z = pd.DataFrame(z, columns=['Values'])
    z['index'] = range(1, len(z) + 1)
    y = [i[0] for i in seasonal]
    y = pd.DataFrame(y, columns=['Values'])
    y['index'] = range(1, len(y) + 1)
    a = [i[0] for i in residual]
    a = pd.DataFrame(a, columns=['Values'])
    a['index'] = range(1, len(a) + 1)
    return (gr.update(visible=True, value=z, x='index', y='Values', height=500, width=800),
            gr.update(visible=True, value=y[:100], x='index', y='Values', height=500, width=800),
            gr.update(visible=True, value=a, x='index', y='Values', height=500, width=800))
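
# Pull the business summary, liquidity/leverage ratios, and market statistics
# from the yfinance info dict.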
def get_info():
    inp = "NVDA"
    tickers = yf.Ticker(inp)
    info = tickers.info
    long_info = info['longBusinessSummary']
    curr_rat = info['currentRatio']
    quick_rat = info['quickRatio']
    short_rat = info['shortRatio']
    debt_eq = info['debtToEquity']
    volume = info['volume']
    market_cap = info['marketCap']
    curr_price = info['currentPrice']
    rev_per = info['revenuePerShare']
    return long_info, curr_rat, quick_rat, short_rat, debt_eq, volume, market_cap, curr_price, rev_per
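
# Gradio layout: header, fundamentals, decomposition tabs, historical price
# tabs, and the 30-day LSTM forecast, all populated on page load.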
with gr.Blocks() as demo:
    with gr.Row().style(equal_height=True):
        with gr.Column():
            gr.Markdown("<center><h1>Stock Analysis NVDA</h1></center>")
    with gr.Row():
        with gr.Column():
            Name_of_the_company = "NVDA"
            gr.Markdown("<center><h2>Analysis</h2></center>")
            gr.Markdown("<center><h3>Important Information</h3></center>")
            info1 = gr.Textbox()
            gr.Markdown("<h4>Insightful Ratios</h4>")
            with gr.Row():
                ratio1 = gr.Textbox(label='Current Ratio')
                ratio2 = gr.Textbox(label='Quick Ratio')
                ratio3 = gr.Textbox(label='Short Ratio')
                ratio4 = gr.Textbox(label='Debt to Equity Ratio')
            gr.Markdown("<center><h3>General Information</h3></center>")
            with gr.Row():
                curr_price = gr.Textbox(label='Current Price of Stock')
                rev_per = gr.Textbox(label='Revenue per Share')
                vol = gr.Textbox(label='Volume')
                mar_cap = gr.Textbox(label='Market Cap')
            gr.Markdown("<h3>Regression Trends of Price</h3>")
            with gr.Tab("Overall Trend"):
                trend_g = gr.LinePlot(visible=False, label='Trend of stock over its lifetime', height=1000, width=1000)
            with gr.Tab("Seasonal Trends"):
                Seaso = gr.LinePlot(visible=False, label="This is for one season", height=1000, width=1000)
            with gr.Tab("Residual Variation"):
                resid = gr.LinePlot(visible=False, label="Residual Variation over time", height=1000, width=1000)
            mp = gr.Plot()
            gr.Markdown("<h3>Price over time</h3>")
            with gr.Tab("All Time"):
                mp1 = gr.LinePlot(visible=False, label="All time", height=1000, width=1000)
                with gr.Row():
                    Max_all = gr.Textbox(placeholder="The Maximum price the stock has ever reached", label='Maximum of all time')
                    Min_all = gr.Textbox(placeholder="The Minimum price the stock has ever reached", label="Minimum of all time")
            with gr.Tab("Past year"):
                mp2 = gr.LinePlot(visible=False, label="Last year")
                with gr.Row():
                    Max_year = gr.Textbox(placeholder="The Maximum price for the last year", label='Maximum')
                    Min_year = gr.Textbox(placeholder="The Minimum price for the last year", label="Minimum")
            with gr.Tab("Past few Days"):
                mp3 = gr.LinePlot(visible=False, label="Past few Days")
                with gr.Row():
                    Max_rec = gr.Textbox(placeholder="The Maximum price for the last few days", label='Recent Maximum')
                    Min_rec = gr.Textbox(placeholder="The Minimum price for the last few days", label="Recent Minimum")
            gr.Markdown("<center><h2>Predictive Analysis</h2></center>")
            Next_day = gr.Textbox(placeholder="Predicted price for tomorrow", label="Predicted price for Tomorrow")
            Next_plot = gr.LinePlot(visible=False)
            Next_plot_all = gr.LinePlot(visible=False)
            Next_plot_year = gr.LinePlot(visible=False)
    # populate every component once the page loads
    demo.load(get_ans, outputs=[mp, mp1, mp2, mp3, Max_all, Min_all, Max_year, Min_year, Max_rec, Min_rec, Next_day, Next_plot, Next_plot_all, Next_plot_year], _js=on_load)
    demo.load(get_info, outputs=[info1, ratio1, ratio2, ratio3, ratio4, vol, mar_cap, curr_price, rev_per], _js=on_load)
    demo.load(get_seo, outputs=[trend_g, Seaso, resid], _js=on_load)

demo.launch(inline=False)