import gradio as gr
import pandas as pd
import numpy as np
from datetime import datetime, timedelta
import yfinance as yf
from sklearn.preprocessing import MinMaxScaler
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import LSTM, Dense
import plotly.graph_objects as go

def fetch_ethereum_data():
    """
    Fetch historical Ethereum price data using yfinance.
    Returns a DataFrame with a datetime index and the hourly closing price
    for the past week.
    """
    eth_ticker = yf.Ticker("ETH-USD")
    # Get hourly data for the past week
    hist_data = eth_ticker.history(period="7d", interval="1h")
    # Keep only the datetime index and the Close price
    return hist_data[['Close']]

def prepare_data(data, sequence_length=24):
    """
    Prepare data for the LSTM model by creating sequences and scaling.
    Args:
        data: DataFrame with price data and datetime index
        sequence_length: Number of time steps to use for prediction (default: 24 hours)
    """
    # Scale the data
    scaler = MinMaxScaler()
    scaled_data = scaler.fit_transform(data['Close'].values.reshape(-1, 1))
    # Create sequences for training
    X, y = [], []
    for i in range(sequence_length, len(scaled_data)):
        X.append(scaled_data[i-sequence_length:i, 0])
        y.append(scaled_data[i, 0])
    X = np.array(X)
    y = np.array(y)
    # Reshape X for LSTM input
    X = X.reshape(X.shape[0], X.shape[1], 1)
    return X, y, scaler

def create_model(sequence_length):
    """
    Create and compile an LSTM model for time series prediction.
    Uses a two-layer LSTM architecture followed by dense layers.
    """
    model = Sequential([
        LSTM(50, return_sequences=True, input_shape=(sequence_length, 1)),
        LSTM(50, return_sequences=False),
        Dense(25),
        Dense(1)
    ])
    model.compile(optimizer='adam', loss='mse')
    return model
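
# With sequence_length=24 (as used in predict_ethereum below), the model maps a
# window of 24 hourly closing prices, shape (24, 1), to a single scaled price.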

def predict_future_prices(model, last_sequence, scaler, days=7):
    """
    Predict future prices using the trained model.
    Args:
        model: Trained LSTM model
        last_sequence: Last sequence of known prices (already scaled)
        scaler: Fitted MinMaxScaler
        days: Number of days to predict (default: 7)
    """
    future_predictions = []
    current_sequence = last_sequence.copy()
    # Convert days to hours since we're using hourly data
    hours = days * 24
    for _ in range(hours):
        # Predict the next price from the current window
        scaled_prediction = model.predict(current_sequence.reshape(1, -1, 1), verbose=0)
        # Inverse transform to get the actual price
        prediction = scaler.inverse_transform(scaled_prediction)[0][0]
        future_predictions.append(prediction)
        # Slide the window: drop the oldest value and append the new prediction
        current_sequence = np.roll(current_sequence, -1)
        current_sequence[-1] = scaled_prediction
    return future_predictions
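
# Note: the forecast above is autoregressive -- each predicted (scaled) price is
# fed back into the 24-hour window, so a 7-day forecast is 168 chained one-step
# predictions and errors can compound over the horizon.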

def create_prediction_plot(historical_data, future_predictions, future_dates):
    """
    Create an interactive plot showing the last week of historical prices
    and week-ahead predictions with hourly granularity.
    Args:
        historical_data: DataFrame with historical price data and datetime index
        future_predictions: List of predicted prices
        future_dates: List of future datetime values for the predictions
    """
    fig = go.Figure()
    # Plot historical data using the datetime index
    fig.add_trace(go.Scatter(
        x=historical_data.index,
        y=historical_data['Close'],
        name='Historical Prices',
        line=dict(color='blue')
    ))
    # Plot predictions
    fig.add_trace(go.Scatter(
        x=future_dates,
        y=future_predictions,
        name='Predictions',
        line=dict(color='red', dash='dash')
    ))
    fig.update_layout(
        title='Ethereum Price Prediction (Hourly)',
        xaxis_title='Date',
        yaxis_title='Price (USD)',
        hovermode='x unified'
    )
    return fig

def predict_ethereum():
    """
    Main function for the Gradio interface that orchestrates the prediction process.
    Handles hourly data and generates predictions for the next week.
    """
    # Fetch and prepare data
    data = fetch_ethereum_data()
    sequence_length = 24  # Use 24 hours of data for prediction
    X, y, scaler = prepare_data(data, sequence_length)
    # Create and train model
    model = create_model(sequence_length)
    model.fit(X, y, epochs=50, batch_size=32, verbose=0)
    # Prepare the last sequence for prediction
    last_sequence = scaler.transform(data['Close'].values[-sequence_length:].reshape(-1, 1))
    # Generate future predictions
    future_predictions = predict_future_prices(model, last_sequence, scaler)
    # Create future dates (hourly intervals)
    last_date = data.index[-1]
    future_dates = [last_date + timedelta(hours=i+1) for i in range(len(future_predictions))]
    # Create and return the plot
    fig = create_prediction_plot(data, future_predictions, future_dates)
    return fig

# Create Gradio interface
iface = gr.Interface(
    fn=predict_ethereum,
    inputs=None,
    outputs=gr.Plot(),
    title="Ethereum Price Prediction",
    description="Click to generate a 7-day price prediction for Ethereum based on hourly historical data.",
    theme=gr.themes.Base()
)

if __name__ == "__main__":
    iface.launch()
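
# A minimal sketch of exercising the pipeline without the Gradio UI, e.g. for
# local debugging (write_html is standard Plotly; the filename is illustrative):
#
#   fig = predict_ethereum()
#   fig.write_html("eth_prediction.html")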