import time
import streamlit as st
import yfinance as yf
from functools import wraps
import pandas as pd
import numpy as np
import random
from datetime import datetime, timedelta
try:
    import pandas_datareader.data as web
    PANDAS_DATAREADER_AVAILABLE = True
except ImportError:
    PANDAS_DATAREADER_AVAILABLE = False
    st.warning("pandas_datareader not available. Install it with: pip install pandas-datareader")

class RateLimitManager:
    """Manages rate limiting for API calls"""
    
    def __init__(self, min_delay=3.0):
        self.min_delay = min_delay
        self.last_call_time = 0
    
    def wait_if_needed(self):
        """Ensure minimum delay between API calls"""
        current_time = time.time()
        time_since_last_call = current_time - self.last_call_time
        
        if time_since_last_call < self.min_delay:
            sleep_time = self.min_delay - time_since_last_call + random.uniform(0.5, 1.5)
            time.sleep(sleep_time)
        
        self.last_call_time = time.time()

# Global rate limit manager
rate_limiter = RateLimitManager()

def create_sample_data(ticker, period='1mo'):
    """Create sample data when API is unavailable"""
    
    # Define sample data for common tickers
    sample_data = {
        'NVDA': {'base_price': 450, 'volatility': 0.03, 'trend': 0.001},
        'AAPL': {'base_price': 190, 'volatility': 0.02, 'trend': 0.0005},
        'GOOGL': {'base_price': 140, 'volatility': 0.025, 'trend': 0.0008},
        'MSFT': {'base_price': 420, 'volatility': 0.02, 'trend': 0.0007},
        'AMZN': {'base_price': 150, 'volatility': 0.025, 'trend': 0.0006}
    }
    
    # Get parameters for ticker or use defaults
    params = sample_data.get(ticker, {'base_price': 100, 'volatility': 0.02, 'trend': 0.0005})
    
    # Map the period to a calendar-day span (weekends are dropped below),
    # matching the spans used by fetch_data_with_stooq
    if period == 'max' or period == '1y':
        days = 365
    elif period == '6mo':
        days = 180
    elif period == '1mo':
        days = 30
    elif period == '5d':
        days = 5
    else:
        days = 30
    
    # Create date range
    end_date = datetime.now()
    start_date = end_date - timedelta(days=days)
    dates = pd.date_range(start=start_date, end=end_date, freq='D')
    
    # Remove weekends
    dates = dates[dates.weekday < 5]
    
    # Generate price data
    np.random.seed(42)  # For consistent sample data
    returns = np.random.normal(params['trend'], params['volatility'], len(dates))
    
    prices = [params['base_price']]
    for ret in returns[1:]:
        prices.append(prices[-1] * (1 + ret))
    
    # Create DataFrame
    df = pd.DataFrame(index=dates[:len(prices)])
    df['Close'] = prices
    df['Open'] = df['Close'].shift(1).fillna(df['Close'])
    df['High'] = df[['Open', 'Close']].max(axis=1) * (1 + np.random.uniform(0, 0.02, len(df)))
    df['Low'] = df[['Open', 'Close']].min(axis=1) * (1 - np.random.uniform(0, 0.02, len(df)))
    df['Volume'] = np.random.randint(1000000, 10000000, len(df))
    
    return df

def retry_with_backoff(max_retries=5, base_delay=10):
    """Decorator for retrying functions with exponential backoff"""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            for attempt in range(max_retries):
                try:
                    rate_limiter.wait_if_needed()
                    return func(*args, **kwargs)
                except Exception as e:
                    error_msg = str(e).lower()
                    
                    if any(keyword in error_msg for keyword in ['rate', 'limit', '429', 'too many requests']):
                        if attempt < max_retries - 1:
                            wait_time = base_delay * (2 ** attempt) + random.uniform(2, 5)
                            st.warning(f"🚫 Rate limit hit. Waiting {wait_time:.1f} seconds before retry {attempt + 2}/{max_retries}...")
                            time.sleep(wait_time)
                            continue
                        else:
                            st.error("⏱️ Rate limit exceeded after all retries. Using sample data.")
                            return None
                    elif any(keyword in error_msg for keyword in ['expecting value', 'no timezone', 'delisted', 'json']):
                        if attempt < max_retries - 1:
                            wait_time = base_delay + random.uniform(2, 4)
                            st.warning(f"πŸ”„ Data parsing error. Retrying in {wait_time:.1f} seconds... (attempt {attempt + 2}/{max_retries})")
                            time.sleep(wait_time)
                            continue
                        else:
                            st.warning("⚠️ Unable to fetch real data. Using sample data for demonstration.")
                            return None
                    else:
                        if attempt < max_retries - 1:
                            wait_time = base_delay + random.uniform(1, 3)
                            st.warning(f"❗ Error: {str(e)[:100]}... Retrying in {wait_time:.1f} seconds...")
                            time.sleep(wait_time)
                            continue
                        else:
                            st.error(f"❌ Failed after {max_retries} attempts: {str(e)[:100]}...")
                            return None
            return None
        return wrapper
    return decorator
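
# Example usage (illustrative sketch, not referenced elsewhere in this module):
# any data-fetching helper can be wrapped so that each call is spaced out by the
# global rate limiter and retried with exponential backoff. `fetch_close_prices`
# is a hypothetical name chosen for the example.
@retry_with_backoff(max_retries=3, base_delay=5)
def fetch_close_prices(ticker_symbol, period='1mo'):
    """Fetch closing prices via yfinance; returns None if every retry fails."""
    return yf.Ticker(ticker_symbol).history(period=period)['Close']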

def fetch_data_with_stooq(ticker_symbol, start_date=None, end_date=None, period='1mo'):
    """Fetch stock data using pandas_datareader with stooq as source"""
    if not PANDAS_DATAREADER_AVAILABLE:
        return None
    
    try:
        # Convert period to date range if start/end not provided
        if start_date is None or end_date is None:
            end_date = datetime.now()
            if period == 'max' or period == '1y':
                start_date = end_date - timedelta(days=365)
            elif period == '6mo':
                start_date = end_date - timedelta(days=180)
            elif period == '1mo':
                start_date = end_date - timedelta(days=30)
            elif period == '5d':
                start_date = end_date - timedelta(days=5)
            else:
                start_date = end_date - timedelta(days=30)
        
        # Fetch data from stooq
        df = web.DataReader(ticker_symbol, 'stooq', start_date, end_date)
        
        if df.empty:
            return None
        
        # Stooq returns data in reverse chronological order, so sort it
        df = df.sort_index()
        
        # Ensure we have the required columns
        required_columns = ['Open', 'High', 'Low', 'Close', 'Volume']
        if all(col in df.columns for col in required_columns):
            return df
        else:
            st.warning(f"Missing columns in stooq data: {[col for col in required_columns if col not in df.columns]}")
            return None
            
    except Exception as e:
        st.error(f"Error fetching data from stooq: {str(e)}")
        return None

def safe_yfinance_call(ticker_symbol, operation='history', **kwargs):
    """Safely call multiple data sources with fallback to sample data"""
    
    # First try stooq (pandas_datareader) for historical data
    if operation == 'history' and PANDAS_DATAREADER_AVAILABLE:
        try:
            st.sidebar.info(f"πŸ”„ Trying stooq API for {ticker_symbol}...")
            stooq_data = fetch_data_with_stooq(
                ticker_symbol, 
                start_date=kwargs.get('start'),
                end_date=kwargs.get('end'),
                period=kwargs.get('period', '1mo')
            )
            
            if stooq_data is not None and not stooq_data.empty:
                st.sidebar.success(f"βœ… Real data from stooq for {ticker_symbol}")
                return stooq_data
            else:
                st.sidebar.warning(f"⚠️ Stooq failed for {ticker_symbol}")
        except Exception as e:
            st.sidebar.warning(f"⚠️ Stooq error: {str(e)[:50]}...")
    
    # If stooq fails or for info operation, try yfinance as backup
    try:
        st.sidebar.info(f"πŸ”„ Trying yfinance API for {ticker_symbol}...")
        ticker = yf.Ticker(ticker_symbol)
        
        if operation == 'history':
            result = ticker.history(
                timeout=10,
                prepost=False,
                auto_adjust=True,
                back_adjust=False,
                repair=True,
                keepna=False,
                actions=False,
                **kwargs
            )
            
            if result is not None and not result.empty and len(result) > 0:
                st.sidebar.success(f"βœ… Real data from yfinance for {ticker_symbol}")
                return result
            else:
                st.sidebar.warning(f"⚠️ yfinance returned empty data for {ticker_symbol}")
                
        elif operation == 'info':
            result = ticker.info
            if result and isinstance(result, dict) and len(result) > 1:
                st.sidebar.success(f"βœ… Info from yfinance for {ticker_symbol}")
                return result
            else:
                st.sidebar.warning(f"⚠️ yfinance info empty for {ticker_symbol}")
            
        else:
            raise ValueError(f"Unsupported operation: {operation}")
            
    except Exception as e:
        st.sidebar.warning(f"⚠️ yfinance also failed: {str(e)[:50]}...")
    
    # Finally fallback to sample data
    if operation == 'history':
        st.sidebar.warning(f"πŸ“Š Using sample data for {ticker_symbol}")
        return create_sample_data(ticker_symbol, kwargs.get('period', '1mo'))
    elif operation == 'info':
        sample_prices = {
            'NVDA': 450, 'AAPL': 190, 'GOOGL': 140, 'MSFT': 420, 'AMZN': 150
        }
        base_price = sample_prices.get(ticker_symbol, 100)
        return {
            'symbol': ticker_symbol,
            'shortName': f'{ticker_symbol} Inc.',
            'currentPrice': base_price + random.uniform(-2, 2),
            'previousClose': base_price
        }
    else:
        raise Exception(f"All data sources failed for {ticker_symbol}")
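
# Example calls (illustrative sketch): one month of daily bars and a quote-info
# dict for a ticker, falling back through stooq -> yfinance -> generated sample
# data. The variable names are placeholders for the example.
#
#     history_df = safe_yfinance_call('NVDA', operation='history', period='1mo')
#     quote_info = safe_yfinance_call('NVDA', operation='info')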

def get_cached_data(cache_key, ttl_seconds=300):
    """Get cached data from session state if still valid"""
    if cache_key in st.session_state:
        cache_time_key = f"cache_time_{cache_key}"
        if cache_time_key in st.session_state:
            cache_time = st.session_state[cache_time_key]
            if time.time() - cache_time < ttl_seconds:
                return st.session_state[cache_key]
    return None

def set_cached_data(cache_key, data):
    """Cache data in session state with timestamp"""
    st.session_state[cache_key] = data
    st.session_state[f"cache_time_{cache_key}"] = time.time()

def clear_cache(pattern=None):
    """Clear cached data matching pattern"""
    if pattern is None:
        # Clear all cache
        keys_to_remove = [key for key in st.session_state.keys()
                         if key.startswith('cache_time_') or key.startswith('data_')
                         or key.startswith('model_data_')]
    else:
        keys_to_remove = [key for key in st.session_state.keys() if pattern in key]
    
    for key in keys_to_remove:
        del st.session_state[key]
    
    return len(keys_to_remove)
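
# Illustrative helper (an assumption, not used elsewhere in this module): shows
# the intended pattern of checking the session-state cache before hitting any
# data source, then caching the fresh result for later reruns. The `data_`
# prefix matches the keys that clear_cache() and display_cache_info() look for.
def get_history_cached(ticker_symbol, period='1mo', ttl_seconds=300):
    """Return price history for a ticker, reusing cached data while it is fresh."""
    cache_key = f"data_{ticker_symbol}_{period}"
    cached = get_cached_data(cache_key, ttl_seconds=ttl_seconds)
    if cached is not None:
        return cached
    data = safe_yfinance_call(ticker_symbol, operation='history', period=period)
    if data is not None:
        set_cached_data(cache_key, data)
    return data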

def format_error_message(error):
    """Format error messages for better user experience"""
    error_str = str(error).lower()
    
    if "rate" in error_str or "limit" in error_str:
        return ("🚫 **Rate Limit Exceeded**\n\n"
                "Yahoo Finance has temporarily limited your requests. This happens when too many requests are made in a short time.\n\n"
                "**What you can do:**\n"
                "- Wait 5-10 minutes before trying again\n"
                "- Use the cached data if available\n"
                "- Try a different stock ticker\n\n"
                "The app will automatically retry with delays between requests.")
    elif "network" in error_str or "connection" in error_str:
        return ("🌐 **Network Error**\n\n"
                "There seems to be a connectivity issue.\n\n"
                "**What you can do:**\n"
                "- Check your internet connection\n"
                "- Try refreshing the page\n"
                "- Wait a moment and try again")
    else:
        return f"❌ **Error**: {str(error)}"

def display_cache_info():
    """Display cache information in sidebar"""
    with st.sidebar:
        with st.expander("Cache Information"):
            cache_items = [key for key in st.session_state.keys() 
                          if key.startswith('data_') or key.startswith('model_data_')]
            
            if cache_items:
                st.write(f"**Cached items:** {len(cache_items)}")
                for item in cache_items[:5]:  # Show first 5 items
                    cache_time_key = f"cache_time_{item}"
                    if cache_time_key in st.session_state:
                        cache_time = st.session_state[cache_time_key]
                        age_minutes = (time.time() - cache_time) / 60
                        st.write(f"β€’ {item.replace('data_', '')}: {age_minutes:.1f}m ago")
                
                if len(cache_items) > 5:
                    st.write(f"... and {len(cache_items) - 5} more")
                    
                if st.button("Clear All Cache"):
                    cleared = clear_cache()
                    st.success(f"Cleared {cleared} cached items")
                    st.rerun()
            else:
                st.write("No cached data")