#!/usr/bin/env python3
"""Fetch AAPL and MSFT historical prices since 2020 and plot closing prices.

Outputs a PNG under `plots/` named `aapl_msft_2020_today.png`.
"""
import os
from datetime import date

import pandas as pd
import yfinance as yf
from pandas_datareader import data as pdr
import matplotlib.pyplot as plt
import seaborn as sns
import time
import requests
from io import StringIO

# Try to import OpenBB (provides multiple provider backends)
try:
    from openbb import openbb as obb
    _HAS_OPENBB = True
except Exception:
    _HAS_OPENBB = False


def fetch_prices(tickers, start_date, max_retries=8, use_cache=True, cache_dir='data', openbb_provider=None):
    """Return a DataFrame of daily closing prices for *tickers* since *start_date*.

    Provider order: OpenBB (when importable) -> Stooq (direct CSV download,
    then pandas_datareader) -> yfinance with exponential-backoff retries.
    Each ticker's raw frame is cached as ``<cache_dir>/<ticker>.csv`` and
    reused on later runs when *use_cache* is true.

    Parameters
    ----------
    tickers : iterable of str
        Ticker symbols to fetch.
    start_date : str or datetime-like
        Earliest date to include (inclusive).
    max_retries : int
        yfinance download attempts per ticker before falling back to cache.
    use_cache : bool
        Whether to read existing per-ticker CSV caches.
    cache_dir : str
        Directory holding per-ticker CSV caches.
    openbb_provider : str or None
        Provider name forwarded to OpenBB, when OpenBB is available.

    Returns
    -------
    pandas.DataFrame with one column per ticker ('Adj Close' preferred,
    otherwise 'Close' or the first numeric column).

    Raises
    ------
    RuntimeError
        When a ticker cannot be obtained from any provider or cache.
    """
    start_ts = pd.to_datetime(start_date)
    # Create the cache directory up front: previously only the yfinance path
    # did this, so cache writes in the OpenBB/Stooq paths silently failed
    # (swallowed by bare excepts) whenever the directory did not yet exist.
    os.makedirs(cache_dir, exist_ok=True)

    def _cache_path(t):
        return os.path.join(cache_dir, f"{t}.csv")

    def _cached_series(t, label=None):
        """Return the cached price series for *t* trimmed to start_ts, or None."""
        cache_file = _cache_path(t)
        if not (use_cache and os.path.exists(cache_file)):
            return None
        try:
            cached = pd.read_csv(cache_file, index_col=0, parse_dates=True)
            cached = cached.loc[cached.index >= start_ts]
            if cached.empty:
                return None
            series = cached['Adj Close'] if 'Adj Close' in cached.columns else cached.iloc[:, 0]
            suffix = f" ({label})" if label else ""
            print(f"Using cached data for {t} from {cache_file}{suffix}")
            return series
        except Exception:
            # Corrupt/unreadable cache: fall through to a fresh download.
            print(f"Failed to read cache for {t}, will attempt download")
            return None

    def _write_cache(t, df):
        """Best-effort CSV cache write; a failure is logged, never fatal."""
        try:
            df.to_csv(_cache_path(t))
        except Exception as e:
            print(f"Warning: failed to write cache for {t}: {e}")

    def _assemble(frames):
        """Combine per-ticker series into a single DataFrame, one column each."""
        df_all = pd.concat(frames, axis=1)
        df_all.columns = list(frames.keys())
        return df_all

    # First try: use OpenBB if available (lets you switch provider easily)
    if _HAS_OPENBB:
        try:
            frames = {}
            for t in tickers:
                cached = _cached_series(t, 'openbb path')
                if cached is not None:
                    frames[t] = cached
                    continue

                print(f"Attempting to fetch {t} from OpenBB (provider={openbb_provider})...")
                # OpenBB API signatures vary between versions; try a couple of
                # common call patterns before giving up.
                try:
                    if openbb_provider is not None:
                        df = obb.equity.price.historical(t, provider=openbb_provider)
                    else:
                        df = obb.equity.price.historical(t)
                except TypeError:
                    try:
                        df = obb.equity.price.historical(t, start_date)
                    except Exception:
                        df = obb.equity.price.historical(t, start=start_date)

                if df is None or (hasattr(df, 'empty') and df.empty):
                    raise RuntimeError(f"OpenBB returned empty data for {t}")

                # Normalize: ensure a sorted datetime index.
                df = df.copy()
                if not pd.api.types.is_datetime64_any_dtype(df.index):
                    if 'date' in df.columns:
                        df.set_index('date', inplace=True)
                    else:
                        df.index = pd.to_datetime(df.index)
                df = df.sort_index()

                # Prefer 'Adj Close', then 'Close', then first numeric column.
                if 'Adj Close' in df.columns:
                    series = df['Adj Close']
                elif 'Close' in df.columns:
                    series = df['Close']
                else:
                    numcols = df.select_dtypes(include='number').columns
                    if len(numcols) == 0:
                        raise RuntimeError('OpenBB data has no numeric price column')
                    series = df[numcols[0]]

                # Trim to the requested window: the no-argument OpenBB call
                # above has no start date, so the frame may reach further back.
                frames[t] = series.loc[series.index >= start_ts]
                _write_cache(t, df)
            return _assemble(frames)
        except Exception as e:
            print(f"OpenBB fetch failed or incomplete: {e}. Falling back to Stooq.")

    def fetch_stooq_direct(t):
        """Directly fetch the daily CSV from stooq with timeout and retries.

        Returns a date-indexed DataFrame on success or raises on failure.
        """
        symbol = f"{t}.US"
        start = start_ts.strftime("%Y%m%d")
        end = pd.Timestamp.today().strftime("%Y%m%d")
        url = f"https://stooq.com/q/d/l/?s={symbol}&i=d&d1={start}&d2={end}"
        max_direct = 3
        for attempts in range(1, max_direct + 1):
            try:
                resp = requests.get(url, timeout=60)
                resp.raise_for_status()
                df = pd.read_csv(StringIO(resp.text), parse_dates=['Date'], index_col='Date')
                return df.sort_index()
            except Exception as e:
                wait = 2 ** attempts
                print(f"stooq direct fetch failed for {t} (attempt {attempts}/{max_direct}): {e}. Retrying in {wait}s")
                time.sleep(wait)
        raise RuntimeError(f"Failed to fetch {t} from stooq via direct requests after {max_direct} attempts")

    try:
        frames = {}
        for t in tickers:
            cached = _cached_series(t, 'stooq path')
            if cached is not None:
                frames[t] = cached
                continue
            print(f"Attempting to fetch {t} from Stooq via direct requests...")
            try:
                df = fetch_stooq_direct(t)
                # Stooq CSV has columns: Date,Open,High,Low,Close,Volume
                frames[t] = df['Close'] if 'Close' in df.columns else df.iloc[:, 0]
            except Exception as e_direct:
                print(f"Direct stooq fetch failed for {t}: {e_direct}. Falling back to pandas_datareader Stooq...")
                try:
                    df = pdr.DataReader(t, 'stooq', start_date)
                except Exception as e_pdr:
                    print(f"pandas_datareader stooq fetch failed for {t}: {e_pdr}")
                    raise
                if df is None or df.empty:
                    raise RuntimeError(f"Stooq returned empty data for {t}")
                df = df.sort_index()
                frames[t] = df['Close']
            _write_cache(t, df)
        return _assemble(frames)
    except Exception as e:
        print(f"Stooq fetch failed or incomplete: {e}. Falling back to yfinance.")

    # Final fallback: download each ticker separately with retries (yfinance).
    frames = {}
    for t in tickers:
        cached = _cached_series(t)
        if cached is not None:
            frames[t] = cached
            continue

        attempt = 0
        # Larger waits keep yfinance retries below its rate-limit threshold.
        while attempt < max_retries:
            try:
                print(f"Fetching {t} (attempt {attempt+1})...")
                d = yf.download(t, start=start_date, progress=False, threads=False)
                if d is None or d.empty:
                    raise RuntimeError("Empty data returned")
                frames[t] = d['Adj Close'] if 'Adj Close' in d else d['Close']
                _write_cache(t, d)
                break
            except Exception as e:
                attempt += 1
                # Exponential backoff, capped at 5 minutes.
                wait = min(300, int((2 ** attempt) * 1.5))
                print(f"Failed to fetch {t}: {e}. Retry after {wait}s (attempt {attempt}/{max_retries})")
                time.sleep(max(1, wait))
        else:
            # All downloads failed: accept a stale cache (no date filter) or give up.
            cache_file = _cache_path(t)
            if os.path.exists(cache_file):
                try:
                    cached = pd.read_csv(cache_file, index_col=0, parse_dates=True)
                    frames[t] = cached['Adj Close'] if 'Adj Close' in cached.columns else cached.iloc[:, 0]
                    print(f"Used stale cache for {t} after failed downloads")
                    continue
                except Exception:
                    pass
            raise RuntimeError(f"Failed to download {t} after {max_retries} attempts and no cache available")

    return _assemble(frames)


def plot_prices(df, outpath):
    """Plot each column of *df* as a time series and save a PNG to *outpath*.

    Parameters
    ----------
    df : pandas.DataFrame
        Date-indexed prices, one column per ticker.
    outpath : str
        Destination PNG path; parent directories are created as needed.

    Raises
    ------
    RuntimeError
        When *df* contains no plottable data. The check runs before any
        directory or figure is created, so an aborted call leaves no artifacts.
    """
    # Refuse empty data up front (previously the output directory was created
    # and a figure allocated before this check ran).
    if df.empty or all(df[col].dropna().empty for col in df.columns):
        raise RuntimeError("No data to plot; aborting without saving an empty plot")

    # Use seaborn to set the theme (ensures the style exists)
    sns.set_theme(style='darkgrid')
    fig, ax = plt.subplots(figsize=(12, 6))
    for col in df.columns:
        ax.plot(df.index, df[col], label=col)
    ax.set_title('AAPL vs MSFT Closing Prices (since 2020)')
    ax.set_xlabel('Date')
    ax.set_ylabel('Price (USD)')
    ax.legend()
    fig.tight_layout()

    # os.makedirs('') raises FileNotFoundError, so only create the parent
    # directory when outpath actually has one.
    outdir = os.path.dirname(outpath)
    if outdir:
        os.makedirs(outdir, exist_ok=True)
    try:
        fig.savefig(outpath, dpi=150)
    finally:
        # Release the figure even if saving fails; avoids leaking figures
        # across repeated calls.
        plt.close(fig)
    print(f"Saved plot to {outpath}")


def main():
    """Entry point: fetch AAPL/MSFT prices since 2020 and save the plot."""
    tickers = ['AAPL', 'MSFT']
    start_date = '2020-01-01'
    print(f"Fetching {', '.join(tickers)} since {start_date}...")

    # Provider preference comes from the environment; PREFERRED_PROVIDER
    # takes precedence over OPENBB_PROVIDER.
    provider = os.environ.get('PREFERRED_PROVIDER') or os.environ.get('OPENBB_PROVIDER')
    if provider:
        print(f"Using provider from env: {provider}")

    prices = fetch_prices(tickers, start_date, openbb_provider=provider)

    # Normalize the result: a bare Series becomes a one-column frame, and a
    # MultiIndex column set is flattened down to the ticker level.
    if isinstance(prices, pd.Series):
        prices = prices.to_frame()
    if isinstance(prices.columns, pd.MultiIndex):
        prices.columns = prices.columns.get_level_values(1)

    plot_prices(prices, os.path.join('plots', 'aapl_msft_2020_today.png'))


if __name__ == '__main__':
    main()
