Add earnings model with calendar detection, implied move, post-earnings drift analysis
Browse files- earnings_model.py +290 -0
earnings_model.py
ADDED
|
@@ -0,0 +1,290 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Earnings Model v1.0 — Earnings Event Intelligence
|
| 2 |
+
Detects earnings proximity, estimates implied move from options,
|
| 3 |
+
analyzes post-earnings drift patterns, and adjusts position sizing.
|
| 4 |
+
Based on: Ball & Brown (1968), Frazzini & Lamont (2007) PEAD
|
| 5 |
+
"""
|
| 6 |
+
from datetime import date, datetime, timedelta
from typing import Dict, Optional, Tuple

import numpy as np
import pandas as pd
import yfinance as yf
|
| 11 |
+
|
| 12 |
+
# Typical quarterly earnings windows by month (approximate).
# Used only as a seasonal fallback when yfinance provides no calendar data:
# 'months' are the calendar months in which that fiscal quarter is usually
# reported; 'label' echoes the quarter name for display.
EARNINGS_CALENDAR = {
    # Q1 (Jan-Mar): reports Apr-May
    'Q1': {'months': [4, 5], 'label': 'Q1'},
    # Q2 (Apr-Jun): reports Jul-Aug
    'Q2': {'months': [7, 8], 'label': 'Q2'},
    # Q3 (Jul-Sep): reports Oct-Nov
    'Q3': {'months': [10, 11], 'label': 'Q3'},
    # Q4 (Oct-Dec): reports Jan-Feb
    'Q4': {'months': [1, 2], 'label': 'Q4'},
}

# Sector-specific typical implied moves (based on historical options data).
# Values are decimal fractions of spot (0.045 == 4.5% expected single-event
# move); 'default' is the fallback when a ticker's sector is unknown.
SECTOR_IMPLIED_MOVES = {
    'Technology': 0.045,
    'Healthcare': 0.040,
    'Financials': 0.030,
    'Energy': 0.055,
    'Consumer Discretionary': 0.050,
    'Consumer Staples': 0.025,
    'Industrials': 0.035,
    'Communication': 0.045,
    'Utilities': 0.020,
    'Materials': 0.045,
    'Real Estate': 0.030,
    'default': 0.040,
}
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class EarningsModel:
    """Earnings event intelligence for quant trading.

    Combines three signals into a position-sizing recommendation:

    * proximity to the next earnings report (yfinance calendar, with a
      seasonal fallback via ``EARNINGS_CALENDAR``),
    * the options-implied earnings move (ATM straddle price, falling back
      to ``SECTOR_IMPLIED_MOVES`` or a flat default),
    * historical post-earnings announcement drift (PEAD), proxied by price
      behavior after quarterly volume spikes.

    Based on: Ball & Brown (1968), Frazzini & Lamont (2007) PEAD.
    """

    def __init__(self):
        # Reserved for memoizing per-ticker network lookups.
        self._cache = {}

    @staticmethod
    def _coerce_date(value) -> Optional[date]:
        """Best-effort conversion of a yfinance calendar entry to a date.

        Accepts 'YYYY-MM-DD' strings, datetime, or date objects; returns
        None for anything else (e.g. NaT or unexpected types).
        """
        if isinstance(value, str):
            try:
                return datetime.strptime(value, '%Y-%m-%d').date()
            except ValueError:
                return None
        if isinstance(value, datetime):  # must check before date: datetime IS a date
            return value.date()
        if isinstance(value, date):
            return value
        return None

    @classmethod
    def _calendar_earnings_date(cls, cal) -> Optional[date]:
        """Extract 'Earnings Date' from a yfinance calendar object.

        Handles both the dict form returned by newer yfinance versions and
        the DataFrame form (field in the index or in the columns) returned
        by older ones. Returns None when nothing parseable is found.
        """
        if cal is None:
            return None
        if isinstance(cal, dict):
            entries = cal.get('Earnings Date')
            if not isinstance(entries, (list, tuple)):
                entries = [entries] if entries is not None else []
            for entry in entries:
                parsed = cls._coerce_date(entry)
                if parsed is not None:
                    return parsed
            return None
        # DataFrame path; getattr guards keep odd shapes from raising.
        if getattr(cal, 'empty', True):
            return None
        if 'Earnings Date' in getattr(cal, 'index', []):
            return cls._coerce_date(cal.loc['Earnings Date'].values[0])
        if 'Earnings Date' in getattr(cal, 'columns', []):
            return cls._coerce_date(cal['Earnings Date'].iloc[0])
        return None

    def estimate_earnings_date(self, ticker: str) -> Optional[date]:
        """Estimate the next earnings date for *ticker*.

        Tries the yfinance calendar first; on failure or absence, falls
        back to a seasonal guess (the 15th of the current month when it
        lies in a typical reporting window). Returns None otherwise.

        The calendar lookup is isolated in its own try-block so a network
        or API-shape failure no longer skips the seasonal fallback.
        """
        try:
            parsed = self._calendar_earnings_date(yf.Ticker(ticker).calendar)
            if parsed is not None:
                return parsed
        except Exception:
            # Network/API failure: fall through to the seasonal estimate.
            pass

        today = datetime.now()
        for data in EARNINGS_CALENDAR.values():
            if today.month in data['months']:
                # Next likely report window: mid-month.
                return date(today.year, today.month, 15)
        return None

    def days_to_earnings(self, ticker: str) -> Tuple[Optional[int], Optional[date]]:
        """Return (days_until_earnings, estimated_date), or (None, None).

        Negative day counts mean the estimated date is in the past.
        """
        ed = self.estimate_earnings_date(ticker)
        if ed is None:
            return None, None
        return (ed - datetime.now().date()).days, ed

    def implied_move(self, ticker: str, default_move: Optional[float] = None) -> Dict:
        """Estimate the implied earnings move as a fraction of spot.

        Order of preference:
          1. ATM straddle price from the nearest options expiration
             (straddle / spot approximates the market-implied event move);
          2. sector-average move from SECTOR_IMPLIED_MOVES;
          3. *default_move* (or 4% when not given).

        Returns a dict with 'implied_move_pct', 'annualized_pct', 'source'
        and, when derived from options, also 'straddle_price', 'expiry',
        'atm_strike'.
        """
        move = default_move or 0.04  # 4% default
        source = 'default'

        try:
            t = yf.Ticker(ticker)
            expirations = t.options
            if expirations:
                # Anchor ATM selection to the actual spot price, not a
                # mid-chain strike (the chain is not centered on spot).
                spot = float(t.history(period='1d')['Close'].iloc[-1])
                chain = t.option_chain(expirations[0])
                calls, puts = chain.calls, chain.puts

                if len(calls) > 0 and len(puts) > 0:
                    # BUGFIX: row lookup must use .loc — `calls[idxmin()]`
                    # selected a *column* by integer label and always raised.
                    atm_call = calls.loc[calls['strike'].sub(spot).abs().idxmin()]
                    atm_put = puts.loc[puts['strike'].sub(spot).abs().idxmin()]

                    def _premium(row) -> float:
                        # Prefer last trade; fall back to ask when untraded.
                        price = row['lastPrice'] if 'lastPrice' in row else row['ask']
                        return float(price)

                    straddle = _premium(atm_call) + _premium(atm_put)
                    move = straddle / spot
                    source = 'options_chain'

                    # Actual days to expiry for a crude annualization
                    # (previously hardcoded at 30 days).
                    try:
                        exp_date = datetime.strptime(expirations[0], '%Y-%m-%d').date()
                        days = max(1, (exp_date - datetime.now().date()).days)
                    except ValueError:
                        days = 30  # fall back to ~1 month
                    move_annual = move * np.sqrt(365 / days)

                    return {
                        'implied_move_pct': round(move * 100, 2),
                        'annualized_pct': round(move_annual * 100, 2),
                        'straddle_price': round(float(straddle), 2),
                        'source': source,
                        'expiry': expirations[0],
                        'atm_strike': float(atm_call['strike']),
                    }
        except Exception:
            # Options data unavailable or malformed: try the sector table.
            pass

        try:
            sector = yf.Ticker(ticker).info.get('sector', '') or ''
            for sector_name, sector_move in SECTOR_IMPLIED_MOVES.items():
                if sector_name.lower() in sector.lower():
                    move = sector_move
                    source = f'sector_{sector_name}'
                    break
        except Exception:
            pass

        return {
            'implied_move_pct': round(move * 100, 2),
            # Quarterly event move scaled by sqrt(12 months) as a rough annual proxy.
            'annualized_pct': round(move * np.sqrt(12) * 100, 2),
            'source': source,
        }

    def historical_pead(self, ticker: str, lookback_years: int = 3) -> Dict:
        """Post-Earnings Announcement Drift analysis.

        Earnings events are proxied by days whose volume exceeds 2.5
        rolling (20-day) z-scores; drift is the 5-day forward return
        signed by the direction of the event-day overnight gap, i.e.
        continuation of the "surprise".

        Returns a dict with 'drift_score' (positive = surprises tend to
        continue), 'confidence' (0-1, scales with event count),
        'n_events', 'avg_overnight_gap', and a text 'interpretation'.
        """
        empty = {'drift_score': 0, 'confidence': 0, 'n_events': 0}
        try:
            # Fetch enough history to cover ~4 events/year.
            df = yf.Ticker(ticker).history(period=f"{lookback_years}y")
            if len(df) < 100:
                return dict(empty)

            # Volume z-score vs 20-day rolling stats flags likely event days.
            vol = df['Volume']
            df['volume_z'] = (vol - vol.rolling(20).mean()) / vol.rolling(20).std()
            spike_days = df[df['volume_z'] > 2.5].index
            if len(spike_days) < 4:
                return dict(empty)

            drifts = []
            gaps = []
            close = df['Close']
            for spike in spike_days:
                try:
                    idx = df.index.get_loc(spike)
                    # Need a prior close for the gap and 5 forward bars.
                    if idx == 0 or idx + 5 >= len(df):
                        continue
                    # Day +1 to +5 return after the event day.
                    post_ret = (close.iloc[idx + 5] / close.iloc[idx]) - 1
                    # Overnight gap into the event day — the "surprise" sign.
                    overnight_gap = (df['Open'].iloc[idx] / close.iloc[idx - 1]) - 1
                    drifts.append(np.sign(overnight_gap) * post_ret)
                    gaps.append(overnight_gap)
                except (KeyError, IndexError):
                    continue

            if len(drifts) < 3:
                return {'drift_score': 0, 'confidence': 0, 'n_events': len(spike_days)}

            drift_score = float(np.mean(drifts))
            # ~12 events = 3 years of quarters -> full confidence.
            confidence = min(1.0, len(drifts) / 12)

            return {
                'drift_score': round(drift_score, 4),
                'confidence': round(float(confidence), 2),
                'n_events': len(drifts),
                # BUGFIX: previously averaged the drifts, not the gaps.
                'avg_overnight_gap': round(float(np.mean(gaps)), 4),
                'interpretation': (
                    'Positive PEAD: earnings surprises tend to continue' if drift_score > 0.02 else
                    'Negative PEAD: post-earnings reversals typical' if drift_score < -0.02 else
                    'Weak PEAD pattern'
                ),
            }

        except Exception:
            return dict(empty)

    def earnings_position_size(self, base_size: float, days_to_earnings: Optional[int],
                               implied_move: float = 0.04) -> Dict:
        """Adjust position size for earnings proximity.

        Strategy:
        - D-30 to D-7: Full size (can position for earnings)
        - D-7 to D-3: Reduce to 50% (avoid theta decay, lock profits)
        - D-3 to D-1: Reduce to 25% (extreme event risk)
        - D-Day: 0% (do not hold into earnings)
        - D+1 to D+5: Can re-enter at 50% (capture PEAD)

        *days_to_earnings* may be None (no date detected) or negative
        (earnings already passed). The size is further reduced when the
        implied move is elevated (>6%) or high (>8%).
        """
        if days_to_earnings is None:
            return {
                'adjusted_size': base_size,
                'reduction': 0.0,
                'strategy': 'No earnings date detected — normal sizing',
            }

        if days_to_earnings > 7:
            adj = base_size
            strategy = 'Pre-earnings positioning window — full size'
        elif days_to_earnings > 3:
            adj = base_size * 0.5
            strategy = 'Reduce to 50% — earnings week risk building'
        elif days_to_earnings > 0:
            adj = base_size * 0.25
            strategy = 'Reduce to 25% — imminent earnings, extreme theta'
        elif days_to_earnings == 0:
            adj = 0.0
            strategy = 'DO NOT HOLD INTO EARNINGS — day-of closure'
        elif days_to_earnings >= -1:
            adj = base_size * 0.5
            strategy = 'Post-earnings PEAD window — 50% re-entry'
        elif days_to_earnings >= -5:
            adj = base_size * 0.5
            strategy = 'PEAD continuation — 50% size'
        else:
            adj = base_size
            strategy = 'Post-earnings quiet period — normal sizing'

        # Further haircut when options imply an outsized event move.
        if implied_move > 0.08:  # > 8% expected move
            adj *= 0.7
            strategy += ' | High implied move (>8%), further reduced'
        elif implied_move > 0.06:
            adj *= 0.85
            strategy += ' | Elevated implied move, slight reduction'

        return {
            'days_to_earnings': days_to_earnings,
            'implied_move_pct': round(implied_move * 100, 2),
            'base_size': round(base_size, 4),
            'adjusted_size': round(adj, 4),
            # Epsilon avoids division by zero when base_size == 0.
            'reduction': round(1 - (adj / (base_size + 1e-10)), 2),
            'strategy': strategy,
        }

    def full_analysis(self, ticker: str, base_size: float = 1.0) -> Dict:
        """Complete earnings intelligence for a ticker.

        Bundles the earnings-date estimate, implied move, PEAD analysis,
        and the resulting position-size recommendation into one dict.
        """
        days, ed = self.days_to_earnings(ticker)
        implied = self.implied_move(ticker)
        pead = self.historical_pead(ticker)

        sizing = self.earnings_position_size(
            base_size, days,
            implied.get('implied_move_pct', 4) / 100,
        )

        return {
            'ticker': ticker,
            'estimated_earnings_date': ed.strftime('%Y-%m-%d') if ed else 'unknown',
            'days_to_earnings': days,
            'implied_move': implied,
            'pead_analysis': pead,
            'position_sizing': sizing,
            'recommendation': sizing['strategy'],
        }
|
| 281 |
+
|
| 282 |
+
|
| 283 |
+
if __name__ == '__main__':
    # Demo: run the full earnings pipeline on a liquid large-cap name.
    analysis = EarningsModel().full_analysis('AAPL', base_size=1.0)
    sizing = analysis['position_sizing']
    print(f"Estimated Earnings: {analysis['estimated_earnings_date']}")
    print(f"Days Until: {analysis['days_to_earnings']}")
    print(f"Implied Move: {analysis['implied_move'].get('implied_move_pct', 'N/A')}%")
    print(f"PEAD Score: {analysis['pead_analysis'].get('drift_score', 0):.4f}")
    print(f"Position Sizing: {sizing['adjusted_size']*100:.0f}% ({sizing['strategy']})")
|