# -*- coding: utf-8 -*-
# Notebook dependency install (originally a shell magic, not valid Python):
#   pip install boto3 pandas_ta tensorflow==2.16.1 keras==3.3.3 --quiet

# Utility libraries
import os

import numpy as np
import pandas as pd
import yfinance as yf
import pandas_ta as ta
import boto3

# Machine-learning libraries
import tensorflow as tf
import keras

# Charting
import matplotlib.pyplot as plt
import seaborn as sns

print(f"TensorFlow version: {tf.__version__}")
print(f"Keras version: {keras.__version__}")

# --- Data Creation ---

# Full list of NASDAQ-traded symbols; 'Test Issue' == 'N' drops test tickers.
data = pd.read_csv("http://www.nasdaqtrader.com/dynamic/SymDir/nasdaqtraded.txt", sep='|')
data_clean = data[data['Test Issue'] == 'N']
symbols = data_clean['NASDAQ Symbol'].tolist()
print('total number of symbols traded = {}'.format(len(symbols)))


def create_features(orig_data: pd.DataFrame) -> pd.DataFrame:
    """Add technical-analysis features to an OHLCV DataFrame.

    Expects columns 'Open', 'High', 'Low', 'Close', 'Volume'. Returns a new
    DataFrame with percentage price/volume changes, moving averages,
    Bollinger Bands, MACD, RSI, stochastic oscillator, annualised
    historical volatility, and the pandas_ta Supertrend direction column.
    Rows containing NaNs produced by the rolling windows are dropped.
    """
    data = orig_data.copy()  # work on a copy; never mutate the caller's frame
    close = data['Close']
    volume = data['Volume']

    # Percentage price and volume change over 1/3/5 bars.
    for n in (1, 3, 5):
        data[f'Price_Diff_{n}'] = (close - close.shift(n)) * 100 / close.shift(n)
        data[f'Volume_Diff_{n}'] = (volume - volume.shift(n)) * 100 / volume.shift(n)

    # Simple moving averages.
    data['MA_10'] = close.rolling(window=10).mean()
    data['MA_50'] = close.rolling(window=50).mean()

    # Annualised historical volatility (sqrt(252) assumes daily bars).
    data['Historical_Volatility'] = close.pct_change().rolling(window=20).std() * np.sqrt(252)

    # Bollinger Bands: 20-bar MA +/- 2 standard deviations.
    window = 20
    rolling_std = close.rolling(window=window).std()  # hoisted: used twice below
    data['MA'] = close.rolling(window=window).mean()
    data['BB_upper'] = data['MA'] + 2 * rolling_std
    data['BB_lower'] = data['MA'] - 2 * rolling_std

    # MACD: fast EMA(12) minus slow EMA(26).
    data['MACD'] = (close.ewm(span=12, adjust=False).mean()
                    - close.ewm(span=26, adjust=False).mean())

    # RSI over a 14-bar window (simple-moving-average variant).
    delta = close.diff()
    gain = delta.clip(lower=0).rolling(window=14).mean()
    loss = (-delta.clip(upper=0)).rolling(window=14).mean()
    rs = gain / loss
    data['RSI'] = 100 - (100 / (1 + rs))

    # Stochastic oscillator: close's position within the 14-bar high/low range.
    low_min = data['Low'].rolling(window=14).min()
    high_max = data['High'].rolling(window=14).max()
    data['Stochastic_Oscillator'] = (close - low_min) / (high_max - low_min)

    # Supertrend direction (+1/-1) from pandas_ta.
    supertrend = ta.supertrend(data['High'], data['Low'], close, length=7, multiplier=3.0)
    data['Supertrend'] = supertrend['SUPERTd_7_3.0']

    return data.dropna()


frames = []         # per-symbol feature frames; concatenated once at the end
total_rows = 0      # running row count (avoids O(n^2) repeated pd.concat in the loop)
max_rows = 2000000  # stop once this many feature rows have been collected

for idx, symbol in enumerate(symbols):
    print(f"Processing stock {symbol} at {idx} position with current records : {total_rows}")
    try:
        # auto_adjust=False keeps the 'Adj Close' column that the final
        # column ordering below depends on (newer yfinance adjusts by default
        # and drops it) — confirm against the installed yfinance version.
        data = yf.download(symbol, period='max', auto_adjust=False)
        if data.shape[0] == 0:
            continue
        data.reset_index(inplace=True)
        data['Symbol'] = symbol
        trimmed_data = data[-2000:].copy()  # keep at most the last 2000 bars

        # Drop rows whose OHLC equals the next row's OHLC (stale/repeated bars).
        shifted_df = trimmed_data.shift(-1)
        duplicates = ((trimmed_data['Open'] == shifted_df['Open'])
                      & (trimmed_data['High'] == shifted_df['High'])
                      & (trimmed_data['Low'] == shifted_df['Low'])
                      & (trimmed_data['Close'] == shifted_df['Close']))
        trimmed_data = trimmed_data[~duplicates]

        cleaned_data = create_features(trimmed_data)
        frames.append(cleaned_data)
        total_rows += cleaned_data.shape[0]

        if total_rows >= max_rows:
            print(f"Reached {max_rows} rows. Stopping.")
            break
    except Exception as exc:  # was a bare except: report the failure, keep going
        print("In except for ticker: ", symbol, "->", exc)

combined_data = pd.concat(frames, ignore_index=True) if frames else pd.DataFrame()

# Fixed column order: identifiers/OHLCV first, then indicators and derived features.
new_column_order = ['Symbol', 'Date', 'Open', 'High', 'Low', 'Close', 'Adj Close', 'Volume',
                    'MACD', 'RSI', 'Stochastic_Oscillator', 'Supertrend',
                    'Price_Diff_1', 'Price_Diff_3', 'Price_Diff_5',
                    'MA_10', 'MA_50', 'MA',
                    'Volume_Diff_1', 'Volume_Diff_3', 'Volume_Diff_5',
                    'Historical_Volatility', 'BB_upper', 'BB_lower']
combined_data = combined_data[new_column_order]