# developer: Taoshidev
# Copyright © 2023 Taoshi, LLC

import random

import numpy as np
from sklearn.preprocessing import MinMaxScaler

from mining_objects.xgb_mining_model import BaseMiningModel
from mining_objects.mining_utils import MiningUtils
from time_util.time_util import TimeUtil
from vali_objects.dataclasses.client_request import ClientRequest
from vali_config import ValiConfig

import bittensor as bt

# historical data doesn't have timestamps
data_structure = MiningUtils.get_file("/runnable/historical_financial_data/data.pickle", True)

# optional slicing of the dataset (curr_iter / iter_add would need to be defined)
# data_structure = [data_structure[0][curr_iter:curr_iter + iter_add],
#                   data_structure[1][curr_iter:curr_iter + iter_add],
#                   data_structure[2][curr_iter:curr_iter + iter_add],
#                   data_structure[3][curr_iter:curr_iter + iter_add],
#                   data_structure[4][curr_iter:curr_iter + iter_add]]

print(len(data_structure[0]))
print("start", TimeUtil.millis_to_timestamp(data_structure[0][0]))
print("end", TimeUtil.millis_to_timestamp(data_structure[0][len(data_structure[0]) - 1]))

# transpose to (samples, features), scale each feature to [0, 1], then transpose back
sds_ndarray = np.array(data_structure).T
scaler = MinMaxScaler(feature_range=(0, 1))
scaled_data = scaler.fit_transform(sds_ndarray)
scaled_data = scaled_data.T

# prepare the dataset and train the model as provided
prep_dataset = BaseMiningModel.base_model_dataset(scaled_data)
base_mining_model = BaseMiningModel(len(prep_dataset.T)).set_model_dir('./mining_models/xgbTrain.model')
base_mining_model.train(prep_dataset)  # , epochs=25
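
# --- Illustrative sketch, not part of the training flow above ---
# The script transposes the series-major data to (samples, features) so that
# MinMaxScaler fits each series independently; anything produced in that
# scaled space (e.g. model outputs) would be mapped back to original units
# with scaler.inverse_transform. The snippet below is a minimal, self-contained
# demonstration of that round trip; the toy array is an illustrative
# assumption, not data or an API from this repository.
import numpy as np
from sklearn.preprocessing import MinMaxScaler

toy_series_major = np.array([[1.0, 2.0, 3.0],      # e.g. series 0 (timestamps)
                             [10.0, 20.0, 40.0]])  # e.g. series 1 (prices)
toy_scaler = MinMaxScaler(feature_range=(0, 1))
toy_scaled = toy_scaler.fit_transform(toy_series_major.T).T    # scale each series to [0, 1]
toy_restored = toy_scaler.inverse_transform(toy_scaled.T).T    # back to original units
assert np.allclose(toy_series_major, toy_restored)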