"""Preprocess the AstroM3 dataset (spectra, photometry, and metadata)
and push the processed version to the Hugging Face Hub."""
from collections import defaultdict

import datasets
import numpy as np
from datasets import load_dataset
from scipy import stats


METADATA_FUNC = {
    "abs": [
        "mean_vmag",
        "phot_g_mean_mag",
        "phot_bp_mean_mag",
        "phot_rp_mean_mag",
        "j_mag",
        "h_mag",
        "k_mag",
        "w1_mag",
        "w2_mag",
        "w3_mag",
        "w4_mag",
    ],
    "cos": ["l"],
    "sin": ["b"],
    "log": ["period"],
}
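# How these groups are used (see transform_metadata below): "abs" columns are
# converted from apparent to absolute magnitude via the standard relation
# M = m + 5 * log10(parallax [mas]) - 10; the coordinate columns "l" and "b"
# are mapped through cos/sin so angles wrap continuously; "period" is
# log10-compressed.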


def preprocess_spectra(example):
    """
    Preprocess spectral data. Steps:
    - Interpolate flux and flux error onto a fixed wavelength grid (3850-9000 Å, 2 Å steps).
    - Normalize flux using the mean and the median absolute deviation (MAD).
    - Append log10(MAD) as an auxiliary feature channel.
    """
    spectra = example['spectra']
    wavelengths = spectra[:, 0]
    flux = spectra[:, 1]
    flux_err = spectra[:, 2]

    new_wavelengths = np.arange(3850, 9000, 2)
    flux = np.interp(new_wavelengths, wavelengths, flux)
    flux_err = np.interp(new_wavelengths, wavelengths, flux_err)

    mean = np.mean(flux)
    mad = stats.median_abs_deviation(flux[flux != 0])  # ignore zero-valued bins

    flux = (flux - mean) / mad
    flux_err = flux_err / mad
    aux_values = np.full_like(flux, np.log10(mad))

    spectra = np.vstack([flux, flux_err, aux_values])
    example['spectra'] = spectra

    return example
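# Minimal shape check (illustrative, synthetic input; not dataset output):
# >>> wl = np.linspace(3800, 9100, 1000)
# >>> spec = np.column_stack([wl, np.sin(wl / 100) + 2, np.full(1000, 0.1)])
# >>> preprocess_spectra({'spectra': spec})['spectra'].shape
# (3, 2575)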


def preprocess_lc(example):
    """
    Preprocess photometry (light curve) data. Steps:
    - Remove duplicate time entries.
    - Sort observations by Heliocentric Julian Date (HJD).
    - Normalize flux and flux error using the mean and the median absolute deviation (MAD).
    - Scale time values to [0, 1].
    - Append the photometric metadata columns plus log10(MAD) and the time span
      delta_t (in years) as per-observation auxiliary features.
    """
    X = example['photometry']
    aux_values = np.stack(list(example['metadata']['photo_cols'].values()))

    # Drop duplicate rows (np.unique also sorts rows lexicographically).
    X = np.unique(X, axis=0)

    # Sort by time (column 0).
    sorted_indices = np.argsort(X[:, 0])
    X = X[sorted_indices]

    # Normalize flux (column 1) by mean and MAD; scale flux error (column 2) by MAD.
    mean = X[:, 1].mean()
    mad = stats.median_abs_deviation(X[:, 1])
    X[:, 1] = (X[:, 1] - mean) / mad
    X[:, 2] = X[:, 2] / mad

    # Time span of the light curve, in years.
    delta_t = (X[:, 0].max() - X[:, 0].min()) / 365

    # Scale time to [0, 1].
    X[:, 0] = (X[:, 0] - X[:, 0].min()) / (X[:, 0].max() - X[:, 0].min())

    # Broadcast the auxiliary features to every observation and append them as columns.
    aux_values = np.concatenate((aux_values, [np.log10(mad), delta_t]))
    aux_values = np.tile(aux_values, (X.shape[0], 1))
    X = np.concatenate((X, aux_values), axis=-1)

    example['photometry'] = X
    return example
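# Resulting per-observation layout: [time, flux, flux_err, <photo_cols...>,
# log10(mad), delta_t]. The Array2D(shape=(None, 9)) cast in main() implies
# four photo_cols entries in this dataset.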


def transform_metadata(example):
    """
    Transform the metadata of an example according to METADATA_FUNC.
    """
    metadata = example["metadata"]

    # Apparent -> absolute magnitude: M = m + 5 * log10(parallax) - 10.
    # Non-positive parallaxes are clamped to 1, so log10(...) = 0 and M = m - 10.
    for col in METADATA_FUNC["abs"]:
        if col in metadata["meta_cols"]:
            metadata["meta_cols"][col] = (
                metadata["meta_cols"][col]
                - 10
                + 5 * np.log10(np.where(metadata["meta_cols"]["parallax"] <= 0, 1, metadata["meta_cols"]["parallax"]))
            )

    for col in METADATA_FUNC["cos"]:
        if col in metadata["meta_cols"]:
            metadata["meta_cols"][col] = np.cos(np.radians(metadata["meta_cols"][col]))

    for col in METADATA_FUNC["sin"]:
        if col in metadata["meta_cols"]:
            metadata["meta_cols"][col] = np.sin(np.radians(metadata["meta_cols"][col]))

    for col in METADATA_FUNC["log"]:
        if col in metadata["photo_cols"]:
            metadata["photo_cols"][col] = np.log10(metadata["photo_cols"][col])

    example["metadata"] = metadata
    return example
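# Worked example with hypothetical values: parallax = 2.0 mas and
# phot_g_mean_mag = 12.0 give 12.0 - 10 + 5 * np.log10(2.0) ≈ 3.51.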


def compute_metadata_stats(ds):
    """
    Compute the mean and standard deviation of each column in meta_cols and photo_cols.
    """
    meta_vals = defaultdict(list)
    photo_vals = defaultdict(list)

    for example in ds:
        meta = example["metadata"]["meta_cols"]
        photo = example["metadata"]["photo_cols"]
        for col, value in meta.items():
            meta_vals[col].append(value)
        for col, value in photo.items():
            photo_vals[col].append(value)

    # Named `info` rather than `stats` to avoid shadowing the scipy.stats import.
    info = {"meta_cols": {}, "photo_cols": {}}
    for col, values in meta_vals.items():
        arr = np.stack(values)
        info["meta_cols"][col] = {"mean": arr.mean(), "std": arr.std()}
    for col, values in photo_vals.items():
        arr = np.stack(values)
        info["photo_cols"][col] = {"mean": arr.mean(), "std": arr.std()}

    return info
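# Returned structure (column names illustrative):
# {"meta_cols": {"phot_g_mean_mag": {"mean": ..., "std": ...}, ...},
#  "photo_cols": {"period": {"mean": ..., "std": ...}, ...}}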


def normalize_metadata(example, info):
    """
    Normalize metadata values using z-score normalization: (value - mean) / std.

    The `info` argument is the dictionary of per-column means and standard
    deviations returned by compute_metadata_stats().
    """
    metadata = example["metadata"]

    for col, value in metadata["meta_cols"].items():
        mean = info["meta_cols"][col]["mean"]
        std = info["meta_cols"][col]["std"]
        metadata["meta_cols"][col] = (value - mean) / std

    for col, value in metadata["photo_cols"].items():
        mean = info["photo_cols"][col]["mean"]
        std = info["photo_cols"][col]["std"]
        metadata["photo_cols"][col] = (value - mean) / std

    example["metadata"] = metadata
    return example


def preprocess_metadata(example):
    """
    Stack the values from 'meta_cols' into a single numpy array.
    """
    example["metadata"] = np.stack(list(example["metadata"]["meta_cols"].values()))
    return example
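# After this step 'metadata' is a flat float vector; its length (34, per the
# Sequence cast in main()) equals the number of meta_cols in the dataset.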


def main():
    """
    Main function for processing and uploading datasets.

    - Loads each dataset subset for every random seed.
    - Applies preprocessing for spectra, photometry, and metadata.
    - Casts columns to the appropriate feature types.
    - Pushes the processed dataset to the Hugging Face Hub.
    """
    for sub in ["sub10", "sub25", "sub50", "full"]:
        for seed in [42, 66, 0, 12, 123]:
            name = f"{sub}_{seed}"
            print(f"Processing: {name}")

            ds = load_dataset('MeriDK/AstroM3Dataset', name=name, trust_remote_code=True, num_proc=16)
            ds = ds.with_format('numpy')

            # Transform metadata, then normalize with statistics computed on the train split only.
            ds = ds.map(transform_metadata, num_proc=16)
            info = compute_metadata_stats(ds['train'])
            ds = ds.map(lambda example: normalize_metadata(example, info))

            # Spectra: 3 channels (flux, flux_err, log10(MAD)) x 2575 wavelength bins.
            ds = ds.map(preprocess_spectra, num_proc=16)
            ds = ds.cast_column('spectra', datasets.Array2D(shape=(3, 2575), dtype='float32'))

            # Photometry: variable-length sequences of 9 features per observation.
            ds = ds.map(preprocess_lc, num_proc=16)
            ds = ds.cast_column('photometry', datasets.Array2D(shape=(None, 9), dtype='float32'))

            # Metadata: fixed-length vector of stacked meta_cols values.
            ds = ds.map(preprocess_metadata, num_proc=16)
            ds = ds.cast_column('metadata', datasets.Sequence(feature=datasets.Value('float32'), length=34))

            ds = ds.cast_column('label', datasets.ClassLabel(
                names=['DSCT', 'EA', 'EB', 'EW', 'HADS', 'M', 'ROT', 'RRAB', 'RRC', 'SR']))

            ds.push_to_hub('MeriDK/AstroM3Processed', config_name=name)


if __name__ == '__main__':
    main()