{ "_name_or_path": "model/pretrained_masked", "activation_function": "gelu", "architectures": [ "PatchTSTForRegression" ], "attention_dropout": 0.0, "bias": true, "channel_attention": false, "channel_consistent_masking": false, "context_length": 512, "d_model": 128, "distribution_output": "normal", "do_mask_input": false, "dropout": 0.2, "ff_dropout": 0.0, "ffn_dim": 512, "head_dropout": 0.2, "init_std": 0.02, "loss": null, "mask_type": "random", "mask_value": 0, "model_type": "patchtst", "norm_eps": 1e-05, "norm_type": "batchnorm", "num_attention_heads": 16, "num_forecast_mask_patches": [ 2 ], "num_hidden_layers": 3, "num_input_channels": 6, "num_parallel_samples": 100, "num_targets": 1, "output_range": null, "patch_length": 12, "patch_stride": 12, "path_dropout": 0.0, "pooling_type": "mean", "positional_dropout": 0.0, "positional_encoding_type": "sincos", "pre_norm": true, "prediction_length": 24, "random_mask_ratio": 0.4, "scaling": "std", "share_embedding": true, "share_projection": true, "torch_dtype": "float32", "transformers_version": "4.37.0.dev0", "unmasked_channel_indices": null, "use_cls_token": false }