wmgifford committed on
Commit
d8adb46
1 Parent(s): be53f6b

Upload PatchTSTForRegression

Browse files
Files changed (3) hide show
  1. config.json +51 -0
  2. generation_config.json +4 -0
  3. model.safetensors +3 -0
config.json ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_name_or_path": "model/pretrained_masked",
3
+ "activation_function": "gelu",
4
+ "architectures": [
5
+ "PatchTSTForRegression"
6
+ ],
7
+ "attention_dropout": 0.0,
8
+ "bias": true,
9
+ "channel_attention": false,
10
+ "channel_consistent_masking": false,
11
+ "context_length": 512,
12
+ "d_model": 128,
13
+ "distribution_output": "normal",
14
+ "do_mask_input": false,
15
+ "dropout": 0.2,
16
+ "ff_dropout": 0.0,
17
+ "ffn_dim": 512,
18
+ "head_dropout": 0.2,
19
+ "init_std": 0.02,
20
+ "loss": null,
21
+ "mask_type": "random",
22
+ "mask_value": 0,
23
+ "model_type": "patchtst",
24
+ "norm_eps": 1e-05,
25
+ "norm_type": "batchnorm",
26
+ "num_attention_heads": 16,
27
+ "num_forecast_mask_patches": [
28
+ 2
29
+ ],
30
+ "num_hidden_layers": 3,
31
+ "num_input_channels": 6,
32
+ "num_parallel_samples": 100,
33
+ "num_targets": 1,
34
+ "output_range": null,
35
+ "patch_length": 12,
36
+ "patch_stride": 12,
37
+ "path_dropout": 0.0,
38
+ "pooling_type": "mean",
39
+ "positional_dropout": 0.0,
40
+ "positional_encoding_type": "sincos",
41
+ "pre_norm": true,
42
+ "prediction_length": 24,
43
+ "random_mask_ratio": 0.4,
44
+ "scaling": "std",
45
+ "share_embedding": true,
46
+ "share_projection": true,
47
+ "torch_dtype": "float32",
48
+ "transformers_version": "4.37.0.dev0",
49
+ "unmasked_channel_indices": null,
50
+ "use_cls_token": false
51
+ }
generation_config.json ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "transformers_version": "4.37.0.dev0"
4
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cd38495b0fa7e254dac74056f954c027327b9879c1ec36039a611475e6897c00
3
+ size 2428024