kashif (HF staff) committed on
Commit ab18a66
1 Parent(s): c69fc66

Upload TimeSeriesTransformerForPrediction

Files changed (2):
  1. config.json +10 -10
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -6,24 +6,24 @@
   ],
   "attention_dropout": 0.1,
   "cardinality": [
-    862
+    0
   ],
-  "context_length": 96,
-  "d_model": 32,
+  "context_length": 24,
+  "d_model": 16,
   "decoder_attention_heads": 2,
   "decoder_ffn_dim": 32,
   "decoder_layerdrop": 0.1,
-  "decoder_layers": 4,
+  "decoder_layers": 2,
   "distribution_output": "student_t",
   "dropout": 0.1,
   "embedding_dimension": [
-    2
+    0
   ],
   "encoder_attention_heads": 2,
   "encoder_ffn_dim": 32,
   "encoder_layerdrop": 0.1,
-  "encoder_layers": 4,
-  "feature_size": 49,
+  "encoder_layers": 2,
+  "feature_size": 47,
   "init_std": 0.02,
   "input_size": 1,
   "is_encoder_decoder": true,
@@ -73,11 +73,11 @@
   "model_type": "time_series_transformer",
   "num_dynamic_real_features": 0,
   "num_parallel_samples": 100,
-  "num_static_categorical_features": 1,
+  "num_static_categorical_features": 0,
   "num_static_real_features": 0,
   "num_time_features": 5,
-  "prediction_length": 48,
-  "scaling": "mean",
+  "prediction_length": 24,
+  "scaling": "none",
   "torch_dtype": "float32",
   "transformers_version": "4.30.0.dev0",
   "use_cache": true
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:9a551f9a2b38e200fe66acb2340b4143bc6dc830cee70b482c6cd7fe77b14758
- size 394342
+ oid sha256:f71a8306f471ba58645ee0fa2d441938089b4489171f23e0f9f6e55fcce74b4f
+ size 90196
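The pytorch_model.bin entry is a Git LFS pointer (spec v1: oid and size), not the weight file itself. A small sketch, assuming the checkpoint has already been downloaded to the current directory, for checking a local copy against the new pointer:

# Sketch only: verify a downloaded pytorch_model.bin against the LFS pointer above.
# The local path is an assumption, not part of this commit.
import hashlib
from pathlib import Path

EXPECTED_SHA256 = "f71a8306f471ba58645ee0fa2d441938089b4489171f23e0f9f6e55fcce74b4f"
EXPECTED_SIZE = 90196  # bytes, from the pointer's "size" line

data = Path("pytorch_model.bin").read_bytes()  # assumed local download location
assert len(data) == EXPECTED_SIZE, f"size mismatch: {len(data)} bytes"
assert hashlib.sha256(data).hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer in this commit")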