asenella committed
Commit 04f73b4
1 Parent(s): d9e7863

Uploading MVTCAE in asenella/translated_mmnist_resnets_2_MVTCAE_xz4byb9z

Files changed (6)
  1. README.md +13 -0
  2. decoders.pkl +3 -0
  3. encoders.pkl +3 -0
  4. environment.json +1 -0
  5. model.pt +3 -0
  6. model_config.json +1 -0
README.md ADDED
@@ -0,0 +1,13 @@
+ ---
+ language: en
+ tags:
+ - multivae
+ license: apache-2.0
+ ---
+
+ ### Downloading this model from the Hub
+ This model was trained with multivae. It can be downloaded or reloaded using the method `load_from_hf_hub`
+ ```python
+ >>> from multivae.models import AutoModel
+ >>> model = AutoModel.load_from_hf_hub(hf_hub_path="your_hf_username/repo_name")
+ ```
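The README snippet uses a placeholder repo name; for the model uploaded in this commit, the path from the commit message can be substituted directly. A minimal sketch, assuming the repository is public and `multivae` is installed:

```python
from multivae.models import AutoModel

# Reload the MVTCAE model uploaded in this commit.
# The hf_hub_path below is taken from the commit message; access is assumed
# to be public (otherwise authenticate with the Hugging Face Hub first).
model = AutoModel.load_from_hf_hub(
    hf_hub_path="asenella/translated_mmnist_resnets_2_MVTCAE_xz4byb9z"
)
```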
decoders.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3459d686cc5dd9886403cf916ad8cf04fb4639bb4a822cb47dc351ed5026e391
+ size 63894015
encoders.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ad977f3d07ad720638c26d49f21b5f1b9cc61fe917a0c39212d2ba9c69c5ae7b
+ size 113836471
environment.json ADDED
@@ -0,0 +1 @@
+ {"name": "EnvironmentConfig", "python_version": "3.10"}
model.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:271ba62bbe166927332795717587055165be968d0e189da8c8106ae22740f3c7
+ size 177646002
model_config.json ADDED
@@ -0,0 +1 @@
+ {"name": "MVTCAEConfig", "n_modalities": 5, "latent_dim": 200, "input_dims": {"m0": [3, 28, 28], "m1": [3, 28, 28], "m2": [3, 28, 28], "m3": [3, 28, 28], "m4": [3, 28, 28]}, "uses_likelihood_rescaling": false, "rescale_factors": null, "decoders_dist": {"m0": "laplace", "m1": "laplace", "m2": "laplace", "m3": "laplace", "m4": "laplace"}, "decoder_dist_params": {"m0": {"scale": 0.75}, "m1": {"scale": 0.75}, "m2": {"scale": 0.75}, "m3": {"scale": 0.75}, "m4": {"scale": 0.75}}, "logits_to_std": "softplus", "custom_architectures": ["encoders", "decoders"], "alpha": 0.8333333333333334, "beta": 1.0}