from dataclasses import dataclass
from pathlib import Path
from typing import Union, List, Tuple

import torch
import torch.nn.functional as F
from huggingface_hub import PyTorchModelHubMixin
from torch import nn


class Dense(nn.Module):
    """Linear layer with Xavier-initialized weights and an optional activation.

    Extra keyword arguments are forwarded to the activation constructor
    (e.g. negative_slope for nn.LeakyReLU).
    """

    def __init__(self, input_dim, output_dim, bias=True, activation=nn.LeakyReLU, **kwargs):
        super().__init__()
        self.fc = nn.Linear(input_dim, output_dim, bias=bias)
        nn.init.xavier_uniform_(self.fc.weight)
        if bias:  # nn.Linear has no bias tensor when bias=False
            nn.init.constant_(self.fc.bias, 0.0)
        self.activation = activation(**kwargs) if activation is not None else None

    def forward(self, x):
        if self.activation is None:
            return self.fc(x)
        return self.activation(self.fc(x))


class Encoder(nn.Module):
    """Stack of Dense layers mapping input_dim down through dims to the latent size."""

    def __init__(self, input_dim, *dims):
        super().__init__()
        dims = (input_dim,) + dims
        self.layers = nn.Sequential(
            *[Dense(dims[i], dims[i + 1], negative_slope=0.4, inplace=True) for i in range(len(dims) - 1)]
        )

    def forward(self, x):
        return self.layers(x)


class Decoder(nn.Module):
    """Mirror of the Encoder, ending in a Sigmoid so outputs land in (0, 1)."""

    def __init__(self, output_dim, *dims):
        super().__init__()
        self.layers = nn.Sequential(
            *[Dense(dims[i], dims[i + 1], negative_slope=0.4, inplace=True) for i in range(len(dims) - 1)]
            + [Dense(dims[-1], output_dim, activation=nn.Sigmoid)]
        )

    def forward(self, x):
        return self.layers(x)
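

# A minimal shape sanity check for the encoder/decoder pair (an illustrative
# sketch; the 784-dim input and 2-dim latent match the defaults used below):
def _check_encoder_decoder_shapes():
    enc = Encoder(784, 256, 64, 16, 4, 2)  # 784 -> 256 -> 64 -> 16 -> 4 -> 2
    dec = Decoder(784, 2, 4, 16, 64, 256)  # 2 -> 4 -> 16 -> 64 -> 256 -> 784
    x = torch.randn(8, 784)
    z = enc(x)
    recon = dec(z)
    assert z.shape == (8, 2) and recon.shape == (8, 784)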


@dataclass
class AutoencoderConfig:
    input_dim: int = 784
    hidden_dims: Union[Tuple[int, ...], List[int]] = (256, 64, 16, 4, 2)
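
# The config may be passed as a dataclass instance or a plain dict, and extra
# keyword arguments override individual fields, e.g. (hypothetical sizes):
#   Autoencoder({'input_dim': 784, 'hidden_dims': (128, 32)})
#   Autoencoder(hidden_dims=(256, 64, 8))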


class Autoencoder(nn.Module, PyTorchModelHubMixin):
    """MLP autoencoder whose weights and config round-trip through the Hub mixin."""

    def __init__(self, config: Union[dict, AutoencoderConfig, None] = None, **kwargs):
        super().__init__()
        # Avoid a shared mutable default: build a fresh config when none is given.
        config = config if config is not None else AutoencoderConfig()
        self.config = AutoencoderConfig(**config) if isinstance(config, dict) else config
        self.config.__dict__.update(**kwargs)  # kwargs override individual fields

        self.encoder = Encoder(self.config.input_dim, *self.config.hidden_dims)
        self.decoder = Decoder(self.config.input_dim, *reversed(self.config.hidden_dims))

    def forward(self, x):
        x = x.flatten(1)
        latent = self.encoder(x)
        recon = self.decoder(latent)
        loss = F.mse_loss(recon, x)
        return recon, latent, loss

    def save_pretrained(self, save_directory, **kwargs):
        # Inject this model's own config so callers never pass it themselves.
        assert 'config' not in kwargs, \
            "save_pretrained passes the model config for you; please don't pass it yourself"
        super().save_pretrained(save_directory, config=self.config.__dict__, **kwargs)
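

# A hypothetical single optimization step using the reconstruction loss the
# forward pass already returns (a sketch, not part of the original module):
def _train_step(model, batch, optimizer):
    recon, latent, loss = model(batch)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    return loss.item()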


class MessageModel:
    """Toy model that hand-rolls the save_pretrained/from_pretrained contract."""

    def __init__(self, msg='hello, world'):
        self.msg = msg

    def __call__(self):
        print(self.msg)

    @classmethod
    def from_pretrained(cls, path):
        path = Path(path)
        msg_file_path = path / 'message.txt'
        assert msg_file_path.exists(), f'no message.txt found in {path}'
        msg = msg_file_path.read_text()
        return cls(msg)

    def save_pretrained(self, path):
        path = Path(path)
        path.mkdir(exist_ok=True, parents=True)
        msg_file_path = path / 'message.txt'
        msg_file_path.write_text(self.msg)
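

# End-to-end demo of both models. The directory names are arbitrary examples,
# and the from_pretrained round-trip assumes a huggingface_hub version in which
# PyTorchModelHubMixin feeds the saved config dict back into __init__.
if __name__ == '__main__':
    _check_encoder_decoder_shapes()

    model = Autoencoder()
    optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)
    fake_batch = torch.rand(32, 1, 28, 28)  # stand-in for a batch of MNIST images
    print('reconstruction loss:', _train_step(model, fake_batch, optimizer))

    model.save_pretrained('tmp/autoencoder')
    Autoencoder.from_pretrained('tmp/autoencoder')

    msg_model = MessageModel('hello, hub')
    msg_model.save_pretrained('tmp/message-model')
    MessageModel.from_pretrained('tmp/message-model')()  # prints 'hello, hub'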