import os

import pytorch_lightning as L
import torch
from hydra.utils import instantiate

from models.huggingface import Geolocalizer

class EvalModule(L.LightningModule):
    """Lightning module that evaluates the pretrained `osv5m/baseline` geolocalizer."""

    def __init__(self, cfg):
        super().__init__()
        self.cfg = cfg
        # Run from the configured project root so relative paths in the config resolve.
        os.chdir(cfg.network.root_dir)
        self.model = Geolocalizer.from_pretrained("osv5m/baseline")
        self.test_metrics = instantiate(cfg.test_metrics)

    def training_step(self, batch, batch_idx):
        # Evaluation-only module: run the forward pass but return no loss,
        # so Lightning skips the optimizer step for this batch.
        self.model(batch)

    @torch.no_grad()
    def validation_step(self, batch, batch_idx):
        # No validation metrics are tracked; only the forward pass is run.
        self.model(batch)

    def on_validation_epoch_end(self):
        pass

    @torch.no_grad()
    def test_step(self, batch, batch_idx):
        # Predict GPS coordinates and accumulate them into the test metrics.
        pred = self.model.forward_tensor(batch)
        self.test_metrics.update({"gps": pred}, batch)

    def on_test_epoch_end(self):
        # Aggregate the accumulated test metrics and log them once at epoch end.
        metrics = self.test_metrics.compute()
        for metric_name, metric_value in metrics.items():
            self.log(
                f"test/{metric_name}",
                metric_value,
                sync_dist=True,
                on_step=False,
                on_epoch=True,
            )

    def lr_scheduler_step(self, scheduler, metric):
        # Step schedulers by global step rather than by epoch (unused during pure evaluation).
        scheduler.step(self.global_step)

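
# Usage sketch (illustrative, not part of the original module): how EvalModule
# could be run with a Lightning Trainer. The `datamodule` argument and the
# trainer settings are assumptions, not values taken from this repository's configs.
def run_evaluation(cfg, datamodule):
    """Evaluate the pretrained baseline on the datamodule's test dataloader."""
    module = EvalModule(cfg)
    trainer = L.Trainer(devices=1, logger=False)
    return trainer.test(module, datamodule=datamodule)
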

def get_parameter_names(model, forbidden_layer_types):
    """

    Returns the names of the model parameters that are not inside a forbidden layer.

    Taken from HuggingFace transformers.

    """
    result = []
    for name, child in model.named_children():
        result += [
            f"{name}.{n}"
            for n in get_parameter_names(child, forbidden_layer_types)
            if not isinstance(child, tuple(forbidden_layer_types))
        ]
    # Add model specific parameters (defined with nn.Parameter) since they are not in any child.
    result += list(model._parameters.keys())
    return result
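

# Usage sketch (illustrative, not part of the original module): the way
# get_parameter_names is used in HuggingFace's Trainer to exclude LayerNorm
# weights and biases from weight decay. The grouping helper below and its
# default weight_decay value are placeholders, not settings from this repository.
def build_weight_decay_groups(model, weight_decay=0.01):
    """Split parameters into weight-decay and no-weight-decay optimizer groups."""
    decay_parameters = get_parameter_names(model, [torch.nn.LayerNorm])
    decay_parameters = [name for name in decay_parameters if "bias" not in name]
    return [
        {
            "params": [p for n, p in model.named_parameters() if n in decay_parameters],
            "weight_decay": weight_decay,
        },
        {
            "params": [p for n, p in model.named_parameters() if n not in decay_parameters],
            "weight_decay": 0.0,
        },
    ]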