# Copyright 2022 The T5X Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for t5x.checkpoint_importer."""

from absl.testing import absltest
from t5x import checkpoint_importer


class CheckpointImporterTest(absltest.TestCase):

  def test_rel_embeddings_shared_layers(self):
    # This represents a ckpt where Mesh TensorFlow's
    # transformer_layers.SelfAttention.relative_attention_type = "bias_shared",
    # i.e., the same relative attention parameters are shared by all layers
    # within the (en|de)coder.
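    # Integer stand-ins replace real parameter arrays here: the importer only
    # remaps checkpoint keys, so values should pass through unchanged and the
    # mapping can be checked with a plain dict equality assertion.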
    ckpt_data = {
        'encoder/block_000/layer_000/SelfAttention/relative_attention_bias':
            1,
        'decoder/block_000/layer_000/SelfAttention/relative_attention_bias':
            2,
        'decoder/block_000/layer_000/SelfAttention/relative_attention_bias_slot_v':
            3,
    }
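    # The *_slot_v entry is an optimizer slot variable (Adafactor's "v"
    # accumulator in Mesh TF checkpoints); note in `expected` below that it
    # maps into the state/param_states subtree rather than target/.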
    t5_data = checkpoint_importer.t5_importer.apply(ckpt_data)
    t5_data = checkpoint_importer._maybe_correct_relpos_bias(t5_data)
    expected = {
        'target/encoder/relpos_bias/rel_embedding': 1,
        'target/decoder/relpos_bias/rel_embedding': 2,
        'state/param_states/decoder/relpos_bias/rel_embedding/v': 3,
    }
    self.assertEqual(t5_data, expected)

  def test_rel_embeddings_per_layer(self):
    # This represents a ckpt where Mesh TensorFlow's
    # transformer_layers.SelfAttention.relative_attention_type = "bias", i.e.,
    # each layer has its own relative attention parameters.
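    # The block_011 entry checks that multi-digit block indices survive the
    # renaming, mapping to layers_11 in the flattened key.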
    ckpt_data = {
        'encoder/block_000/layer_000/SelfAttention/relative_attention_bias':
            1,
        'encoder/block_001/layer_000/SelfAttention/relative_attention_bias':
            2,
        'decoder/block_000/layer_000/SelfAttention/relative_attention_bias':
            3,
        'decoder/block_000/layer_000/SelfAttention/relative_attention_bias_slot_v':
            4,
        'decoder/block_011/layer_000/SelfAttention/relative_attention_bias':
            5,
    }
    t5_data = checkpoint_importer.t5_importer.apply(ckpt_data)
    t5_data = checkpoint_importer._maybe_correct_relpos_bias(t5_data)
    expected = {
        'target/encoder/layers_0/relpos_bias/rel_embedding': 1,
        'target/encoder/layers_1/relpos_bias/rel_embedding': 2,
        'target/decoder/layers_0/relpos_bias/rel_embedding': 3,
        'state/param_states/decoder/layers_0/relpos_bias/rel_embedding/v': 4,
        'target/decoder/layers_11/relpos_bias/rel_embedding': 5,
    }
    self.assertEqual(t5_data, expected)


if __name__ == '__main__':
  absltest.main()