File size: 2,320 Bytes
2856356
 
 
 
 
 
a96f4dc
2856356
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
import re

from flax.core.frozen_dict import freeze
from flax.traverse_util import flatten_dict, unflatten_dict
from jax.experimental import PartitionSpec as P

# utils adapted from https://github.com/google-research/google-research/blob/master/flax_models/t5x/partitions.py
# Sentinels
# Unique marker for parameters that matched no partition rule; identity-based,
# so it can never collide with a real PartitionSpec value.
_unmatched = object()

# For specifying empty leaf dict `{}`
empty_dict = object()


def _match(qs, ks):
    """Return True if regexes in qs match any window of strings in tuple ks."""
    # compile regexes and force complete match
    qts = tuple(map(lambda x: re.compile(x + "$"), qs))
    for i in range(len(ks) - len(qs) + 1):
        matches = [x.match(y) for x, y in zip(qts, ks[i:])]
        if matches and all(matches):
            return True
    return False


def _replacement_rules(rules):
    """Build a lookup: (key, val) -> replacement of the first matching rule, else val.

    `rules` is an ordered sequence of (pattern_tuple, replacement) pairs; earlier
    rules take precedence. A matching rule's replacement may be None (meaning
    "replicate"), which is returned as-is.
    """
    def replace(key, val):
        # First matching rule wins; fall back to the caller-supplied value.
        return next((replacement for patterns, replacement in rules if _match(patterns, key)), val)

    return replace


def _get_partition_rules():
    return [
        # embeddings
        ((r"embed_positions", "embedding"), P("mp", None)),
        ((r"embed_tokens", "embedding"), P("mp", None)),
        # self-attention
        ((r"self_attn", "(q_proj|k_proj|v_proj)", "kernel"), P(None, "mp")),
        ((r"self_attn", "out_proj", "kernel"), P("mp", None)),
        # enc-dec attention
        ((r"encoder_attn", "(q_proj|k_proj|v_proj)", "kernel"), P(None, "mp")),
        ((r"encoder_attn", "out_proj", "kernel"), P("mp", None)),
        # FFN
        ((r"fc1", "kernel"), P(None, "mp")),
        ((r"fc2", "kernel"), P("mp", None)),
        # layer norms
        ((r"layernorm_embedding", "(bias|scale)"), None),
        ((r"self_attn_layer_norm", "(bias|scale)"), None),
        ((r"encoder_attn_layer_norm", "(bias|scale)"), None),
        ((r"final_layer_norm", "(bias|scale)"), None),
        ((r"lm_head", "kernel"), P(None, "mp")),
    ]


def set_partitions(in_dict):
    """Assign a PartitionSpec to every parameter in a nested params dict.

    Args:
        in_dict: nested parameter dict (e.g. a Flax params pytree).

    Returns:
        A frozen dict with the same structure whose leaves are PartitionSpecs,
        or None for replicated parameters.

    Raises:
        ValueError: if any parameter path matches none of the partition rules
            (the error lists the offending keys).
    """
    rules = _get_partition_rules()
    replace = _replacement_rules(rules)
    # Flatten to tuple-of-names keys so the rules can match windows of the path;
    # seed every leaf with the _unmatched sentinel so omissions are detectable.
    initd = {k: _unmatched for k in flatten_dict(in_dict)}
    result = {k: replace(k, v) for k, v in initd.items()}
    # Compare with `is`: _unmatched is an identity sentinel. Raise instead of
    # `assert` (which is stripped under -O) and name the keys in the error
    # rather than print()-ing them to stdout.
    unmatched = [k for k, v in result.items() if v is _unmatched]
    if unmatched:
        raise ValueError(f"Incomplete partition spec. Unmatched keys: {unmatched}")
    return freeze(unflatten_dict(result))