SergioMtz committed
Commit a0e9c41
1 Parent(s): 2aff23c

Update app.py

Files changed (1): app.py (+25, -2)
app.py CHANGED
@@ -1,4 +1,6 @@
 import tensorflow as tf
+from tensorflow.keras.optimizers import Adam
+from tensorflow.keras.optimizers.schedules import LearningRateSchedule
 import tensorflow_text as text
 from tensorflow.train import Checkpoint
 import pandas as pd
@@ -34,8 +36,29 @@ transformer = Transformer(vocab_size_enc = VOCAB_SIZE,
                           FFN_units = FFN_UNITS,
                           nb_proj = NB_PROJ,
                           dropout_rate = DROPOUT_RATE)
-
-ckpt = Checkpoint()
+
+class CustomSchedule(LearningRateSchedule):
+    def __init__(self, d_model, warmup_steps = 4000):
+        super(CustomSchedule, self).__init__()
+
+        self.d_model = tf.cast(d_model, tf.float32)
+        self.warmup_steps = warmup_steps
+
+    def __call__(self, step):
+        arg1 = tf.math.rsqrt(step)
+        arg2 = step * (self.warmup_steps**-1.5)
+
+        return tf.math.rsqrt(self.d_model) * tf.math.minimum(arg1, arg2)
+
+learning_rate = CustomSchedule(D_MODEL)
+
+optimizer = Adam(learning_rate,
+                 beta_1=0.9,
+                 beta_2=0.98,
+                 epsilon=1e-9)
+
+ckpt = tf.train.Checkpoint(transformer = transformer,
+                           optimizer = optimizer)
 ckpt.restore("ckpt-10")
 print("Checkpoint restored")
 
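For reference, the CustomSchedule added here is the warmup schedule from the original Transformer paper ("Attention Is All You Need", Vaswani et al., 2017), which is why Adam is constructed without a fixed learning rate:

    \mathrm{lrate} = d_{\mathrm{model}}^{-0.5} \cdot \min\left(\mathrm{step}^{-0.5},\; \mathrm{step} \cdot \mathrm{warmup\_steps}^{-1.5}\right)

The rate rises linearly over the first warmup_steps = 4000 updates, then decays with the inverse square root of the step number.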
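The restore call assumes the ckpt-10.index / ckpt-10.data-* files sit under the bare prefix "ckpt-10" in the working directory. Below is a minimal, more defensive sketch of the same restore step; the checkpoints/ directory name is hypothetical, and transformer and optimizer are assumed to be the objects built in app.py above:

    import tensorflow as tf

    # Same tracked objects as in the commit; transformer and optimizer are
    # assumed to be constructed as in app.py above.
    ckpt = tf.train.Checkpoint(transformer = transformer,
                               optimizer = optimizer)

    # tf.train.latest_checkpoint reads the "checkpoint" state file maintained
    # by Checkpoint.save / tf.train.CheckpointManager and returns the newest
    # prefix (e.g. "checkpoints/ckpt-10"), or None if nothing has been saved.
    latest = tf.train.latest_checkpoint("checkpoints")
    if latest is not None:
        # expect_partial() silences warnings about checkpointed values (such
        # as optimizer slot variables) that go unused at inference time.
        ckpt.restore(latest).expect_partial()
        print(f"Checkpoint restored from {latest}")
    else:
        print("No checkpoint found; using freshly initialized weights.")

Tracking the optimizer in the checkpoint also restores Adam's moment estimates, so interrupted training can resume exactly where it left off instead of re-warming those statistics from zero.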