Satandon1999 committed
Commit f5a1a1b • Parent(s): f80aaa3

Update positional_embedding.py
Fix based on the discussion here: https://huggingface.co/microsoft/Phi-3-small-8k-instruct/discussions/11

positional_embedding.py CHANGED (+2 -2)
@@ -269,10 +269,10 @@ class RotaryEmbedding(torch.nn.Module):
         return (
             apply_rotary_pos_emb(
                 q, cos_cached[seqlen_offset:seq_len], sin_cached[seqlen_offset:seq_len], seq_dimension=seq_dimension
-            ),
+            ).to(q.dtype),
             apply_rotary_pos_emb(
                 k, cos_cached[seqlen_offset:seq_len], sin_cached[seqlen_offset:seq_len], seq_dimension=seq_dimension
-            ),
+            ).to(q.dtype),
         )

     @classmethod
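For context, the added `.to(q.dtype)` casts appear to address a dtype mismatch: when the cached cos/sin tables are kept in a wider dtype (e.g. float32) while q and k are fp16/bf16, the rotation silently upcasts the returned tensors. Below is a minimal sketch illustrating that behaviour, assuming a standard rotate-half rotary formulation; the helper names and shapes are illustrative and are not the repository's actual implementation.

```python
import torch

def rotate_half(x: torch.Tensor) -> torch.Tensor:
    # Standard rotary helper: split the last dimension in half and swap with a sign flip.
    x1, x2 = x.chunk(2, dim=-1)
    return torch.cat((-x2, x1), dim=-1)

def apply_rotary_pos_emb_sketch(x: torch.Tensor, cos: torch.Tensor, sin: torch.Tensor) -> torch.Tensor:
    # bf16 * fp32 promotes to fp32, so the output dtype follows the cos/sin caches, not x.
    return x * cos + rotate_half(x) * sin

q = torch.randn(1, 4, 8, dtype=torch.bfloat16)    # query in half precision
cos = torch.randn(1, 1, 8, dtype=torch.float32)   # cached tables in float32
sin = torch.randn(1, 1, 8, dtype=torch.float32)

out = apply_rotary_pos_emb_sketch(q, cos, sin)
print(out.dtype)              # torch.float32 -- dtype silently widened
print(out.to(q.dtype).dtype)  # torch.bfloat16 -- the cast added in this commit restores it
```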