Commit
•
be6286d
1
Parent(s):
b852487
Upload modeling_rwkv6.py (#2)
Browse files — Upload modeling_rwkv6.py (4a4c25d9a6f7de42c793d47af833661abd67646f)
Co-authored-by: Weili Xu <weili-0234@users.noreply.huggingface.co>
- modeling_rwkv6.py +1 -1
modeling_rwkv6.py
CHANGED
@@ -38,7 +38,7 @@ from transformers.utils import (

 from .configuration_rwkv6 import Rwkv6Config
 try:
-    from fla.ops.rwkv6
+    from fla.ops.rwkv6 import fused_recurrent_rwkv6
 except ImportError:
     print("Required module is not installed. Please install it using the following commands:")
     print("pip install -U git+https://github.com/sustcsonglin/flash-linear-attention")