weili-0234
committed on
Commit
•
4a4c25d
1
Parent(s):
b852487
Upload modeling_rwkv6.py
Browse files
fix import error with flash linear attention
- modeling_rwkv6.py +1 -1
modeling_rwkv6.py
CHANGED
@@ -38,7 +38,7 @@ from transformers.utils import (
|
|
38 |
|
39 |
from .configuration_rwkv6 import Rwkv6Config
|
40 |
try:
|
41 |
-
from fla.ops.rwkv6
|
42 |
except ImportError:
|
43 |
print("Required module is not installed. Please install it using the following commands:")
|
44 |
print("pip install -U git+https://github.com/sustcsonglin/flash-linear-attention")
|
|
|
38 |
|
39 |
from .configuration_rwkv6 import Rwkv6Config
|
40 |
try:
|
41 |
+
from fla.ops.rwkv6 import fused_recurrent_rwkv6
|
42 |
except ImportError:
|
43 |
print("Required module is not installed. Please install it using the following commands:")
|
44 |
print("pip install -U git+https://github.com/sustcsonglin/flash-linear-attention")
|