weili-0234 committed on
Commit
0386403
1 Parent(s): c17eed9

Upload modeling_rwkv6.py


fix import error with flash linear attention

Files changed (1)
  1. modeling_rwkv6.py +1 -4
modeling_rwkv6.py CHANGED
@@ -17,7 +17,6 @@
 from dataclasses import dataclass
 from typing import List, Optional, Tuple, Union
 
-from pathlib import Path
 
 import torch
 import torch.nn.functional as F
@@ -31,14 +30,12 @@ from transformers.utils import (
     add_code_sample_docstrings,
     add_start_docstrings,
     add_start_docstrings_to_model_forward,
-    is_ninja_available,
-    is_torch_cuda_available,
     logging,
 )
 
 from .configuration_rwkv6 import Rwkv6Config
 try:
-    from fla.ops.rwkv6.recurrent_fuse import fused_recurrent_rwkv6
+    from fla.ops.rwkv6 import fused_recurrent_rwkv6
 except ImportError:
     print("Required module is not installed. Please install it using the following commands:")
     print("pip install -U git+https://github.com/sustcsonglin/flash-linear-attention")