Commit eb48137 by regisss (HF staff)
Parent: 2af26f3

Replace relative imports

Files changed (1): modeling_llama.py (+7, -7)
modeling_llama.py CHANGED
@@ -28,17 +28,17 @@ import torch.utils.checkpoint
 from torch import nn
 from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
 
-from ...activations import ACT2FN
-from ...cache_utils import Cache, DynamicCache
-from ...modeling_outputs import (
+from transformers.activations import ACT2FN
+from transformers.cache_utils import Cache, DynamicCache
+from transformers.modeling_outputs import (
     BaseModelOutputWithPast,
     CausalLMOutputWithPast,
     QuestionAnsweringModelOutput,
     SequenceClassifierOutputWithPast,
 )
-from ...modeling_utils import PreTrainedModel
-from ...pytorch_utils import ALL_LAYERNORM_LAYERS
-from ...utils import (
+from transformers.modeling_utils import PreTrainedModel
+from transformers.pytorch_utils import ALL_LAYERNORM_LAYERS
+from transformers.utils import (
     add_start_docstrings,
     add_start_docstrings_to_model_forward,
     is_flash_attn_2_available,
@@ -46,7 +46,7 @@ from ...utils import (
     logging,
     replace_return_docstrings,
 )
-from .configuration_llama import LlamaConfig
+from transformers.models.llama.configuration_llama import LlamaConfig
 
 if is_flash_attn_2_available():
     from flash_attn import flash_attn_func, flash_attn_varlen_func
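
A likely motivation for the change: relative imports such as from ...activations import ACT2FN only resolve while modeling_llama.py lives inside the transformers package tree. A copy of the file distributed in a Hub repository and loaded as custom code executes outside that tree, where the three-dot imports raise an ImportError; absolute transformers.* imports resolve in both settings. A minimal sketch of loading such a standalone file, assuming the repo ships this modeling_llama.py as custom code (the repo id below is hypothetical):

from transformers import AutoModelForCausalLM

# "my-org/llama-standalone" is a hypothetical repo id standing in for any
# Hub repo that ships this modeling_llama.py alongside its weights.
model = AutoModelForCausalLM.from_pretrained(
    "my-org/llama-standalone",
    trust_remote_code=True,  # import the repo's modeling_llama.py instead of the built-in class
)

With absolute imports the file can also be imported directly (python -c "import modeling_llama" with transformers installed), which fails under the old relative imports with "attempted relative import with no known parent package".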