aslawliet committed on
Commit
4612cbf
1 Parent(s): 3658d76

Update modeling_quietstar.py

Browse files
Files changed (1) hide show
  1. modeling_quietstar.py +7 -7
modeling_quietstar.py CHANGED
@@ -42,12 +42,12 @@ import torch.utils.checkpoint
42
  from torch import nn
43
  from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
44
 
45
- from ...activations import ACT2FN
46
- from ...cache_utils import Cache, DynamicCache
47
- from ...modeling_attn_mask_utils import _prepare_4d_causal_attention_mask, _prepare_4d_causal_attention_mask_for_sdpa
48
- from ...modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast, SequenceClassifierOutputWithPast
49
- from ...modeling_utils import PreTrainedModel
50
- from ...utils import (
51
  add_start_docstrings,
52
  add_start_docstrings_to_model_forward,
53
  is_flash_attn_2_available,
@@ -55,7 +55,7 @@ from ...utils import (
55
  logging,
56
  replace_return_docstrings,
57
  )
58
- from .configuration_mistral import MistralConfig
59
 
60
 
61
  if is_flash_attn_2_available():
 
42
  from torch import nn
43
  from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
44
 
45
+ from transformers.activations import ACT2FN
46
+ from transformers.cache_utils import Cache, DynamicCache
47
+ from transformers.modeling_attn_mask_utils import _prepare_4d_causal_attention_mask, _prepare_4d_causal_attention_mask_for_sdpa
48
+ from transformers.modeling_outputs import BaseModelOutputWithPast, CausalLMOutputWithPast, SequenceClassifierOutputWithPast
49
+ from transformers.modeling_utils import PreTrainedModel
50
+ from transformers.utils import (
51
  add_start_docstrings,
52
  add_start_docstrings_to_model_forward,
53
  is_flash_attn_2_available,
 
55
  logging,
56
  replace_return_docstrings,
57
  )
58
+ from .configuration_quietstar import MistralConfig
59
 
60
 
61
  if is_flash_attn_2_available():