WebashalarForML committed
Commit 7ce541d
1 Parent(s): fcd0a70

Update backup/model.py

Files changed (1)
  1. backup/model.py +6 -6
backup/model.py CHANGED

@@ -5,11 +5,11 @@ import re
 from typing import Dict, Optional, Union
 import torch
 import torch.nn.functional as F
-from modules.layers import LstmSeq2SeqEncoder
-from modules.base import InstructBase
-from modules.evaluator import Evaluator, greedy_search
-from modules.span_rep import SpanRepLayer
-from modules.token_rep import TokenRepLayer
+from .modules.layers import LstmSeq2SeqEncoder
+from .modules.base import InstructBase
+from .modules.evaluator import Evaluator, greedy_search
+from .modules.span_rep import SpanRepLayer
+from .modules.token_rep import TokenRepLayer
 from torch import nn
 from torch.nn.utils.rnn import pad_sequence
 from huggingface_hub import PyTorchModelHubMixin, hf_hub_download

@@ -320,7 +320,7 @@ class GLiNER(InstructBase, PyTorchModelHubMixin):
         return model

         # 2. Newer format: Use "pytorch_model.bin" and "gliner_config.json"
-        from train import load_config_as_namespace
+        from .train import load_config_as_namespace

         model_file = Path(model_id) / "pytorch_model.bin"
         if not model_file.exists():
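In effect, the commit switches backup/model.py from top-level imports (which only resolve when the repository root is on sys.path) to package-relative imports, so the file can be loaded as part of a package. A minimal usage sketch under that assumption; the package name backup, its __init__.py, and the checkpoint id are illustrative and not taken from this commit:

    # Assumes backup/ is importable as a package (has an __init__.py) and contains
    # model.py, train.py and the modules/ subpackage, so the new relative imports
    # ("from .modules.layers import ...") resolve against the package itself.
    from backup.model import GLiNER

    # PyTorchModelHubMixin (inherited by GLiNER, per the hunk header above)
    # provides from_pretrained(); the model id here is only an example.
    model = GLiNER.from_pretrained("urchade/gliner_base")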