SunderAli17 committed on
Commit
75891cd
1 Parent(s): 6a86c90

Delete eva_clip/flux/modules/conditioner.py

eva_clip/flux/modules/conditioner.py DELETED
@@ -1,37 +0,0 @@
- from torch import Tensor, nn
- from transformers import CLIPTextModel, CLIPTokenizer, T5EncoderModel, T5Tokenizer
-
-
- class HFEmbedder(nn.Module):
-     def __init__(self, version: str, max_length: int, **hf_kwargs):
-         super().__init__()
-         self.is_clip = version.startswith("openai")
-         self.max_length = max_length
-         self.output_key = "pooler_output" if self.is_clip else "last_hidden_state"
-
-         if self.is_clip:
-             self.tokenizer: CLIPTokenizer = CLIPTokenizer.from_pretrained(version, max_length=max_length)
-             self.hf_module: CLIPTextModel = CLIPTextModel.from_pretrained(version, **hf_kwargs)
-         else:
-             self.tokenizer: T5Tokenizer = T5Tokenizer.from_pretrained(version, max_length=max_length)
-             self.hf_module: T5EncoderModel = T5EncoderModel.from_pretrained(version, **hf_kwargs)
-
-         self.hf_module = self.hf_module.eval().requires_grad_(False)
-
-     def forward(self, text: list[str]) -> Tensor:
-         batch_encoding = self.tokenizer(
-             text,
-             truncation=True,
-             max_length=self.max_length,
-             return_length=False,
-             return_overflowing_tokens=False,
-             padding="max_length",
-             return_tensors="pt",
-         )
-
-         outputs = self.hf_module(
-             input_ids=batch_encoding["input_ids"].to(self.hf_module.device),
-             attention_mask=None,
-             output_hidden_states=False,
-         )
-         return outputs[self.output_key]
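
For reference, below is a minimal usage sketch of the removed HFEmbedder class, as it could be called before this deletion. It assumes the module is still importable from eva_clip.flux.modules.conditioner; the checkpoint names and max_length values are illustrative placeholders for typical CLIP and T5 text encoders, not taken from this repository.

# Hypothetical usage of the deleted HFEmbedder; checkpoint names and
# max_length values are illustrative, not taken from this repository.
import torch

from eva_clip.flux.modules.conditioner import HFEmbedder  # file removed by this commit

with torch.no_grad():
    # CLIP branch (version starts with "openai"): returns the pooled sentence embedding.
    clip_embedder = HFEmbedder("openai/clip-vit-large-patch14", max_length=77)
    pooled = clip_embedder(["a photo of a cat"])   # shape: (batch, hidden_dim)

    # T5 branch (any other version): returns per-token last hidden states.
    t5_embedder = HFEmbedder("google/t5-v1_1-xxl", max_length=512)
    tokens = t5_embedder(["a photo of a cat"])     # shape: (batch, max_length, hidden_dim)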