from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
from peft import PeftModel, PeftConfig


class LyricGeneratorModel:
    def __init__(self, repo_id: str):
        # Read the adapter config to find the base model it was fine-tuned from.
        config = PeftConfig.from_pretrained(repo_id)

        # Load the base model in 8-bit to reduce GPU memory usage.
        bnb_config = BitsAndBytesConfig(load_in_8bit=True)
        model = AutoModelForCausalLM.from_pretrained(
            config.base_model_name_or_path,
            return_dict=True,
            quantization_config=bnb_config,
            device_map="auto",
        )

        # The tokenizer comes from the base model; the LoRA adapter is applied on top.
        self.tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)
        self.model = PeftModel.from_pretrained(model, repo_id)

    def generate_lyrics(self, prompt: str, max_length: int):
        # Tokenize the prompt and move it to the GPU (8-bit loading assumes CUDA).
        input_ids = self.tokenizer(prompt, return_tensors="pt").input_ids
        input_ids = input_ids.to("cuda")

        # Sample up to max_length tokens and decode the single generated sequence.
        output_tokens = self.model.generate(
            input_ids, do_sample=True, max_length=max_length
        )
        output_text = self.tokenizer.batch_decode(output_tokens)[0]
        return output_text
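

# Example usage, a minimal sketch: "someuser/lyric-lora" is a placeholder adapter
# repo id, not a checkpoint from this project. Running it requires a CUDA GPU and
# the bitsandbytes/accelerate packages for 8-bit loading.
if __name__ == "__main__":
    generator = LyricGeneratorModel("someuser/lyric-lora")
    lyrics = generator.generate_lyrics("Write a verse about the sea:", max_length=200)
    print(lyrics)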