```python
from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
from peft import PeftModel, PeftConfig


class LyricGeneratorModel:
    def __init__(self, repo_id: str):
        # Resolve the adapter config to find the base model it was trained on.
        config = PeftConfig.from_pretrained(repo_id)

        # Load the base model in 8-bit to reduce GPU memory usage.
        bnb_config = BitsAndBytesConfig(load_in_8bit=True)
        model = AutoModelForCausalLM.from_pretrained(
            config.base_model_name_or_path,
            return_dict=True,
            quantization_config=bnb_config,
            device_map="auto",
        )

        self.tokenizer = AutoTokenizer.from_pretrained(config.base_model_name_or_path)
        # Attach the LoRA adapter weights from the repo on top of the base model.
        self.model = PeftModel.from_pretrained(model, repo_id)

    def generate_lyrics(self, prompt: str, max_length: int):
        input_ids = self.tokenizer(prompt, return_tensors="pt").input_ids
        input_ids = input_ids.to("cuda")

        # Sample a continuation up to max_length tokens.
        output_tokens = self.model.generate(
            input_ids, do_sample=True, max_length=max_length
        )
        output_text = self.tokenizer.batch_decode(output_tokens)[0]

        # The prompt ends with "->:", so return only the generated part after it.
        if "->:" in output_text:
            return output_text.split("->:")[1].strip()
        return output_text
```
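For context, a minimal usage sketch is shown below. The adapter repo id and the prompt wording are placeholders; only the `->:` delimiter is taken from the parsing logic above.

```python
# Minimal usage sketch. The repo id below is hypothetical; substitute your own
# PEFT adapter repository. A CUDA-capable GPU is assumed (inputs are moved to "cuda").
generator = LyricGeneratorModel("your-username/lyric-generator-lora")

# The model expects the prompt to end with "->:", matching the split in generate_lyrics.
lyrics = generator.generate_lyrics("genre: pop, theme: summer nights ->:", max_length=256)
print(lyrics)
```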