ElPlaguister committed on
Commit
c90fdd5
β€’
1 Parent(s): 54768ed

WRAP Models

Browse files
Files changed (3) hide show
  1. koalpaca.py +2 -1
  2. model.py +10 -0
  3. t5.py +2 -2
koalpaca.py CHANGED
@@ -1,8 +1,9 @@
1
  import torch
2
  from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig, GenerationConfig
3
  from peft import PeftModel, PeftConfig
 
4
 
5
- class KoAlpaca:
6
  def __init__(self):
7
  peft_model_id = "4n3mone/Komuchat-koalpaca-polyglot-12.8B"
8
  config = PeftConfig.from_pretrained(peft_model_id)
 
1
  import torch
2
  from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig, GenerationConfig
3
  from peft import PeftModel, PeftConfig
4
+ from model import Model
5
 
6
+ class KoAlpaca(Model):
7
  def __init__(self):
8
  peft_model_id = "4n3mone/Komuchat-koalpaca-polyglot-12.8B"
9
  config = PeftConfig.from_pretrained(peft_model_id)
model.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ class Model:
2
+ def __init__(self):
3
+ self.model = None
4
+ self.tokenizer = None
5
+ self.gen_config = None
6
+ self.INPUT_FORMAT = None
7
+
8
+ def generate(self, inputs:str) -> str:
9
+ outputs = inputs
10
+ return outputs
t5.py CHANGED
@@ -1,6 +1,6 @@
1
  from transformers import T5TokenizerFast, T5ForConditionalGeneration, GenerationConfig
2
-
3
- class T5:
4
  def __init__(self,
5
  model_dir:str='./models/pko_t5_COMU_patience10',
6
  max_input_length:int=64,
 
1
  from transformers import T5TokenizerFast, T5ForConditionalGeneration, GenerationConfig
2
+ from model import Model
3
+ class T5(Model):
4
  def __init__(self,
5
  model_dir:str='./models/pko_t5_COMU_patience10',
6
  max_input_length:int=64,