Kevin Fink committed on
Commit
d06ead9
·
1 Parent(s): 6c1ed42
Files changed (1) hide show
  1. app.py +2 -3
app.py CHANGED
@@ -34,7 +34,7 @@ def fine_tune_model(model_name, dataset_name, hub_id, api_key, num_epochs, batch
34
 
35
  # Load the model and tokenizer
36
  model = AutoModelForSeq2SeqLM.from_pretrained(model_name.strip(), num_labels=2)
37
- model = get_peft_model(model, lora_config)
38
  tokenizer = AutoTokenizer.from_pretrained(model_name)
39
 
40
  # Tokenize the dataset
@@ -62,7 +62,7 @@ def fine_tune_model(model_name, dataset_name, hub_id, api_key, num_epochs, batch
62
  logging_steps=10,
63
  #push_to_hub=True,
64
  hub_model_id=hub_id.strip(),
65
- fp16=True,
66
  lr_scheduler_type='cosine',
67
  )
68
 
@@ -72,7 +72,6 @@ def fine_tune_model(model_name, dataset_name, hub_id, api_key, num_epochs, batch
72
  args=training_args,
73
  train_dataset=tokenized_datasets['train'],
74
  eval_dataset=tokenized_datasets['test'],
75
- test_dataset=tokenized_datasets['validation'],
76
  callbacks=[LoggingCallback()],
77
  )
78
 
 
34
 
35
  # Load the model and tokenizer
36
  model = AutoModelForSeq2SeqLM.from_pretrained(model_name.strip(), num_labels=2)
37
+ #model = get_peft_model(model, lora_config)
38
  tokenizer = AutoTokenizer.from_pretrained(model_name)
39
 
40
  # Tokenize the dataset
 
62
  logging_steps=10,
63
  #push_to_hub=True,
64
  hub_model_id=hub_id.strip(),
65
+ #fp16=True,
66
  lr_scheduler_type='cosine',
67
  )
68
 
 
72
  args=training_args,
73
  train_dataset=tokenized_datasets['train'],
74
  eval_dataset=tokenized_datasets['test'],
 
75
  callbacks=[LoggingCallback()],
76
  )
77