BrightBlueCheese committed
Commit c9fcefb
1 Parent(s): 6c018d5
.ipynb_checkpoints/app-checkpoint.py CHANGED
@@ -57,7 +57,7 @@ max_length = max_seq_length
 num_workers = 2

 # I just reused our previous research code with some modifications.
-dir_main = "./"
+dir_main = "."
 name_model_mtr = "ChemLlama_Medium_30m_vloss_val_loss=0.029_ep_epoch=04.ckpt"

 dir_model_mtr = f"{dir_main}/SolLlama-mtr/{name_model_mtr}"
@@ -73,7 +73,8 @@ num_workers = 2
 ## FT

 ver_ft = 0
-dir_model_ft_to_save = f"{dir_main}/SolLlama-mtr"
+# dir_model_ft_to_save = f"{dir_main}/SolLlama-mtr"
+dir_model_ft_to_save = f"{dir_main}/SolLlama-mtr"
 # name_model_ft = 'Solvent.pt'
 name_model_ft = f"{solute_or_solvent}.pt"

@@ -130,7 +131,7 @@ trainer = L.Trainer(


 # Predict
-local_model_ft = utils_sl.load_model_ft_with_epoch(
+local_model_ft = utils_sl.load_model_ft_with(
     class_model_ft=model_ft,
     dir_model_ft=dir_model_ft_to_save,
     name_model_ft=name_model_ft
.ipynb_checkpoints/utils_sl-checkpoint.py CHANGED
@@ -78,20 +78,16 @@ def model_evalulator_sol(
 # from .model_finetune import CustomFinetuneModel
 # import model_finetune_sol
 import torch
-def load_model_ft_with_epoch(class_model_ft,
+def load_model_ft_with(class_model_ft,
                        dir_model_ft:str,
                        name_model_ft:str):
     # dir_model_ft level 1
     # ex /main/model_mtr/model_mtr_ep/dataset
-
-
-    # extension = '.ckpt'
-    extension = '.pt'

     dir_target_model_ft = f"{dir_model_ft}/{name_model_ft}"


-    loaded_state_dict = torch.load(dir_target_model_ft)
+    loaded_state_dict = torch.load(dir_target_model_ft, map_location=torch.device('cpu'))
     class_model_ft.load_state_dict(loaded_state_dict['state_dict'])

     return class_model_ft # now is model_ft
app.py CHANGED
@@ -57,7 +57,7 @@ max_length = max_seq_length
 num_workers = 2

 # I just reused our previous research code with some modifications.
-dir_main = "./"
+dir_main = "."
 name_model_mtr = "ChemLlama_Medium_30m_vloss_val_loss=0.029_ep_epoch=04.ckpt"

 dir_model_mtr = f"{dir_main}/SolLlama-mtr/{name_model_mtr}"
@@ -73,7 +73,8 @@ num_workers = 2
 ## FT

 ver_ft = 0
-dir_model_ft_to_save = f"{dir_main}/SolLlama-mtr"
+# dir_model_ft_to_save = f"{dir_main}/SolLlama-mtr"
+dir_model_ft_to_save = f"{dir_main}/SolLlama-mtr"
 # name_model_ft = 'Solvent.pt'
 name_model_ft = f"{solute_or_solvent}.pt"

@@ -130,7 +131,7 @@ trainer = L.Trainer(


 # Predict
-local_model_ft = utils_sl.load_model_ft_with_epoch(
+local_model_ft = utils_sl.load_model_ft_with(
     class_model_ft=model_ft,
     dir_model_ft=dir_model_ft_to_save,
     name_model_ft=name_model_ft
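
Note on the path edits in app.py: the dir_main change ("./" to ".") and the f-string paths built from it are managing path separators by hand. Below is a minimal sketch of the same paths built with os.path.join, which absorbs a missing or doubled "/" automatically; the values mirror app.py, but os.path.join is a suggested alternative here, not what the committed code uses, and "Solvent" is only one plausible value of solute_or_solvent.

import os

# Values mirroring app.py; purely illustrative.
dir_main = "."
name_model_mtr = "ChemLlama_Medium_30m_vloss_val_loss=0.029_ep_epoch=04.ckpt"
solute_or_solvent = "Solvent"

# os.path.join normalizes separators, so "." and "./" produce the same result
# and no "/" can go missing or be doubled between the pieces.
dir_model_mtr = os.path.join(dir_main, "SolLlama-mtr", name_model_mtr)
dir_model_ft_to_save = os.path.join(dir_main, "SolLlama-mtr")
name_model_ft = f"{solute_or_solvent}.pt"

print(dir_model_mtr)         # ./SolLlama-mtr/ChemLlama_Medium_30m_vloss_val_loss=0.029_ep_epoch=04.ckpt
print(dir_model_ft_to_save)  # ./SolLlama-mtr

With the original f-strings, dir_main = "./" yields ".//SolLlama-mtr/...", which still resolves on Linux but is presumably the doubled slash the dir_main edit is cleaning up.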
utils_sl.py CHANGED
@@ -78,20 +78,16 @@ def model_evalulator_sol(
 # from .model_finetune import CustomFinetuneModel
 # import model_finetune_sol
 import torch
-def load_model_ft_with_epoch(class_model_ft,
+def load_model_ft_with(class_model_ft,
                        dir_model_ft:str,
                        name_model_ft:str):
     # dir_model_ft level 1
     # ex /main/model_mtr/model_mtr_ep/dataset
-
-
-    # extension = '.ckpt'
-    extension = '.pt'

     dir_target_model_ft = f"{dir_model_ft}/{name_model_ft}"


-    loaded_state_dict = torch.load(dir_target_model_ft)
+    loaded_state_dict = torch.load(dir_target_model_ft, map_location=torch.device('cpu'))
     class_model_ft.load_state_dict(loaded_state_dict['state_dict'])

     return class_model_ft # now is model_ft
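
For context on the torch.load change in both utils_sl files: map_location=torch.device('cpu') remaps every tensor in the checkpoint onto the CPU at load time, which is what lets a checkpoint saved during a GPU training run be opened on CPU-only hardware such as a CPU Space. A self-contained sketch of that pattern follows; TinyRegressor and model.pt are placeholders for illustration, not objects from this repository.

import torch
import torch.nn as nn

# Placeholder model purely for illustration; the real class_model_ft here is the
# fine-tuned SolLlama module.
class TinyRegressor(nn.Module):
    def __init__(self):
        super().__init__()
        self.linear = nn.Linear(8, 1)

    def forward(self, x):
        return self.linear(x)

# Save a checkpoint shaped like a Lightning one: weights nested under 'state_dict'.
torch.save({"state_dict": TinyRegressor().state_dict()}, "model.pt")

# map_location forces all tensors in the file onto the CPU, so a checkpoint written
# on a CUDA machine still deserializes on a machine with no GPU.
loaded = torch.load("model.pt", map_location=torch.device("cpu"))

# load_model_ft_with does the same two steps: index into 'state_dict', then load it.
model = TinyRegressor()
model.load_state_dict(loaded["state_dict"])
model.eval()

Without the map_location argument, loading a CUDA-saved checkpoint on a CPU-only machine raises a RuntimeError, which is presumably what this commit works around.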