from dataset import MyDataset
import json
import torch
from modeling import fuseModelWithAppendToBegin
from transformers import AutoModel,AutoTokenizer,AutoModelForCausalLM
def _load_split(path):
    """Read a JSON split file and return its top-level 'data' list.

    Each split file is expected to be a JSON object with a 'data' key
    holding the list of examples.
    """
    with open(path, 'r', encoding='utf-8') as f:
        return json.load(f)['data']


# Load the three dataset splits and wrap them in the project Dataset class.
train_data = _load_split('/home/lxy/multiR/Fuser/data/train.json')
eval_data = _load_split('/home/lxy/multiR/Fuser/data/dev.json')
test_data = _load_split('/home/lxy/multiR/Fuser/data/test.json')

train_dataset = MyDataset(train_data)
eval_dataset = MyDataset(eval_data)
test_dataset = MyDataset(test_data)

# Sentence encoder (BGE large, Chinese) in fp16; model_max_length=512 caps
# tokenizer output at the encoder's context window.
sent2vec_tokenizer = AutoTokenizer.from_pretrained('BAAI/bge-large-zh-v1.5', model_max_length=512)
sent2vec = AutoModel.from_pretrained('BAAI/bge-large-zh-v1.5', torch_dtype=torch.float16)

# Causal LM loaded from a local Baichuan2-Chat-7B checkpoint in bf16.
# trust_remote_code=True is required because the checkpoint ships its own
# custom modeling code.
llm_tokenizer = AutoTokenizer.from_pretrained('/data/lxy/baichuan2-chat-7b', trust_remote_code=True)
llm = AutoModelForCausalLM.from_pretrained('/data/lxy/baichuan2-chat-7b', trust_remote_code=True,
                                           torch_dtype=torch.bfloat16)

# Fuse the sentence encoder with the LLM; presumably the encoder's sentence
# embeddings are prepended to the LLM input (see modeling.py) — TODO confirm.
model = fuseModelWithAppendToBegin(sent2vec=sent2vec, sent2vec_tokenizer=sent2vec_tokenizer,
                                   llm=llm, llm_tokenizer=llm_tokenizer)