# FUTFUT / load_model.py
import os
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
from langchain_huggingface.llms import HuggingFacePipeline
'''
Load the model with AutoModelForCausalLM.
This approach is on the slower side, but it is a stable method that runs without issues.
'''
def load_Auto():
    model_id = 'Dongwookss/small_fut_final'

    # Load the tokenizer and model weights from the Hugging Face Hub.
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForCausalLM.from_pretrained(
        model_id,
        torch_dtype=torch.bfloat16,
        device_map="auto",
    )

    # Wrap the model in a transformers text-generation pipeline.
    # Generation settings (sampling temperature, new-token budget) are passed here
    # so they actually affect generation; passing temperature via
    # HuggingFacePipeline's model_kwargs would not be applied at generation time.
    pipe = pipeline(
        "text-generation",
        model=model,
        tokenizer=tokenizer,
        max_new_tokens=2048,
        do_sample=True,
        temperature=0.7,
    )

    # Expose the pipeline as a LangChain-compatible LLM.
    llm = HuggingFacePipeline(pipeline=pipe)
    return llm
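

# A minimal usage sketch (not part of the original file): the object returned by
# load_Auto() is a LangChain LLM, so it can be called with invoke() or composed
# into a chain. The prompt below is an arbitrary placeholder.
if __name__ == "__main__":
    llm = load_Auto()
    answer = llm.invoke("Introduce yourself.")
    print(answer)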