Model Card for OpenHermes_fourier_merge_v1
Models merged using the Fourier method:
- model_1: "teknium/OpenHermes-2.5-Mistral-7B"
- model_2: "teknium/OpenHermes-2-Mistral-7B"
from transformers import AutoModelForCausalLM, AutoTokenizer,pipeline
import torch
# Usage example: load the merged checkpoint and generate a chat completion.
model_id = "NickyNicky/OpenHermes_fourier_merge_v1"

tokenizer = AutoTokenizer.from_pretrained(model_id)
# device_map="auto" shards the weights across available devices;
# eval() disables dropout for inference.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",
    trust_remote_code=True,
).eval()

pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)

# Render the conversation through the model's chat template so the
# prompt carries the special tokens the checkpoint was trained with.
question = "dame un ejemplo del lenguaje de programacion Python"
chat = [{"role": "user", "content": question}]
prompt = pipe.tokenizer.apply_chat_template(
    chat,
    tokenize=False,
    add_generation_prompt=True,
)

# Low temperature keeps sampling close to greedy decoding.
outputs = pipe(
    prompt,
    max_new_tokens=2056,
    do_sample=True,
    temperature=0.31,
)
print(outputs[0]["generated_text"])