---
library_name: transformers
tags:
- unsloth
- trl
- sft
datasets:
- yahma/alpaca-cleaned
- ayoubkirouane/Small-Instruct-Alpaca_Format
base_model:
- google/gemma-7b
dataset:
- ayoubkirouane/Small-Instruct-Alpaca_Format
- yahma/alpaca-cleaned
get_started: |
  # Use a pipeline as a high-level helper
  from transformers import pipeline
  pipe = pipeline("text-generation", model="ayoubkirouane/Gemma_7b_Alpaca")
or: |
  # Load model directly
  from transformers import AutoTokenizer, AutoModelForCausalLM
  tokenizer = AutoTokenizer.from_pretrained("ayoubkirouane/Gemma_7b_Alpaca")
  model = AutoModelForCausalLM.from_pretrained("ayoubkirouane/Gemma_7b_Alpaca")