# Load the tokenizer and model directly from the Hugging Face Hub.
# NOTE: the original source had lost its newlines, leaving everything
# after the leading '#' inside a comment — i.e. the code never ran.
from transformers import AutoModelForCausalLM, AutoTokenizer

# Hub repo ID, named once so tokenizer and model can never drift apart.
MODEL_NAME = "cognitivecomputations/dolphin-2.6-mixtral-8x7b"

# Both calls download (or reuse the local cache of) the repo's files;
# the first run requires network access and substantial disk space.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)