```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load tokenizer and model manually
tokenizer = AutoTokenizer.from_pretrained("Omartificial-Intelligence-Space/Arabic-QWQ-32B-Preview")
model = AutoModelForCausalLM.from_pretrained("Omartificial-Intelligence-Space/Arabic-QWQ-32B-Preview")

# Manually perform text generation
inputs = tokenizer("Generate text example:", return_tensors="pt")
outputs = model.generate(**inputs, max_length=50)

# Decode the output
print(tokenizer.decode(outputs[0]))
```
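
Note that this is a 32B-parameter model, so the default float32 load above will usually exhaust memory on a single device. Below is a minimal sketch of a more memory-aware variant, assuming a CUDA GPU and the `accelerate` package are available; `torch_dtype`, `device_map`, and `max_new_tokens` are standard `transformers` options, not part of the original example:

```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "Omartificial-Intelligence-Space/Arabic-QWQ-32B-Preview"

tokenizer = AutoTokenizer.from_pretrained(model_id)

# Load weights in bfloat16 and let accelerate place them across available
# devices; this roughly halves memory versus the default float32 load.
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)

# Move the input tensors to the same device as the model
inputs = tokenizer("Generate text example:", return_tensors="pt").to(model.device)

# max_new_tokens bounds only the generated continuation, not prompt + output
outputs = model.generate(**inputs, max_new_tokens=50)

print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

Passing `skip_special_tokens=True` to `decode` strips tokens such as end-of-sequence markers from the printed text.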