from transformers import GPT2LMHeadModel, GPT2Tokenizer

# Load the pre-trained GPT-2 model and tokenizer
model = GPT2LMHeadModel.from_pretrained("gpt2")
tokenizer = GPT2Tokenizer.from_pretrained("gpt2")

# Define your prompt
prompt = "Ideas for a new product"

# Generate ideas with the MindGPT AI model
input_ids = tokenizer.encode(prompt, return_tensors='pt')
output = model.generate(
    input_ids,
    max_length=50,
    do_sample=True,
    pad_token_id=tokenizer.eos_token_id,  # silence the pad-token warning for open-ended generation
)

# Decode the generated ideas and print them
generated_ideas = tokenizer.decode(output[0], skip_special_tokens=True)
print(generated_ideas)
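This snippet assumes the transformers and torch packages are installed. Because do_sample=True enables random sampling, each run prints a different continuation of the prompt; if you want reproducible or more (or less) creative output, generate also accepts sampling parameters such as temperature and top_k, though those are optional tweaks rather than part of the snippet above.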