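"""AI-powered story generator.

A Gradio app that uses LangChain and OpenAI chat models to create interactive
stories with optional branching narratives.

Note (assumed, not pinned anywhere in this file): the code below uses the legacy
pre-LCEL LangChain chain API (ChatPromptTemplate + LLMChain.run), so it expects a
LangChain version that still ships langchain.chains.LLMChain and
langchain.chat_models.ChatOpenAI.
"""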
import gradio as gr
from langchain.chat_models import ChatOpenAI
from langchain.prompts import ChatPromptTemplate
from langchain.chains import LLMChain

# Global variables
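# Note: module-level globals such as these are shared by every session served by a
# single Gradio process; true per-user isolation would require gr.State (not used here).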
current_story = ""
story_branches = {}
story_history = []

def generate_story_idea(llm, genre, theme, length):
    """Generate a story idea based on user inputs"""
    idea_prompt = ChatPromptTemplate.from_template(
        """You are a creative writing assistant.
        Generate a compelling story idea for a {length} story in the {genre} genre 
        that explores the theme of {theme}.
        Include a brief plot outline, main character, and setting.
        Respond with just the idea, no additional commentary.
        """
    )
    
    idea_chain = LLMChain(llm=llm, prompt=idea_prompt, output_key="story_idea")
    return idea_chain.run(genre=genre, theme=theme, length=length)

def create_story_beginning(llm, story_idea):
    """Create the beginning of a story based on the idea"""
    beginning_prompt = ChatPromptTemplate.from_template(
        """You are a creative writing assistant.
        Based on this story idea: {story_idea}
        
        Write an engaging opening for this story that introduces the main character and setting.
        Make it compelling and hook the reader immediately.
        Write approximately 250-350 words.
        """
    )
    
    beginning_chain = LLMChain(llm=llm, prompt=beginning_prompt, output_key="story_beginning")
    return beginning_chain.run(story_idea=story_idea)

def continue_linear_story(llm, story_so_far):
    """Continue a linear story based on the story so far"""
    continue_prompt = ChatPromptTemplate.from_template(
        """You are a creative writing assistant.
        Continue this story:
        
        {story_so_far}
        
        Write the next part of the story (approximately 250-350 words), advancing the plot in an interesting way.
        End at a point that feels satisfying but leaves room for more story.
        """
    )
    
    continue_chain = LLMChain(llm=llm, prompt=continue_prompt, output_key="story_continuation")
    return continue_chain.run(story_so_far=story_so_far)

def generate_story_branches(llm, story_so_far):
    """Generate three possible story continuations as branches"""
    branch_prompt = ChatPromptTemplate.from_template(
        """You are a creative writing assistant.
        Based on this story so far:
        
        {story_so_far}
        
        Generate THREE possible directions the story could take next. For each:
        1. Provide a brief title (10 words or less)
        2. Write a short description (1-2 sentences)
        
        Format as:
        Option 1: [Title]
        [Description]
        
        Option 2: [Title]
        [Description]
        
        Option 3: [Title]
        [Description]
        """
    )
    
    branch_chain = LLMChain(llm=llm, prompt=branch_prompt, output_key="story_branches")
    branches_text = branch_chain.run(story_so_far=story_so_far)
    
    # Parse the branches
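    # The parser below assumes the model followed the requested
    # "Option N: [Title]" / description layout; if a header line has no ": "
    # separator, the whole line is kept as the title (see the fallback below).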
    branches = []
    lines = branches_text.strip().split('\n')
    current_option = None
    current_description = ""
    
    for line in lines:
        if line.startswith("Option "):
            if current_option:
                branches.append((current_option, current_description.strip()))
            current_option = line.split(": ", 1)[1] if ": " in line else line
            current_description = ""
        elif current_option is not None:
            current_description += line + " "
    
    if current_option:
        branches.append((current_option, current_description.strip()))
    
    return branches
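# Illustrative (hypothetical) shape of the value returned by generate_story_branches:
#   [("The Hidden Passage", "Mira pries open the cellar door..."),
#    ("An Uneasy Alliance", "..."),
#    ("The Storm Breaks", "...")]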

def continue_branch(llm, story_so_far, branch_title, branch_description):
    """Continue the story based on the selected branch"""
    branch_continue_prompt = ChatPromptTemplate.from_template(
        """You are a creative writing assistant.
        Continue this story:
        
        {story_so_far}
        
        The story should now follow this direction:
        {branch_title} - {branch_description}
        
        Write the next part of the story (approximately 250-350 words) following this direction.
        Make it engaging and consistent with what came before.
        """
    )
    
    branch_chain = LLMChain(llm=llm, prompt=branch_continue_prompt, output_key="branch_continuation")
    return branch_chain.run(
        story_so_far=story_so_far, 
        branch_title=branch_title, 
        branch_description=branch_description
    )

def initialize_story(api_key, genre, theme, length):
    """Initialize a new story with the given parameters"""
    global current_story, story_branches, story_history
    
    try:
        # Initialize LLM
        llm = ChatOpenAI(
            openai_api_key=api_key,
            model="gpt-4.5-preview",
            temperature=0.7
        )
        
        # Reset story state
        current_story = ""
        story_branches = {}
        story_history = []
        
        # Generate story idea and beginning
        story_idea = generate_story_idea(llm, genre, theme, length)
        story_beginning = create_story_beginning(llm, story_idea)
        
        current_story = story_beginning
        story_history.append(current_story)
        
        # Return the story beginning and available actions
        return (
            f"Story Idea:\n{story_idea}\n\n" + 
            f"Story Beginning:\n{story_beginning}", 
            gr.update(visible=True), 
            gr.update(visible=True),
            gr.update(visible=True)
        )
    except Exception as e:
        return f"Error initializing story: {str(e)}", gr.update(visible=False), gr.update(visible=False), gr.update(visible=False)

def continue_story(api_key):
    """Continue the linear story"""
    global current_story, story_history
    
    try:
        # Initialize LLM
        llm = ChatOpenAI(
            openai_api_key=api_key,
            model="gpt-4.5-preview",
            temperature=0.7
        )
        
        # Continue the story
        continuation = continue_linear_story(llm, current_story)
        current_story += "\n\n" + continuation
        story_history.append(continuation)
        
        return current_story, gr.update(value="Story continued successfully!")
    except Exception as e:
        return current_story, gr.update(value=f"Error continuing story: {str(e)}")

def generate_branches(api_key):
    """Generate branching options for the story"""
    global current_story, story_branches
    
    try:
        # Initialize LLM
        llm = ChatOpenAI(
            openai_api_key=api_key,
            model="gpt-4.5-preview",
            temperature=0.8  # Higher temperature for more creative branches
        )
        
        # Generate branches
        branches = generate_story_branches(llm, current_story)
        story_branches = {f"Option {i+1}: {title}": (title, desc) for i, (title, desc) in enumerate(branches)}
        
        # Create formatted output for display
        branches_output = "\n\n".join([f"{option}\n{desc}" for option, (_, desc) in story_branches.items()])
        
        # Create radio options
        radio_options = list(story_branches.keys())
        
        return branches_output, gr.update(choices=radio_options, value=radio_options[0] if radio_options else None, visible=True), gr.update(visible=True)
    except Exception as e:
        return f"Error generating branches: {str(e)}", gr.update(visible=False), gr.update(visible=False)

def select_branch(api_key, selected_branch):
    """Continue the story based on the selected branch"""
    global current_story, story_branches, story_history
    
    try:
        if not selected_branch or selected_branch not in story_branches:
            return current_story, gr.update(value="Please select a valid branch.")
        
        # Initialize LLM
        llm = ChatOpenAI(
            openai_api_key=api_key,
            model="gpt-4-turbo",
            temperature=0.7
        )
        
        # Get branch details
        branch_title, branch_description = story_branches[selected_branch]
        
        # Continue along the selected branch
        continuation = continue_branch(llm, current_story, branch_title, branch_description)
        
        # Update story state
        current_story += f"\n\n[{selected_branch}]\n\n" + continuation
        story_history.append(f"[Branch: {branch_title}] {continuation}")
        
        return current_story, gr.update(value="Branch selected and story continued!")
    except Exception as e:
        return current_story, gr.update(value=f"Error selecting branch: {str(e)}")

def create_app():
    """Create the Gradio interface"""
    with gr.Blocks(title="AI Story Generator", theme=gr.themes.Soft()) as app:
        gr.Markdown("# πŸ“š AI-Powered Story Generator")
        gr.Markdown("Create interactive stories with branching narratives using OpenAI and LangChain")
        
        with gr.Row():
            with gr.Column(scale=1):
                api_key = gr.Textbox(
                    label="OpenAI API Key πŸ”‘",
                    placeholder="Enter your OpenAI API key here",
                    type="password"
                )
                
                with gr.Group():
                    # gr.Markdown("### Story Parameters")
                    genre = gr.Dropdown(
                        label="Story Parameters ➑️ Genre",
                        choices=[
                            "Fantasy", "Science Fiction", "Mystery", "Romance", 
                            "Horror", "Adventure", "Historical Fiction", "Comedy"
                        ],
                        value="Fantasy"
                    )
                    theme = gr.Textbox(
                        label="Story Parameters ➑️ Theme", 
                        placeholder="e.g., Redemption, Loss, Discovery" 
                        # value="Adventure"
                    )
                    length = gr.Radio(
                        label="Story Parameters ➑️ Story Length", 
                        choices=["Short Story", "Novella", "Novel Chapter"],
                        value="Short Story"
                    )
                
                with gr.Row():
                    start_btn = gr.Button("Start New Story", variant="primary")
                
                with gr.Row():
                    continue_btn = gr.Button("Continue Story", visible=False)
                    branch_btn = gr.Button("Generate Branch Options", visible=False)
                
                status = gr.Textbox(label="Status", value="", visible=True)
                
                with gr.Group(visible=False) as branch_group:
                    branch_output = gr.Textbox(label="Story Branches")
                    branch_selection = gr.Radio(label="Select a Branch", choices=[])
                    select_branch_btn = gr.Button("Continue with Selected Branch")
            
            with gr.Column(scale=2):
                story_output = gr.Textbox(
                    label="Generated Story", 
                    placeholder="Your story will appear here...",
                    lines=20
                )
        
        # Define button click events
        start_btn.click(
            initialize_story,
            inputs=[api_key, genre, theme, length],
            outputs=[story_output, continue_btn, branch_btn, branch_group]
        )
        
        continue_btn.click(
            continue_story,
            inputs=[api_key],
            outputs=[story_output, status]
        )
        
        branch_btn.click(
            generate_branches,
            inputs=[api_key],
            outputs=[branch_output, branch_selection, select_branch_btn]
        )
        
        select_branch_btn.click(
            select_branch,
            inputs=[api_key, branch_selection],
            outputs=[story_output, status]
        )
        
        # Example usage
        gr.Examples(
            examples=[
                ["Fantasy", "Coming of age", "Short Story"],
                ["Science Fiction", "Artificial intelligence", "Novella"],
                ["Mystery", "Betrayal", "Novel Chapter"]
            ],
            inputs=[genre, theme, length],
            outputs=[]
        )
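        # Since no fn/cache_examples is given, clicking an example only pre-fills
        # the inputs above; the user still has to press "Start New Story".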

        # Add accordions with option explanations
        with gr.Accordion("πŸ“ Understanding Story Options", open=True):
            with gr.Accordion("⭐ Genre Options", open=False):
                gr.Markdown("""
                **Genre** determines the type and style of your story:
                
                - **Fantasy**: Stories with magic, mythical creatures, or supernatural elements (like Harry Potter or Lord of the Rings)
                - **Science Fiction**: Stories involving futuristic technology, space travel, or scientific concepts
                - **Mystery**: Stories centered around solving a crime or puzzle
                - **Romance**: Stories focusing on relationships and love
                - **Horror**: Stories designed to frighten or disturb the reader
                - **Adventure**: Stories with exciting journeys, quests, or challenges
                - **Historical Fiction**: Stories set in the past with historically accurate settings
                - **Comedy**: Humorous stories meant to entertain and make readers laugh
                """)
            
            with gr.Accordion("⭐ Theme Suggestions", open=False):
                gr.Markdown("""
                **Theme** is the central idea or message that runs throughout your story. Some examples:
                
                - **Redemption**: A character seeking forgiveness or making amends
                - **Loss**: Dealing with grief or the absence of something important
                - **Discovery**: Finding something new about oneself or the world
                - **Adventure**: Exploring new places or situations
                - **Coming of age**: A character maturing or growing up
                - **Betrayal**: Dealing with trust being broken
                - **Power**: The effects of gaining or losing control
                - **Justice**: Exploring fairness and moral rights
                - **Love**: Different types of relationships and connections
                - **Identity**: Understanding who one truly is
                """)
            
            with gr.Accordion("⭐ Story Length Options", open=False):
                gr.Markdown("""
                **Story Length** determines the scope of your generated story:
                
                - **Short Story**: A brief, self-contained narrative (typically 1,000-7,500 words) that can be read in one sitting
                - **Novella**: A medium-length story (typically 17,500-40,000 words), longer than a short story but shorter than a novel
                - **Novel Chapter**: A section of what could be a longer work, focusing on just one part of a potentially larger narrative
                """)
            
            with gr.Accordion("⭐ Interface Elements", open=False):
                gr.Markdown("""
                **Status**: Shows feedback about your story generation process (success messages, errors, confirmations)
                
                **Story Branches**: When you click "Generate Branch Options," this area shows three different possible directions for your story. Each includes a title and brief description.
                
                **Select a Branch**: Radio buttons that let you choose which story direction to follow. After selecting one, click "Continue with Selected Branch" to develop that storyline.
                
                **Generated Story**: The main output area where your complete story appears, including all continuations and branches you've selected.
                """)
        
        gr.Markdown("""
        ## How to Use
        1. Enter your OpenAI API key πŸ”‘
        2. Select a genre, theme, and length
        3. Click "Start New Story" to begin
        4. Continue the linear story or generate branching options
        5. If you chose branching, select a branch to follow
        
        This app uses LangChain to orchestrate the story generation process and OpenAI's models to create the content.
        """)
    
    return app

if __name__ == "__main__":
    app = create_app()
    app.launch()
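    # Optional variations (sketch, not required here): launch() also accepts
    # e.g. share=True for a temporary public link, or server_name="0.0.0.0"
    # to listen on all interfaces:
    #     app.launch(share=True, server_name="0.0.0.0")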