batch_size
Browse files
    	
        App/Generate/Schema.py
    CHANGED
    
    | @@ -6,6 +6,7 @@ from pydantic import validator | |
| 6 | 
             
            class GeneratorRequest(BaseModel):
         | 
| 7 | 
             
                prompt: str
         | 
| 8 | 
             
                grok: Optional[bool] = True
         | 
|  | |
| 9 | 
             
                model: str = "llama3-70b-8192"
         | 
| 10 |  | 
| 11 |  | 
|  | |
| 6 | 
             
            class GeneratorRequest(BaseModel):
         | 
| 7 | 
             
                prompt: str
         | 
| 8 | 
             
                grok: Optional[bool] = True
         | 
| 9 | 
            +
                batch_size: int = 4
         | 
| 10 | 
             
                model: str = "llama3-70b-8192"
         | 
| 11 |  | 
| 12 |  | 
    	
        App/Generate/generatorRoutes.py
    CHANGED
    
    | @@ -1,6 +1,7 @@ | |
| 1 | 
             
            from fastapi import APIRouter, HTTPException, status, BackgroundTasks, UploadFile, Query
         | 
| 2 | 
             
            from .Schema import GeneratorRequest, GeneratorBulkRequest
         | 
| 3 | 
             
            from .utils.GroqInstruct import chatbot, VideoOutput
         | 
|  | |
| 4 | 
             
            from .utils.HuggingChat import Hugging
         | 
| 5 | 
             
            from .Story.Story import Story
         | 
| 6 | 
             
            import asyncio, pprint, json
         | 
| @@ -19,6 +20,7 @@ async def update_scene(model_scene): | |
| 19 |  | 
| 20 | 
             
            async def main(request: GeneratorRequest):
         | 
| 21 | 
             
                topic = request.prompt
         | 
|  | |
| 22 | 
             
                renderr = RenderVideo()
         | 
| 23 | 
             
                huggChat = Hugging()
         | 
| 24 | 
             
                if request.grok:
         | 
| @@ -38,8 +40,10 @@ async def main(request: GeneratorRequest): | |
| 38 | 
             
                # Assuming generated_story.scenes is a list of scenes
         | 
| 39 | 
             
                scene_updates = []
         | 
| 40 | 
             
                with tqdm(total=len(generated_story.scenes)) as pbar:
         | 
| 41 | 
            -
                    for i in range(0, len(generated_story.scenes), 2):  <!-- truncated in capture; "2" reconstructed from the new side and the "batch of two" comment — verify against the actual commit -->
 | 
| 42 | 
            -
                        batch = generated_story.scenes[i : i + 2]  <!-- truncated in capture; reconstructed — verify against the actual commit -->
 | 
|  | |
|  | |
| 43 | 
             
                        batch_updates = []
         | 
| 44 |  | 
| 45 | 
             
                        for story_scene in batch:
         | 
|  | |
| 1 | 
             
            from fastapi import APIRouter, HTTPException, status, BackgroundTasks, UploadFile, Query
         | 
| 2 | 
             
            from .Schema import GeneratorRequest, GeneratorBulkRequest
         | 
| 3 | 
             
            from .utils.GroqInstruct import chatbot, VideoOutput
         | 
| 4 | 
            +
            from .utils.Cohere import chatbot as cohere_chat
         | 
| 5 | 
             
            from .utils.HuggingChat import Hugging
         | 
| 6 | 
             
            from .Story.Story import Story
         | 
| 7 | 
             
            import asyncio, pprint, json
         | 
|  | |
| 20 |  | 
| 21 | 
             
            async def main(request: GeneratorRequest):
         | 
| 22 | 
             
                topic = request.prompt
         | 
| 23 | 
            +
                batch_size = request.batch_size
         | 
| 24 | 
             
                renderr = RenderVideo()
         | 
| 25 | 
             
                huggChat = Hugging()
         | 
| 26 | 
             
                if request.grok:
         | 
|  | |
| 40 | 
             
                # Assuming generated_story.scenes is a list of scenes
         | 
| 41 | 
             
                scene_updates = []
         | 
| 42 | 
             
                with tqdm(total=len(generated_story.scenes)) as pbar:
         | 
| 43 | 
            +
                    for i in range(0, len(generated_story.scenes), batch_size):
         | 
| 44 | 
            +
                        batch = generated_story.scenes[
         | 
| 45 | 
            +
                            i : i + batch_size
         | 
| 46 | 
            +
                        ]  # Get a batch of batch_size story scenes
         | 
| 47 | 
             
                        batch_updates = []
         | 
| 48 |  | 
| 49 | 
             
                        for story_scene in batch:
         |