	Update pipeline_flux_kontext.py
Changed file: pipeline_flux_kontext.py (+8 -8)
@@ -26,11 +26,11 @@ from transformers import (
     T5TokenizerFast,
 )
 
-from ...image_processor import PipelineImageInput, VaeImageProcessor
-from ...loaders import FluxIPAdapterMixin, FluxLoraLoaderMixin, FromSingleFileMixin, TextualInversionLoaderMixin
-from ...models import AutoencoderKL, FluxTransformer2DModel
-from ...schedulers import FlowMatchEulerDiscreteScheduler
-from ...utils import (
+from diffusers.image_processor import PipelineImageInput, VaeImageProcessor
+from diffusers.loaders import FluxIPAdapterMixin, FluxLoraLoaderMixin, FromSingleFileMixin, TextualInversionLoaderMixin
+from diffusers.models import AutoencoderKL, FluxTransformer2DModel
+from diffusers.schedulers import FlowMatchEulerDiscreteScheduler
+from diffusers.utils import (
     USE_PEFT_BACKEND,
     is_torch_xla_available,
     logging,
@@ -38,9 +38,9 @@ from ...utils import (
     scale_lora_layers,
     unscale_lora_layers,
 )
-from ...utils.torch_utils import randn_tensor
-from ..pipeline_utils import DiffusionPipeline
-from .pipeline_output import FluxPipelineOutput
+from diffusers.utils.torch_utils import randn_tensor
+from diffusers.pipelines.pipeline_utils import DiffusionPipeline
+from diffusers.pipelines.flux.pipeline_output import FluxPipelineOutput
 
 
 if is_torch_xla_available():
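The commit swaps the package-relative imports (from ...image_processor, from ..pipeline_utils, from .pipeline_output, and so on) for absolute diffusers.* imports, so the copied file resolves its dependencies against the installed diffusers package instead of the package tree it was extracted from. A top-level copy of the file would otherwise fail at import time, since Python raises "attempted relative import with no known parent package" for relative imports in a module that is not part of a package. A minimal usage sketch under that assumption; the FluxKontextPipeline class name and the model id below are illustrative and not shown in this diff:

# Minimal sketch (not part of the commit): importing the edited file as a
# standalone module placed next to the app code in the Space.
# Assumptions: the file defines a FluxKontextPipeline class (inferred from the
# file name) and the model id is only an example.
import torch

from pipeline_flux_kontext import FluxKontextPipeline  # local copy of the edited file

pipe = FluxKontextPipeline.from_pretrained(
    "black-forest-labs/FLUX.1-Kontext-dev",
    torch_dtype=torch.bfloat16,
)
pipe.to("cuda")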

