	disable torch.compile
app.py CHANGED
@@ -9,6 +9,7 @@ from diffusers.utils import numpy_to_pil
 from diffusers import WuerstchenDecoderPipeline, WuerstchenPriorPipeline
 from diffusers.pipelines.wuerstchen import WuerstchenPrior, default_stage_c_timesteps
 from previewer.modules import Previewer
+os.environ['TOKENIZERS_PARALLELISM'] = 'false'
 
 DESCRIPTION = "# Würstchen"
 if not torch.cuda.is_available():
@@ -17,7 +18,7 @@ if not torch.cuda.is_available():
 MAX_SEED = np.iinfo(np.int32).max
 CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES") == "1"
 MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "1536"))
-USE_TORCH_COMPILE =
+USE_TORCH_COMPILE = False
 ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD") == "1"
 PREVIEW_IMAGES = True
 
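For context, a minimal sketch of how a flag like USE_TORCH_COMPILE is typically consumed in a diffusers-based Space, and what hard-coding it to False changes. The pipeline variable names, model IDs, and compile targets below are assumptions for illustration and are not taken from this commit; the actual app.py may load and compile the models differently.

import os

import torch
from diffusers import WuerstchenDecoderPipeline, WuerstchenPriorPipeline

# Silence the Hugging Face tokenizers fork warning, as the commit does.
os.environ["TOKENIZERS_PARALLELISM"] = "false"

# Hard-coded off in this commit, so the torch.compile branch below is skipped.
USE_TORCH_COMPILE = False

device = "cuda" if torch.cuda.is_available() else "cpu"

# Hypothetical pipeline setup (model IDs assumed for illustration).
prior_pipeline = WuerstchenPriorPipeline.from_pretrained(
    "warp-ai/wuerstchen-prior", torch_dtype=torch.float16
).to(device)
decoder_pipeline = WuerstchenDecoderPipeline.from_pretrained(
    "warp-ai/wuerstchen", torch_dtype=torch.float16
).to(device)

if USE_TORCH_COMPILE:
    # Compiling the heavy submodules can speed up repeated generations, but the
    # compile step itself can fail on some runtimes; with the flag set to False
    # this path is never taken.
    prior_pipeline.prior = torch.compile(prior_pipeline.prior, mode="reduce-overhead")
    decoder_pipeline.decoder = torch.compile(decoder_pipeline.decoder, mode="reduce-overhead")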
 
			

