	Switch to auto compute type
- app.py +1 -1
- cli.py +1 -1
- config.json5 +1 -1
    	
app.py
CHANGED

@@ -519,7 +519,7 @@ if __name__ == '__main__':
                         help="directory to save the outputs")
     parser.add_argument("--whisper_implementation", type=str, default=default_whisper_implementation, choices=["whisper", "faster-whisper"],\
                         help="the Whisper implementation to use")
-    parser.add_argument("--compute_type", type=str, default=default_app_config.compute_type, choices=["int8", "int8_float16", "int16", "float16"], \
+    parser.add_argument("--compute_type", type=str, default=default_app_config.compute_type, choices=["default", "auto", "int8", "int8_float16", "int16", "float16", "float32"], \
                         help="the compute type to use for inference")
 
     args = parser.parse_args().__dict__
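The added "default" and "auto" choices match the compute types accepted by faster-whisper/CTranslate2: "auto" picks the fastest type the device supports, while "default" keeps the type the model was converted with. As a rough sketch of where the flag ends up (the model name, device, and surrounding wiring below are illustrative assumptions, not this repo's actual loading code; WhisperModel and its compute_type parameter are the real faster-whisper API):

# Illustrative sketch only; not taken from this repository.
from faster_whisper import WhisperModel

# "auto" lets CTranslate2 choose a compute type the hardware actually
# supports, instead of forcing e.g. "float16" onto CPU-only machines.
model = WhisperModel("medium", device="auto", compute_type="auto")

segments, info = model.transcribe("audio.mp3")
for segment in segments:
    print(f"[{segment.start:.2f} -> {segment.end:.2f}] {segment.text}")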
    	
cli.py
CHANGED

@@ -80,7 +80,7 @@ def cli():
                         help="if True, provide the previous output of the model as a prompt for the next window; disabling may make the text inconsistent across windows, but the model becomes less prone to getting stuck in a failure loop")
     parser.add_argument("--fp16", type=str2bool, default=app_config.fp16, \
                         help="whether to perform inference in fp16; True by default")
-    parser.add_argument("--compute_type", type=str, default=app_config.compute_type, choices=["int8", "int8_float16", "int16", "float16"], \
+    parser.add_argument("--compute_type", type=str, default=app_config.compute_type, choices=["default", "auto", "int8", "int8_float16", "int16", "float16", "float32"], \
                         help="the compute type to use for inference")
 
     parser.add_argument("--temperature_increment_on_fallback", type=optional_float, default=app_config.temperature_increment_on_fallback, \
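Because argparse validates a flag against its choices list before the value ever reaches faster-whisper, the narrower list rejected otherwise valid compute types such as "auto". A minimal, self-contained illustration of that behaviour (the parser here is a stand-alone example, not the repo's parser):

import argparse

# Mirrors the --compute_type flag above with the widened choices list.
parser = argparse.ArgumentParser()
parser.add_argument("--compute_type", type=str, default="float16",
                    choices=["default", "auto", "int8", "int8_float16", "int16", "float16", "float32"],
                    help="the compute type to use for inference")

args = parser.parse_args(["--compute_type", "auto"])
print(args.compute_type)  # auto

# With the old, narrower choices list, the same call would exit with
# "error: argument --compute_type: invalid choice: 'auto'".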
    	
config.json5
CHANGED

@@ -118,7 +118,7 @@
     // Whether to perform inference in fp16; True by default
     "fp16": true,
     // The compute type used by faster-whisper. Can be "int8". "int16" or "float16".
-    "compute_type": "
+    "compute_type": "auto",
     // Temperature to increase when falling back when the decoding fails to meet either of the thresholds below
     "temperature_increment_on_fallback": 0.2,
     // If the gzip compression ratio is higher than this value, treat the decoding as failed
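For completeness, a hedged sketch of how a config default like "compute_type": "auto" could flow from config.json5 into model loading. The loader below (the json5 package, the config.get access, the model name) is an assumption for illustration; this repo's real configuration code is not part of the diff:

# Assumed wiring, for illustration only.
import json5  # third-party "json5" package
from faster_whisper import WhisperModel

with open("config.json5", "r", encoding="utf-8") as f:
    config = json5.load(f)

# With the change above, the shipped default defers the precision choice
# to CTranslate2 at model-load time instead of hard-coding one type.
compute_type = config.get("compute_type", "auto")
model = WhisperModel("medium", device="auto", compute_type=compute_type)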