Spaces: Running on Zero
make zerogpu duration dynamic to avoid GPU task abort errors
app.py CHANGED
@@ -145,7 +145,19 @@ pipe = QwenImageEditPipeline.from_pretrained("Qwen/Qwen-Image-Edit", torch_dtype
 MAX_SEED = np.iinfo(np.int32).max
 
 # --- Main Inference Function (with hardcoded negative prompt) ---
-
+def get_duration(
+    image,
+    prompt,
+    seed,
+    randomize_seed,
+    true_guidance_scale,
+    num_inference_steps,
+    rewrite_prompt,
+    progress
+):
+    return num_inference_steps * 3
+
+@spaces.GPU(duration=get_duration)
 def infer(
     image,
     prompt,
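ZeroGPU aborts a task that runs past its allocated GPU duration, and `spaces.GPU` accepts a callable for `duration` that is invoked with the same arguments as the decorated function and returns the per-call budget in seconds. Below is a minimal, self-contained sketch of the pattern this commit applies, assuming the Hugging Face `spaces` package available inside a ZeroGPU Space; the 3-seconds-per-step figure is the commit's own heuristic, and the `infer` body is elided.

import spaces

def get_duration(image, prompt, seed, randomize_seed,
                 true_guidance_scale, num_inference_steps,
                 rewrite_prompt, progress):
    # Called by `spaces` with the same arguments as the GPU function;
    # the returned value is the requested GPU time for this call, in seconds.
    # e.g. num_inference_steps=40 requests a 120-second slot.
    return num_inference_steps * 3

@spaces.GPU(duration=get_duration)
def infer(image, prompt, seed, randomize_seed,
          true_guidance_scale, num_inference_steps,
          rewrite_prompt, progress):
    ...  # diffusion pipeline call goes here (omitted)

Scaling the request with the step count keeps short edits from reserving more quota than they need while giving long runs enough headroom to finish instead of being aborted mid-task.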