Make it run on long videos
utils.py CHANGED
@@ -154,20 +154,28 @@ def prepare_video(video_path:str, resolution:int, device, dtype, normalize=True,
     else:
         video = video.asnumpy()
     _, h, w, _ = video.shape
-    video = rearrange(video, "f h w c -> f c h w")
-    video = torch.Tensor(video).to(device).to(dtype)
-
-    # Use max if you want the larger side to be equal to resolution (e.g. 512)
-    # k = float(resolution) / min(h, w)
-    k = float(resolution) / max(h, w)
-    h *= k
-    w *= k
-    h = int(np.round(h / 64.0)) * 64
-    w = int(np.round(w / 64.0)) * 64
-
-    video = Resize((h, w), interpolation=InterpolationMode.BILINEAR, antialias=True)(video)
-    if normalize:
-        video = video / 127.5 - 1.0
+
+    video_resized = []
+    for f in range(video.shape[0]):
+        frame = video[f:f+1, ...]
+
+        frame = rearrange(frame, "f h w c -> f c h w")
+        frame = torch.Tensor(frame).to(device).to(dtype)
+
+        # Use max if you want the larger side to be equal to resolution (e.g. 512)
+        # k = float(resolution) / min(h, w)
+        k = float(resolution) / max(h, w)
+        h *= k
+        w *= k
+        h = int(np.round(h / 64.0)) * 64
+        w = int(np.round(w / 64.0)) * 64
+
+        frame = Resize((h, w), interpolation=InterpolationMode.BILINEAR, antialias=True)(frame)
+        if normalize:
+            frame = frame / 127.5 - 1.0
+        video_resized.append(frame)
+    video = torch.cat(video_resized)
+
     return video, output_fps
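For reference, here is a minimal, self-contained sketch of the same per-frame resizing idea, assuming decord, einops, and torchvision are installed. The function name load_video_resized and its signature are illustrative rather than the Space's actual API, and unlike the commit above it computes the target (h, w) once before the loop instead of rescaling h and w on every iteration, so the frame dimensions are not carried as floats across frames.

import numpy as np
import torch
from decord import VideoReader
from einops import rearrange
from torchvision.transforms import InterpolationMode, Resize


def load_video_resized(video_path: str, resolution: int = 512,
                       device: str = "cpu", dtype: torch.dtype = torch.float32,
                       normalize: bool = True):
    """Decode a video and resize it one frame at a time (illustrative sketch)."""
    vr = VideoReader(video_path)
    output_fps = int(vr.get_avg_fps())

    # Decode all frames as uint8 on the CPU: shape (f, h, w, c).
    video = vr.get_batch(list(range(len(vr)))).asnumpy()
    _, h, w, _ = video.shape

    # Scale so the longer side equals `resolution`, then snap both sides
    # to multiples of 64 (computed once, outside the per-frame loop).
    k = float(resolution) / max(h, w)
    new_h = int(np.round(h * k / 64.0)) * 64
    new_w = int(np.round(w * k / 64.0)) * 64
    resize = Resize((new_h, new_w),
                    interpolation=InterpolationMode.BILINEAR, antialias=True)

    video_resized = []
    for f in range(video.shape[0]):
        # Move a single frame to the target device/dtype, resize it, and
        # optionally map pixel values from [0, 255] to [-1, 1].
        frame = rearrange(video[f:f + 1], "f h w c -> f c h w")
        frame = torch.from_numpy(np.ascontiguousarray(frame))
        frame = frame.to(device=device, dtype=dtype)
        frame = resize(frame)
        if normalize:
            frame = frame / 127.5 - 1.0
        video_resized.append(frame)

    return torch.cat(video_resized), output_fps

Only one full-resolution frame at a time is converted to the floating-point dtype on the device; what accumulates is the already-resized output, so the full-resolution clip never has to exist as a float tensor in GPU memory. That is the memory saving behind the "Make it run on long videos" change.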