Fix logging
handler.py CHANGED  +13 -18
@@ -23,25 +23,20 @@ class EndpointHandler:
         Return:
             A :obj:`list` | `dict`: will be serialized and returned
         """
-
+        image = data.pop("inputs", data)
+        self.logger.info("image")
 
-
-
-
+        # process image
+        inputs = self.processor(images=image, return_tensors="pt").to(self.device)
+        self.logger.info("inputs")
+        generated_ids = self.model.generate(**inputs, max_new_tokens=20)
+        self.logger.info("generated")
 
-
-
-
-
-
-
-            # run prediction
-            generated_text: List[str] = self.processor.batch_decode(
-                generated_ids, skip_special_tokens=True
-            )
-            checkpoints += "decoded\n"
-        except Exception as e:
-            checkpoints += f"{e}\n"
+        # run prediction
+        generated_text = self.processor.batch_decode(
+            generated_ids, skip_special_tokens=True
+        )
+        self.logger.info("decoded")
 
         # decode output
-        return generated_text
+        return generated_text
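For context, a minimal sketch of how the full handler could look after this change. Only the `__call__` body comes from the diff above; the `__init__` (logger setup, device selection, model and processor loading) and the choice of a BLIP-style captioning model via `AutoProcessor` / `BlipForConditionalGeneration` are illustrative assumptions, not part of this commit.

import logging
from typing import Any, Dict, List

import torch
from transformers import AutoProcessor, BlipForConditionalGeneration


class EndpointHandler:
    def __init__(self, path: str = ""):
        # Assumption: the diff does not show __init__; a BLIP-style captioning
        # model and a plain stdlib logger are used here for illustration.
        logging.basicConfig(level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        self.processor = AutoProcessor.from_pretrained(path)
        self.model = BlipForConditionalGeneration.from_pretrained(path).to(self.device)

    def __call__(self, data: Dict[str, Any]) -> List[str]:
        """
        Args:
            data: payload whose "inputs" key holds a PIL image.
        Return:
            A :obj:`list` | `dict`: will be serialized and returned
        """
        image = data.pop("inputs", data)
        self.logger.info("image")

        # process image
        inputs = self.processor(images=image, return_tensors="pt").to(self.device)
        self.logger.info("inputs")
        generated_ids = self.model.generate(**inputs, max_new_tokens=20)
        self.logger.info("generated")

        # run prediction
        generated_text = self.processor.batch_decode(
            generated_ids, skip_special_tokens=True
        )
        self.logger.info("decoded")

        # decode output
        return generated_text

A local call would look roughly like EndpointHandler("caption-model")({"inputs": Image.open("cat.png")}), where the checkpoint path and image file are hypothetical. The effect of the commit is that progress markers previously accumulated in a `checkpoints` string inside a `try`/`except` block are now emitted through `self.logger.info`, so they show up in the endpoint logs while the request is being processed.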