karimbenharrak committed
Commit ea3d9f6 · verified · 1 Parent(s): c127bd9

Update handler.py

Files changed (1): handler.py +1 -2
handler.py CHANGED
@@ -66,7 +66,6 @@ class EndpointHandler():
         self.pipe3 = AutoPipelineForImage2Image.from_pipe(self.pipe2)
         #self.pipe3.enable_model_cpu_offload()
         self.pipe3.enable_xformers_memory_efficient_attention()
-        """
 
 
     def __call__(self, data: Any) -> List[List[Dict[str, float]]]:
@@ -120,7 +119,7 @@ class EndpointHandler():
         """
 
         #pipe = AutoPipelineForInpainting.from_pretrained("diffusers/stable-diffusion-xl-1.0-inpainting-0.1", torch_dtype=torch.float16, variant="fp16").to("cuda")
-        """
+
         # run inference pipeline
         out = self.pipe(prompt=prompt, negative_prompt=negative_prompt, image=image, mask_image=mask_image, num_inference_steps=num_inference_steps, guidance_scale=guidance_scale)
 
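For context, a minimal, hypothetical sketch of the handler these hunks patch, reconstructed only from the lines visible in the diff (the diffusers model id in the commented-out line, the pipe3 setup, and the final self.pipe(...) call). Everything else, in particular how self.pipe2 is built, the payload parsing, and the return value, is an assumption and not the actual handler.py:

from typing import Any, Dict, List

import torch
from diffusers import AutoPipelineForImage2Image, AutoPipelineForInpainting


class EndpointHandler():
    def __init__(self, path: str = ""):
        # Inpainting pipeline; model id, dtype and variant come from the
        # commented-out line visible in the diff.
        self.pipe = AutoPipelineForInpainting.from_pretrained(
            "diffusers/stable-diffusion-xl-1.0-inpainting-0.1",
            torch_dtype=torch.float16,
            variant="fp16",
        ).to("cuda")

        # self.pipe2 is not shown in the diff; it is assumed here to be a
        # second pipeline sharing the same weights as self.pipe.
        self.pipe2 = AutoPipelineForInpainting.from_pipe(self.pipe)
        self.pipe3 = AutoPipelineForImage2Image.from_pipe(self.pipe2)
        self.pipe3.enable_xformers_memory_efficient_attention()

    # Return annotation copied from the diff; the sketch below returns the
    # generated images instead.
    def __call__(self, data: Any) -> List[List[Dict[str, float]]]:
        # Payload parsing is illustrative; the real handler's input handling
        # is not part of this diff.
        inputs = data.get("inputs", {})
        prompt = inputs.get("prompt")
        negative_prompt = inputs.get("negative_prompt")
        image = inputs.get("image")
        mask_image = inputs.get("mask_image")
        num_inference_steps = inputs.get("num_inference_steps", 30)
        guidance_scale = inputs.get("guidance_scale", 7.5)

        # run inference pipeline (call taken verbatim from the diff)
        out = self.pipe(prompt=prompt, negative_prompt=negative_prompt,
                        image=image, mask_image=mask_image,
                        num_inference_steps=num_inference_steps,
                        guidance_scale=guidance_scale)
        return out.images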