lucianosb committed
Commit db9e2d6 · verified · 1 Parent(s): 96cfe2d

revert changes

Files changed (1)
  1. app.py +8 -68
app.py CHANGED
@@ -8,7 +8,7 @@ from diffusers import (
     UNet2DConditionModel,
     StableDiffusion3Pipeline
 )
-from transformers import BlipProcessor, BlipForConditionalGeneration, pipeline
+from transformers import BlipProcessor, BlipForConditionalGeneration
 from pathlib import Path
 from safetensors.torch import load_file
 from huggingface_hub import hf_hub_download
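The kept import covers the BLIP captioning path: `blip_caption_image`, called later in `generate_images_plots`, builds on `BlipProcessor` and `BlipForConditionalGeneration`. Below is a minimal sketch of such a helper, assuming the [Salesforce/blip-image-captioning-large](https://huggingface.co/Salesforce/blip-image-captioning-large) checkpoint linked in the app description; the helper name and prefix handling are illustrative, not the Space's exact code.

```python
# Hedged sketch: a BLIP captioning helper similar in spirit to the
# blip_caption_image() referenced later in app.py (not the Space's exact code).
from PIL import Image
from transformers import BlipProcessor, BlipForConditionalGeneration

processor = BlipProcessor.from_pretrained("Salesforce/blip-image-captioning-large")
model = BlipForConditionalGeneration.from_pretrained("Salesforce/blip-image-captioning-large")

def caption_image(image: Image.Image, prefix: str = "photo of a ") -> str:
    # Conditional captioning: the text prefix steers the caption toward a noun
    # phrase ("photo of a woman ...") that a gender heuristic can then scan.
    inputs = processor(image, prefix, return_tensors="pt")
    out = model.generate(**inputs, max_new_tokens=30)
    return processor.decode(out[0], skip_special_tokens=True)
```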
@@ -86,44 +86,6 @@ def genderplot(genlist):
         ax.add_patch(plt.Rectangle((0, 0), 1, 1, color=word_colors[i]))
     return fig
 
-def age_detector(image):
-    pipe = pipeline('image-classification', model="dima806/faces_age_detection", device=0)
-    result = pipe(image)
-    print(result)
-    filtered_labels = [item['label'] for item in result if item['score'] > 0.85]
-    print(filtered_labels)
-    return filtered_labels
-
-def ageplot(agelist):
-    order = ["YOUNG", "MIDDLE", "OLD"]
-    words = sorted(agelist, key=lambda x: order.index(x))
-    colors = {"YOUNG": "lightblue", "MIDDLE": "blue", "OLD": "darkblue", "Unsure": "lightgrey"}
-    word_colors = [colors[word] for word in words]
-    fig, axes = plt.subplots(2, 5, figsize=(5,5))
-    plt.subplots_adjust(hspace=0.1, wspace=0.1)
-    for i, ax in enumerate(axes.flat):
-        ax.set_axis_off()
-        ax.add_patch(plt.Rectangle((0, 0), 1, 1, color=word_colors[i]))
-    return fig
-
-def is_nsfw(image):
-    classifier = pipeline("image-classification", model="Falconsai/nsfw_image_detection")
-    result = classifier(image)
-    filtered_labels = [item['label'] for item in result if item['score'] > 0.85]
-    return filtered_labels[0]
-
-def nsfwplot(nsfwlist):
-    order = ["normal", "nsfw"]
-    words = sorted(nsfwlist, key=lambda x: order.index(x))
-    colors = {"normal": "lightred", "nsfw": "darkred", "Unsure": "lightgrey"}
-    word_colors = [colors[word] for word in words]
-    fig, axes = plt.subplots(2, 5, figsize=(5,5))
-    plt.subplots_adjust(hspace=0.1, wspace=0.1)
-    for i, ax in enumerate(axes.flat):
-        ax.set_axis_off()
-        ax.add_patch(plt.Rectangle((0, 0), 1, 1, color=word_colors[i]))
-    return fig
-
 def skintoneplot(hex_codes):
     hex_codes = [code for code in hex_codes if code is not None]
     rgb_values = [hex2color(hex_code) for hex_code in hex_codes]
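Both detectors removed in this hunk follow the same pattern: run a `transformers` image-classification pipeline and keep only the labels whose score clears a fixed threshold. A minimal sketch of that pattern, using the model IDs from the deleted code (the generic helper name is illustrative):

```python
# Hedged sketch of the threshold-filtering pattern behind the removed
# age_detector()/is_nsfw() helpers; classify_with_threshold is an illustrative name.
from transformers import pipeline

def classify_with_threshold(image, model_id, threshold=0.85):
    clf = pipeline("image-classification", model=model_id)
    results = clf(image)  # list of {"label": ..., "score": ...} dicts
    return [r["label"] for r in results if r["score"] > threshold]

# Mirroring the removed helpers (model IDs taken from the deleted code):
# age_labels  = classify_with_threshold(img, "dima806/faces_age_detection")
# nsfw_labels = classify_with_threshold(img, "Falconsai/nsfw_image_detection")
```

Note that the removed `is_nsfw` returned `filtered_labels[0]`, which raises an `IndexError` whenever no label clears the 0.85 threshold; returning a fallback label such as `"Unsure"` would avoid that.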
@@ -146,13 +108,9 @@ def generate_images_plots(prompt, model_name):
     images = [getimgen(prompt, model_name) for _ in range(10)]
     genders = []
     skintones = []
-    ages = []
-    nsfws = []
     for image, i in zip(images, range(10)):
         prompt_prefix = "photo of a "
         caption = blip_caption_image(image, prefix=prompt_prefix)
-        age = age_detector(image)
-        nsfw = is_nsfw(image)
         image.save(f"{foldername}/image_{i}.png")
         try:
             skintoneres = stone.process(f"{foldername}/image_{i}.png", return_report_image=False)
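The plotting helpers in this file (`genderplot`, `skintoneplot`, and the removed `ageplot`/`nsfwplot`) all share one recipe: map each of the 10 per-image results to a colour and paint a 2×5 grid of rectangles. A standalone sketch of that recipe follows (illustrative, not the Space's code); note that `"lightred"` in the removed `nsfwplot` is not a named matplotlib colour, so a valid name such as `"lightcoral"` is used here.

```python
# Hedged sketch of the shared 2x5 grid-plot recipe; plot_grid is an illustrative name.
import matplotlib.pyplot as plt

def plot_grid(labels, colors, fallback="lightgrey"):
    word_colors = [colors.get(label, fallback) for label in labels]
    fig, axes = plt.subplots(2, 5, figsize=(5, 5))
    plt.subplots_adjust(hspace=0.1, wspace=0.1)
    for ax, color in zip(axes.flat, word_colors):
        ax.set_axis_off()
        ax.add_patch(plt.Rectangle((0, 0), 1, 1, color=color))
    return fig

# e.g. an NSFW-style grid with valid colour names:
# fig = plot_grid(nsfw_labels, {"normal": "lightcoral", "nsfw": "darkred"})
```

Using `colors.get(..., fallback)` and `zip` also sidesteps the `KeyError`/`IndexError` the originals hit when a label is missing from the colour map or fewer than 10 labels are collected.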
@@ -161,46 +119,31 @@ def generate_images_plots(prompt, model_name):
         except:
             skintones.append(None)
         genders.append(genderfromcaption(caption))
-        ages.append(age)
-        nsfws.append(nsfw)
-    return images, skintoneplot(skintones), genderplot(genders), ageplot(ages), nsfwplot(nsfws)
+    return images, skintoneplot(skintones), genderplot(genders)
 
-with gr.Blocks(title="Bias detection in SinteticoXL Models") as demo:
-    gr.Markdown("# Bias detection in SinteticoXL Models")
+with gr.Blocks(title="Skin Tone and Gender bias in Text-to-Image Generation Models") as demo:
+    gr.Markdown("# Skin Tone and Gender bias in Text to Image Models")
     gr.Markdown('''
-In this demo, we explore the potential biases in text-to-image models by generating multiple images based on user prompts and analyzing the gender, skin tone, and age of the generated subjects as well as the potential for NSFW content. Here's how the analysis works:
+In this demo, we explore the potential biases in text-to-image models by generating multiple images based on user prompts and analyzing the gender and skin tone of the generated subjects. Here's how the analysis works:
 
 1. **Image Generation**: For each prompt, 10 images are generated using the selected model.
 2. **Gender Detection**: The [BLIP caption generator](https://huggingface.co/Salesforce/blip-image-captioning-large) is used to elicit gender markers by identifying words like "man," "boy," "woman," and "girl" in the captions.
 3. **Skin Tone Classification**: The [skin-tone-classifier library](https://github.com/ChenglongMa/SkinToneClassifier) is used to extract the skin tones of the generated subjects.
-4. **Age Detection**: The [Faces Age Detection model](https://huggingface.co/dima806/faces_age_detection) is used to identify the age of the generated subjects.
-5. **NSFW Detection**: The [Falconsai/nsfw_image_detection](https://huggingface.co/Falconsai/nsfw_image_detection) model is used to identify whether the generated images are NSFW (not safe for work).
 
 ## Models
 
 - Sintetico XL: a merged model with my favorite aesthetics
 - Sintetico XL Prude: a SFW version that aims to remove unwanted nudity and sexual content.
 
-
-''')
-    with gr.Accordion("Open for More Information!", open=False):
-        gr.Markdown('''
-This space was clone from [JournalistsonHF/text-to-image-bias](https://huggingface.co/spaces/JournalistsonHF/text-to-image-bias).
-
-👉 It's also in line with "Stable Bias" work by Hugging Face's ML & Society team: https://huggingface.co/spaces/society-ethics/StableBias
-
-This demo provides an insightful look into how current text-to-image models handle sensitive attributes, shedding light on areas for improvement and further study.
-[Here is an article](https://medium.com/@evijit/analysis-of-ai-generated-images-of-indian-people-for-colorism-and-sexism-b80ff946759f) showing how this space can be used to perform such analyses, using colorism and sexism in India as an example.
-
 #### Visualization
 
 We create visual grids to represent the data:
 
 - **Skin Tone Grids**: Skin tones are plotted as exact hex codes rather than using the Fitzpatrick scale, which can be [problematic and limiting for darker skin tones](https://arxiv.org/pdf/2309.05148).
 - **Gender Grids**: Light green denotes men, dark green denotes women, and grey denotes cases where the BLIP caption did not specify a binary gender.
-- **Age Grids**: Light blue denotes people between 18 and 30, blue denotes people between 30 and 50, and dark blue denotes people older than 50.
-- **NSFW Grids**: Light red denotes SFW images, and dark red denotes NSFW images.
 
+This demo provides an insightful look into how current text-to-image models handle sensitive attributes, shedding light on areas for improvement and further study.
+[Here is an article](https://medium.com/@evijit/analysis-of-ai-generated-images-of-indian-people-for-colorism-and-sexism-b80ff946759f) showing how this space can be used to perform such analyses, using colorism and sexism in India as an example.
 ''')
     model_dropdown = gr.Dropdown(
         label="Choose a model",
@@ -224,9 +167,6 @@ We create visual grids to represent the data:
     with gr.Row(equal_height=True):
         skinplot = gr.Plot(label="Skin Tone")
         genplot = gr.Plot(label="Gender")
-    with gr.Row(equal_height=True):
-        ageplot = gr.Plot(label="Age")
-        nsfwplot = gr.Plot(label="NSFW")
-    btn.click(generate_images_plots, inputs=[prompt, model_dropdown], outputs=[gallery, skinplot, genplot, ageplot, nsfwplot])
+    btn.click(generate_images_plots, inputs=[prompt, model_dropdown], outputs=[gallery, skinplot, genplot])
 
 demo.launch(debug=True)
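After the revert, the click handler fans the three return values of `generate_images_plots` out to the gallery and the two remaining plots. Below is a self-contained sketch of that Blocks wiring pattern with a stand-in function; component names mirror the diff, but the stub logic and dropdown choices are illustrative.

```python
# Hedged, self-contained sketch of the Blocks wiring used above: one click
# event fans a single function's return values out to several components.
import gradio as gr

def analyze(prompt, model_name):
    # Stand-in for generate_images_plots(): one return value per output component.
    return [], None, None  # gallery items, skin-tone figure, gender figure

with gr.Blocks(title="Wiring sketch") as sketch:
    prompt = gr.Textbox(label="Enter the prompt")
    model_dropdown = gr.Dropdown(label="Choose a model",
                                 choices=["Sintetico XL", "Sintetico XL Prude"])
    btn = gr.Button("Generate images")
    gallery = gr.Gallery(label="Generated images")
    with gr.Row(equal_height=True):
        skinplot = gr.Plot(label="Skin Tone")
        genplot = gr.Plot(label="Gender")
    # The function's return values map positionally onto the listed outputs.
    btn.click(analyze, inputs=[prompt, model_dropdown], outputs=[gallery, skinplot, genplot])

if __name__ == "__main__":
    sketch.launch()
```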
 