Mario Faúndez Vidal committed on
Commit
5c9a1db
·
1 Parent(s): 65dc75d

build(deps): specify gradio version constraint

Browse files

Add minimum version constraint (>=5.0.0) for gradio dependency
in pyproject.toml to ensure compatibility with latest features.

Updates:
- pyproject.toml: Add gradio>=5.0.0 constraint
- requirements.txt: Update dependencies with gradio 5.49.1
- uv.lock: Update lock file with resolved dependencies
- .github/copilot-instructions.md: Add commit guidelines

Files changed (4) hide show
  1. .github/copilot-instructions.md +83 -0
  2. pyproject.toml +1 -1
  3. requirements.txt +60 -75
  4. uv.lock +1 -1
.github/copilot-instructions.md ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # GitHub Copilot Instructions
2
+
3
+ ## Commit Message Guidelines
4
+
5
+ ### Conventional Commits
6
+ Use the Conventional Commits specification for all commit messages:
7
+
8
+ ```
9
+ <type>(<scope>): <subject>
10
+
11
+ <body>
12
+
13
+ <footer>
14
+ ```
15
+
16
+ ### Types
17
+ - `feat`: New feature
18
+ - `fix`: Bug fix
19
+ - `docs`: Documentation changes
20
+ - `style`: Code style changes (formatting, no logic changes)
21
+ - `refactor`: Code refactoring
22
+ - `perf`: Performance improvements
23
+ - `test`: Adding or updating tests
24
+ - `build`: Build system or dependencies changes
25
+ - `ci`: CI/CD configuration changes
26
+ - `chore`: Other changes that don't modify src or test files
27
+
28
+ ### Best Practices
29
+
30
+ 1. **Keep subject line short**: Max 50 characters
31
+ 2. **Use imperative mood**: "add" not "added" or "adds"
32
+ 3. **No period at the end**: Subject line should not end with a period
33
+ 4. **Capitalize first letter**: Start subject with capital letter
34
+ 5. **Separate subject from body**: Leave a blank line between subject and body
35
+ 6. **Wrap body at 72 characters**: For better readability
36
+ 7. **Use body to explain what and why**: Not how (code explains how)
37
+ 8. **Reference issues**: Use footer to reference issues (e.g., "Closes #123")
38
+
39
+ ### Examples
40
+
41
+ Good:
42
+ ```
43
+ feat(api): add hate speech detection endpoint
44
+
45
+ Implement new REST API endpoint that uses BERT model
46
+ to classify text for hate speech content.
47
+
48
+ Closes #42
49
+ ```
50
+
51
+ ```
52
+ fix(deps): update backports-strenum to 1.2.8
53
+
54
+ Fixes Python 3.11 compatibility issue in Hugging Face Space build.
55
+ ```
56
+
57
+ ```
58
+ docs: update README with deployment instructions
59
+ ```
60
+
61
+ Bad:
62
+ ```
63
+ Fixed a bug in the code that was causing problems.
64
+ ```
65
+
66
+ ```
67
+ WIP: changes to the model
68
+ ```
69
+
70
+ ```
71
+ Updated dependencies and fixed some issues with the gradio interface and also refactored the code
72
+ ```
73
+
74
+ ## Code Quality Guidelines
75
+
76
+ - Follow PEP 8 for Python code
77
+ - Use type hints where applicable
78
+ - Write docstrings for functions and classes
79
+ - Keep functions small and focused (single responsibility)
80
+ - Use meaningful variable and function names
81
+ - Add comments for complex logic only
82
+ - Prefer composition over inheritance
83
+ - Write unit tests for new features
pyproject.toml CHANGED
@@ -11,7 +11,7 @@ requires-python = ">=3.11,<3.12"
11
  license = {text = "MIT"}
12
 
13
  dependencies = [
14
- "gradio",
15
  "tensorflow-text",
16
  "tf-models-official",
17
  ]
 
11
  license = {text = "MIT"}
12
 
13
  dependencies = [
14
+ "gradio>=5.0.0",
15
  "tensorflow-text",
16
  "tf-models-official",
17
  ]
requirements.txt CHANGED
@@ -13,34 +13,25 @@ absl-py==1.4.0
13
  # tf-slim
14
  ai-edge-litert==2.0.2
15
  # via tf-models-official
16
- aiofiles==25.1.0
17
- # via gradio
18
- aiohappyeyeballs==2.6.1
19
- # via aiohttp
20
- aiohttp==3.13.0
21
- # via gradio
22
- aiosignal==1.4.0
23
- # via aiohttp
24
- altair==5.5.0
25
  # via gradio
26
  annotated-types==0.7.0
27
  # via pydantic
28
  anyio==4.11.0
29
  # via
 
30
  # httpx
31
  # starlette
32
  astunparse==1.6.3
33
  # via tensorflow
34
  attrs==25.4.0
35
- # via
36
- # aiohttp
37
- # dm-tree
38
- # jsonschema
39
- # referencing
40
- backports-strenum==1.3.1
41
  # via ai-edge-litert
42
  bleach==6.2.0
43
  # via kaggle
 
 
44
  cachetools==6.2.0
45
  # via google-auth
46
  certifi==2025.10.5
@@ -54,7 +45,9 @@ charset-normalizer==3.4.3
54
  # kaggle
55
  # requests
56
  click==8.3.0
57
- # via uvicorn
 
 
58
  colorama==0.4.6
59
  # via sacrebleu
60
  contourpy==1.3.3
@@ -77,20 +70,19 @@ fastapi==0.118.3
77
  # via gradio
78
  ffmpy==0.6.2
79
  # via gradio
 
 
80
  flatbuffers==25.9.23
81
  # via
82
  # ai-edge-litert
83
  # tensorflow
84
  fonttools==4.60.1
85
  # via matplotlib
86
- frozenlist==1.8.0
87
- # via
88
- # aiohttp
89
- # aiosignal
90
  fsspec==2025.9.0
91
  # via
92
  # etils
93
- # gradio
 
94
  gast==0.6.0
95
  # via tensorflow
96
  gin-config==0.5.0
@@ -112,8 +104,12 @@ googleapis-common-protos==1.70.0
112
  # via
113
  # google-api-core
114
  # tensorflow-metadata
115
- gradio==3.20.1
116
  # via classify-text-with-bert-hate-speech (pyproject.toml)
 
 
 
 
117
  grpcio==1.75.1
118
  # via
119
  # tensorboard
@@ -126,6 +122,8 @@ h5py==3.14.0
126
  # via
127
  # keras
128
  # tensorflow
 
 
129
  httpcore==1.0.9
130
  # via httpx
131
  httplib2==0.31.0
@@ -134,14 +132,20 @@ httplib2==0.31.0
134
  # google-auth-httplib2
135
  # oauth2client
136
  httpx==0.28.1
137
- # via gradio
 
 
 
 
 
 
 
138
  idna==3.10
139
  # via
140
  # anyio
141
  # httpx
142
  # kaggle
143
  # requests
144
- # yarl
145
  immutabledict==4.2.1
146
  # via
147
  # tensorflow-datasets
@@ -149,15 +153,9 @@ immutabledict==4.2.1
149
  importlib-resources==6.5.2
150
  # via etils
151
  jinja2==3.1.6
152
- # via
153
- # altair
154
- # gradio
155
  joblib==1.5.2
156
  # via scikit-learn
157
- jsonschema==4.25.1
158
- # via altair
159
- jsonschema-specifications==2025.9.1
160
- # via jsonschema
161
  kaggle==1.7.4.5
162
  # via tf-models-official
163
  keras==3.11.3
@@ -166,42 +164,27 @@ kiwisolver==1.4.9
166
  # via matplotlib
167
  libclang==18.1.1
168
  # via tensorflow
169
- linkify-it-py==2.0.3
170
- # via markdown-it-py
171
  lxml==6.0.2
172
  # via sacrebleu
173
  markdown==3.9
174
  # via tensorboard
175
  markdown-it-py==2.2.0
176
- # via
177
- # gradio
178
- # mdit-py-plugins
179
- # rich
180
  markupsafe==3.0.3
181
  # via
182
  # gradio
183
  # jinja2
184
  # werkzeug
185
  matplotlib==3.10.7
186
- # via
187
- # gradio
188
- # tf-models-official
189
- mdit-py-plugins==0.3.3
190
- # via gradio
191
  mdurl==0.1.2
192
  # via markdown-it-py
193
  ml-dtypes==0.5.3
194
  # via
195
  # keras
196
  # tensorflow
197
- multidict==6.7.0
198
- # via
199
- # aiohttp
200
- # yarl
201
  namex==0.1.0
202
  # via keras
203
- narwhals==2.7.0
204
- # via altair
205
  numpy==1.26.4
206
  # via
207
  # ai-edge-litert
@@ -238,7 +221,9 @@ orjson==3.11.3
238
  # via gradio
239
  packaging==25.0
240
  # via
241
- # altair
 
 
242
  # keras
243
  # matplotlib
244
  # tensorboard
@@ -256,10 +241,6 @@ portalocker==3.2.0
256
  # via sacrebleu
257
  promise==2.3
258
  # via tensorflow-datasets
259
- propcache==0.4.1
260
- # via
261
- # aiohttp
262
- # yarl
263
  proto-plus==1.26.1
264
  # via google-api-core
265
  protobuf==5.29.5
@@ -292,13 +273,11 @@ pyasn1-modules==0.4.2
292
  # oauth2client
293
  pycocotools==2.0.10
294
  # via tf-models-official
295
- pycryptodome==3.23.0
296
- # via gradio
297
- pydantic==2.12.0
298
  # via
299
  # fastapi
300
  # gradio
301
- pydantic-core==2.41.1
302
  # via pydantic
303
  pydub==0.25.1
304
  # via gradio
@@ -322,38 +301,39 @@ pytz==2025.2
322
  pyyaml==6.0.3
323
  # via
324
  # gradio
 
325
  # tf-models-official
326
- referencing==0.36.2
327
- # via
328
- # jsonschema
329
- # jsonschema-specifications
330
  regex==2025.9.18
331
  # via sacrebleu
332
  requests==2.32.5
333
  # via
334
  # google-api-core
335
- # gradio
336
  # kaggle
337
  # tensorflow
338
  # tensorflow-datasets
339
  rich==14.2.0
340
- # via keras
341
- rpds-py==0.27.1
342
  # via
343
- # jsonschema
344
- # referencing
345
  rsa==4.9.1
346
  # via
347
  # google-auth
348
  # oauth2client
 
 
349
  sacrebleu==2.5.1
350
  # via tf-models-official
 
 
351
  scikit-learn==1.7.2
352
  # via seqeval
353
  scipy==1.16.2
354
  # via
355
  # scikit-learn
356
  # tf-models-official
 
 
357
  sentencepiece==0.2.1
358
  # via tf-models-official
359
  seqeval==1.2.2
@@ -363,6 +343,8 @@ setuptools==80.9.0
363
  # kaggle
364
  # tensorboard
365
  # tensorflow
 
 
366
  simple-parsing==0.1.7
367
  # via tensorflow-datasets
368
  six==1.17.0
@@ -380,7 +362,9 @@ six==1.17.0
380
  sniffio==1.3.1
381
  # via anyio
382
  starlette==0.48.0
383
- # via fastapi
 
 
384
  tabulate==0.9.0
385
  # via sacrebleu
386
  tensorboard==2.19.0
@@ -424,36 +408,39 @@ threadpoolctl==3.6.0
424
  # via scikit-learn
425
  toml==0.10.2
426
  # via tensorflow-datasets
 
 
427
  tqdm==4.67.1
428
  # via
429
  # ai-edge-litert
430
  # etils
 
431
  # kaggle
432
  # tensorflow-datasets
 
 
433
  typing-extensions==4.15.0
434
  # via
435
  # ai-edge-litert
436
- # aiosignal
437
- # altair
438
  # anyio
439
  # etils
440
  # fastapi
441
  # gradio
 
442
  # grpcio
 
443
  # optree
444
  # pydantic
445
  # pydantic-core
446
- # referencing
447
  # simple-parsing
448
  # starlette
449
  # tensorflow
 
450
  # typing-inspection
451
  typing-inspection==0.4.2
452
  # via pydantic
453
  tzdata==2025.2
454
  # via pandas
455
- uc-micro-py==1.0.3
456
- # via linkify-it-py
457
  uritemplate==4.2.0
458
  # via google-api-python-client
459
  urllib3==2.5.0
@@ -467,7 +454,7 @@ webencodings==0.5.1
467
  # bleach
468
  # kaggle
469
  websockets==15.0.1
470
- # via gradio
471
  werkzeug==3.1.3
472
  # via tensorboard
473
  wheel==0.45.1
@@ -477,7 +464,5 @@ wrapt==1.17.3
477
  # dm-tree
478
  # tensorflow
479
  # tensorflow-datasets
480
- yarl==1.22.0
481
- # via aiohttp
482
  zipp==3.23.0
483
  # via etils
 
13
  # tf-slim
14
  ai-edge-litert==2.0.2
15
  # via tf-models-official
16
+ aiofiles==24.1.0
 
 
 
 
 
 
 
 
17
  # via gradio
18
  annotated-types==0.7.0
19
  # via pydantic
20
  anyio==4.11.0
21
  # via
22
+ # gradio
23
  # httpx
24
  # starlette
25
  astunparse==1.6.3
26
  # via tensorflow
27
  attrs==25.4.0
28
+ # via dm-tree
29
+ backports-strenum==1.2.8
 
 
 
 
30
  # via ai-edge-litert
31
  bleach==6.2.0
32
  # via kaggle
33
+ brotli==1.1.0
34
+ # via gradio
35
  cachetools==6.2.0
36
  # via google-auth
37
  certifi==2025.10.5
 
45
  # kaggle
46
  # requests
47
  click==8.3.0
48
+ # via
49
+ # typer
50
+ # uvicorn
51
  colorama==0.4.6
52
  # via sacrebleu
53
  contourpy==1.3.3
 
70
  # via gradio
71
  ffmpy==0.6.2
72
  # via gradio
73
+ filelock==3.20.0
74
+ # via huggingface-hub
75
  flatbuffers==25.9.23
76
  # via
77
  # ai-edge-litert
78
  # tensorflow
79
  fonttools==4.60.1
80
  # via matplotlib
 
 
 
 
81
  fsspec==2025.9.0
82
  # via
83
  # etils
84
+ # gradio-client
85
+ # huggingface-hub
86
  gast==0.6.0
87
  # via tensorflow
88
  gin-config==0.5.0
 
104
  # via
105
  # google-api-core
106
  # tensorflow-metadata
107
+ gradio==5.49.1
108
  # via classify-text-with-bert-hate-speech (pyproject.toml)
109
+ gradio-client==1.13.3
110
+ # via gradio
111
+ groovy==0.1.2
112
+ # via gradio
113
  grpcio==1.75.1
114
  # via
115
  # tensorboard
 
122
  # via
123
  # keras
124
  # tensorflow
125
+ hf-xet==1.1.10
126
+ # via huggingface-hub
127
  httpcore==1.0.9
128
  # via httpx
129
  httplib2==0.31.0
 
132
  # google-auth-httplib2
133
  # oauth2client
134
  httpx==0.28.1
135
+ # via
136
+ # gradio
137
+ # gradio-client
138
+ # safehttpx
139
+ huggingface-hub==0.35.3
140
+ # via
141
+ # gradio
142
+ # gradio-client
143
  idna==3.10
144
  # via
145
  # anyio
146
  # httpx
147
  # kaggle
148
  # requests
 
149
  immutabledict==4.2.1
150
  # via
151
  # tensorflow-datasets
 
153
  importlib-resources==6.5.2
154
  # via etils
155
  jinja2==3.1.6
156
+ # via gradio
 
 
157
  joblib==1.5.2
158
  # via scikit-learn
 
 
 
 
159
  kaggle==1.7.4.5
160
  # via tf-models-official
161
  keras==3.11.3
 
164
  # via matplotlib
165
  libclang==18.1.1
166
  # via tensorflow
 
 
167
  lxml==6.0.2
168
  # via sacrebleu
169
  markdown==3.9
170
  # via tensorboard
171
  markdown-it-py==2.2.0
172
+ # via rich
 
 
 
173
  markupsafe==3.0.3
174
  # via
175
  # gradio
176
  # jinja2
177
  # werkzeug
178
  matplotlib==3.10.7
179
+ # via tf-models-official
 
 
 
 
180
  mdurl==0.1.2
181
  # via markdown-it-py
182
  ml-dtypes==0.5.3
183
  # via
184
  # keras
185
  # tensorflow
 
 
 
 
186
  namex==0.1.0
187
  # via keras
 
 
188
  numpy==1.26.4
189
  # via
190
  # ai-edge-litert
 
221
  # via gradio
222
  packaging==25.0
223
  # via
224
+ # gradio
225
+ # gradio-client
226
+ # huggingface-hub
227
  # keras
228
  # matplotlib
229
  # tensorboard
 
241
  # via sacrebleu
242
  promise==2.3
243
  # via tensorflow-datasets
 
 
 
 
244
  proto-plus==1.26.1
245
  # via google-api-core
246
  protobuf==5.29.5
 
273
  # oauth2client
274
  pycocotools==2.0.10
275
  # via tf-models-official
276
+ pydantic==2.11.10
 
 
277
  # via
278
  # fastapi
279
  # gradio
280
+ pydantic-core==2.33.2
281
  # via pydantic
282
  pydub==0.25.1
283
  # via gradio
 
301
  pyyaml==6.0.3
302
  # via
303
  # gradio
304
+ # huggingface-hub
305
  # tf-models-official
 
 
 
 
306
  regex==2025.9.18
307
  # via sacrebleu
308
  requests==2.32.5
309
  # via
310
  # google-api-core
311
+ # huggingface-hub
312
  # kaggle
313
  # tensorflow
314
  # tensorflow-datasets
315
  rich==14.2.0
 
 
316
  # via
317
+ # keras
318
+ # typer
319
  rsa==4.9.1
320
  # via
321
  # google-auth
322
  # oauth2client
323
+ ruff==0.14.0
324
+ # via gradio
325
  sacrebleu==2.5.1
326
  # via tf-models-official
327
+ safehttpx==0.1.6
328
+ # via gradio
329
  scikit-learn==1.7.2
330
  # via seqeval
331
  scipy==1.16.2
332
  # via
333
  # scikit-learn
334
  # tf-models-official
335
+ semantic-version==2.10.0
336
+ # via gradio
337
  sentencepiece==0.2.1
338
  # via tf-models-official
339
  seqeval==1.2.2
 
343
  # kaggle
344
  # tensorboard
345
  # tensorflow
346
+ shellingham==1.5.4
347
+ # via typer
348
  simple-parsing==0.1.7
349
  # via tensorflow-datasets
350
  six==1.17.0
 
362
  sniffio==1.3.1
363
  # via anyio
364
  starlette==0.48.0
365
+ # via
366
+ # fastapi
367
+ # gradio
368
  tabulate==0.9.0
369
  # via sacrebleu
370
  tensorboard==2.19.0
 
408
  # via scikit-learn
409
  toml==0.10.2
410
  # via tensorflow-datasets
411
+ tomlkit==0.13.3
412
+ # via gradio
413
  tqdm==4.67.1
414
  # via
415
  # ai-edge-litert
416
  # etils
417
+ # huggingface-hub
418
  # kaggle
419
  # tensorflow-datasets
420
+ typer==0.19.2
421
+ # via gradio
422
  typing-extensions==4.15.0
423
  # via
424
  # ai-edge-litert
 
 
425
  # anyio
426
  # etils
427
  # fastapi
428
  # gradio
429
+ # gradio-client
430
  # grpcio
431
+ # huggingface-hub
432
  # optree
433
  # pydantic
434
  # pydantic-core
 
435
  # simple-parsing
436
  # starlette
437
  # tensorflow
438
+ # typer
439
  # typing-inspection
440
  typing-inspection==0.4.2
441
  # via pydantic
442
  tzdata==2025.2
443
  # via pandas
 
 
444
  uritemplate==4.2.0
445
  # via google-api-python-client
446
  urllib3==2.5.0
 
454
  # bleach
455
  # kaggle
456
  websockets==15.0.1
457
+ # via gradio-client
458
  werkzeug==3.1.3
459
  # via tensorboard
460
  wheel==0.45.1
 
464
  # dm-tree
465
  # tensorflow
466
  # tensorflow-datasets
 
 
467
  zipp==3.23.0
468
  # via etils
uv.lock CHANGED
@@ -201,7 +201,7 @@ dev = [
201
 
202
  [package.metadata]
203
  requires-dist = [
204
- { name = "gradio" },
205
  { name = "tensorflow-text" },
206
  { name = "tf-models-official" },
207
  ]
 
201
 
202
  [package.metadata]
203
  requires-dist = [
204
+ { name = "gradio", specifier = ">=5.0.0" },
205
  { name = "tensorflow-text" },
206
  { name = "tf-models-official" },
207
  ]