Naela00 committed on
Commit
03d7ca5
·
1 Parent(s): 36993b6

adding a use-case example

Files changed (2)
  1. README.md +70 -6
  2. example_use.ipynb +469 -0
README.md CHANGED
@@ -118,13 +118,15 @@ Where:
 </tbody>
 </table>
 
-> e.g. `rec` is the model trained on an oversampled dataset for balance, with batches in an arbitrary order (`r`), and with CoT reasoning (`c`).
+> e.g. `rec` is the model trained on an oversampled dataset for balance (`e`), with batches in an arbitrary order (`r`), and with CoT reasoning (`c`).
 
 ### Example Usage
 
+You can find an example in [this notebook](example_use.ipynb).
+
 ```python
 import torch
-from transformers import AutoModelForCausalLM, AutoTokenizer
+from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig
 from peft import PeftModel
 
 # Choose which adapter to load
@@ -132,24 +134,86 @@ target_adapter_name = "rec"  # Among the following six configurations: "odc", "oeb", "oec", "rdc", "reb", "rec"
 
 # Load the base model
 base_model_name = "Qwen/Qwen3-4B"
-model = AutoModelForCausalLM.from_pretrained(base_model_name, device_map="auto")
-tokenizer = AutoTokenizer.from_pretrained(base_model_name)
+
+# For small GPUs, use 4-bit quantization
+bnb_config = BitsAndBytesConfig(
+    load_in_4bit=True,
+    bnb_4bit_use_double_quant=True,
+    bnb_4bit_quant_type="nf4",
+    bnb_4bit_compute_dtype=torch.float16,
+)
+
+# Load tokenizer
+tokenizer = AutoTokenizer.from_pretrained(
+    base_model_name,
+    use_fast=True,
+    trust_remote_code=True
+)
+tokenizer.padding_side = 'left'
+
+# Load model
+model = AutoModelForCausalLM.from_pretrained(
+    base_model_name,
+    quantization_config=bnb_config,
+    trust_remote_code=True,
+    sliding_window=None,
+)
+
+# Resize the model's token embeddings to match the tokenizer's vocabulary size
+model_embedding_size = model.get_input_embeddings().weight.size(0)
+tokenizer_vocab_size = len(tokenizer)
+model.resize_token_embeddings(tokenizer_vocab_size)
 
 # Load the specific adapter by name from the repository
 adapter_repo_id = "Naela00/ToxiFrench"
 model = PeftModel.from_pretrained(
     model,
     adapter_repo_id,
-    adapter_name=target_adapter_name # Precise which experiment to load
+    subfolder=target_adapter_name  # Among the following six configurations: "odc", "oeb", "oec", "rdc", "reb", "rec"
 )
 
-print(f"Successfully loaded the '{target_adapter_name}' adapter!")
+# Inference
+message_to_analyze = "Je suis vraiment déçu par ce film, c'était nul !"
+prompt = f"Message:\n{message_to_analyze}\n\nAnalyse:\n"
+if "c" in target_adapter_name:
+    prompt += "<think>\nExplication :\n"  # If using CoT, add the reasoning part
+
+max_new_tokens: int = 1024
+do_sample: bool = True
+temperature: float = 0.7
+top_p: float = 0.9
+top_k: int = 50
+repetition_penalty: float = 1.1
+
+inputs = tokenizer(
+    prompt,
+    return_tensors="pt",
+    padding=True,
+    truncation=True
+).to(model.device)
+
+default_generation_kwargs = {
+    "max_new_tokens": max_new_tokens,
+    "do_sample": do_sample,
+    "temperature": temperature,
+    "top_p": top_p,
+    "top_k": top_k,
+    "repetition_penalty": repetition_penalty,
+    "eos_token_id": tokenizer.eos_token_id,
+}
+
+outputs = model.generate(**inputs, **default_generation_kwargs)
+generated_text = tokenizer.decode(outputs[0], skip_special_tokens=False)
+
+print(generated_text)
 ```
 
 ---
 
 ## License
 
+[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg?style=flat-square)](./LICENSE)
+
 This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
 
 ---
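
The `generated_text` produced by the snippet above is free-form CoT output. If you need structured fields instead of raw text, a couple of regexes are enough. The sketch below is illustrative only and not part of this commit: it assumes the output format visible in the notebook's example output (a `Score de toxicité :` section followed by `N/10`, a `Labels :` code such as `S0/H0/V0/R0/A0/I2`, and a final `oui`/`non` verdict after the concluding question), and `parse_analysis` is a hypothetical helper name.

```python
import re

def parse_analysis(generated_text: str) -> dict:
    """Extract score, labels, and final verdict from a CoT analysis.

    Assumes the output format shown in example_use.ipynb; adjust the
    patterns if your adapter's output differs.
    """
    result = {"score": None, "labels": None, "toxic": None}

    # "Score de toxicité :" is followed by a line like "4/10"
    score_match = re.search(r"Score de toxicité :\s*(\d+)\s*/\s*10", generated_text)
    if score_match:
        result["score"] = int(score_match.group(1))

    # "Labels :" is followed by a compact code such as "S0/H0/V0/R0/A0/I2"
    labels_match = re.search(r"Labels :\s*([A-Z]\d(?:/[A-Z]\d)*)", generated_text)
    if labels_match:
        result["labels"] = {
            axis: int(level)
            for axis, level in re.findall(r"([A-Z])(\d)", labels_match.group(1))
        }

    # The final verdict ("oui"/"non") follows the concluding question
    verdict_match = re.search(
        r"ce message est-il toxique \?\s*(oui|non)\b", generated_text, re.IGNORECASE
    )
    if verdict_match:
        result["toxic"] = verdict_match.group(1).lower() == "oui"

    return result

# On the notebook's example output this returns:
# {"score": 4, "labels": {"S": 0, "H": 0, "V": 0, "R": 0, "A": 0, "I": 2}, "toxic": False}
```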
example_use.ipynb ADDED
@@ -0,0 +1,469 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "5946df15",
+   "metadata": {},
+   "source": [
+    "# Example use of the ToxiFrench model"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "209da6d2",
+   "metadata": {},
+   "source": [
+    "## Libraries"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "e421addd",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import torch\n",
+    "from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig\n",
+    "from peft import PeftModel\n",
+    "from rich.console import Console\n",
+    "from rich.panel import Panel\n",
+    "import os"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "332b3f5c",
+   "metadata": {},
+   "source": [
+    "## Global settings and variables"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "a14bba57",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# If you are using a proxy, set it up here (optional; comment these lines out if not needed)\n",
+    "os.environ[\"HTTP_PROXY\"] = \"socks5h://127.0.0.1:1080\"\n",
+    "os.environ[\"HTTPS_PROXY\"] = \"socks5h://127.0.0.1:1080\"\n",
+    "\n",
+    "# Choose which adapter to load\n",
+    "target_adapter_name = \"rec\"  # Among the following six configurations: \"odc\", \"oeb\", \"oec\", \"rdc\", \"reb\", \"rec\"\n",
+    "\n",
+    "# Base model to load\n",
+    "base_model_name = \"Qwen/Qwen3-4B\""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "id": "b2a86231",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "console = Console()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "3bbaa5bf",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# For small GPUs, use 4-bit quantization\n",
+    "bnb_config = BitsAndBytesConfig(\n",
+    "    load_in_4bit=True,\n",
+    "    bnb_4bit_use_double_quant=True,\n",
+    "    bnb_4bit_quant_type=\"nf4\",\n",
+    "    bnb_4bit_compute_dtype=torch.float16,\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "560465f5",
+   "metadata": {},
+   "source": [
+    "## Load the model"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "id": "44494b9f",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "tokenizer = AutoTokenizer.from_pretrained(\n",
+    "    base_model_name,\n",
+    "    use_fast=True,\n",
+    "    trust_remote_code=True\n",
+    ")\n",
+    "\n",
+    "if tokenizer.pad_token is None:\n",
+    "    tokenizer.pad_token = tokenizer.eos_token\n",
+    "    print(\"Tokenizer `pad_token` was None, set to `eos_token`.\")\n",
+    "\n",
+    "tokenizer.padding_side = 'left'"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "id": "4b90a147",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "application/vnd.jupyter.widget-view+json": {
+       "model_id": "6796b5b76bc2493daf8fdc0169de053f",
+       "version_major": 2,
+       "version_minor": 0
+      },
+      "text/plain": [
+       "Loading checkpoint shards:   0%|          | 0/3 [00:00<?, ?it/s]"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "model = AutoModelForCausalLM.from_pretrained(\n",
+    "    base_model_name,\n",
+    "    quantization_config=bnb_config,\n",
+    "    trust_remote_code=True,\n",
+    "    sliding_window=None,\n",
+    ")\n",
+    "if model.generation_config.pad_token_id is None and tokenizer.pad_token_id is not None:\n",
+    "    model.generation_config.pad_token_id = tokenizer.pad_token_id\n",
+    "    print(\"Model `generation_config.pad_token_id` set from tokenizer.\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "id": "5c105265",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "Warning: Vocab size mismatch. Model embeddings: 151936, Tokenizer vocab: 151669\n"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/plain": [
+       "Resizing model token embeddings to match tokenizer...\n"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/plain": [
+       "Resized model embeddings to: 151669\n"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "special_tokens_to_add = {\n",
+    "    \"additional_special_tokens\": [\n",
+    "    ]\n",
+    "}\n",
+    "tokenizer.add_special_tokens(special_tokens_to_add)\n",
+    "\n",
+    "model_embedding_size = model.get_input_embeddings().weight.size(0)\n",
+    "tokenizer_vocab_size = len(tokenizer)\n",
+    "\n",
+    "if model_embedding_size != tokenizer_vocab_size:\n",
+    "    console.print(f\"[yellow]Warning: Vocab size mismatch.[/yellow] \"\n",
+    "                  f\"Model embeddings: {model_embedding_size}, \"\n",
+    "                  f\"Tokenizer vocab: {tokenizer_vocab_size}\")\n",
+    "    console.print(\"Resizing model token embeddings to match tokenizer...\")\n",
+    "    model.resize_token_embeddings(tokenizer_vocab_size)\n",
+    "\n",
+    "    # Verify the resize\n",
+    "    new_embedding_size = model.get_input_embeddings().weight.size(0)\n",
+    "    console.print(f\"[bold green]Resized model embeddings to: {new_embedding_size}[/bold green]\")\n",
+    "else:\n",
+    "    console.print(\"Model embedding size and tokenizer vocabulary size are already aligned.\")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "id": "ed574fee",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Successfully loaded the 'rec' adapter!\n"
+     ]
+    }
+   ],
+   "source": [
+    "# Load the specific adapter by name from the repository\n",
+    "adapter_repo_id = \"Naela00/ToxiFrench\"\n",
+    "model = PeftModel.from_pretrained(\n",
+    "    model,\n",
+    "    adapter_repo_id,\n",
+    "    subfolder=target_adapter_name\n",
+    ")\n",
+    "\n",
+    "print(f\"Successfully loaded the '{target_adapter_name}' adapter!\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "7e4e92f5",
+   "metadata": {},
+   "source": [
+    "## Example of inference"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "id": "74b03044",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "prompt = \"Message:\\nputain mais elle est vraiment grand remplacée vot ville\\n\\nAnalyse:\\n\"\n",
+    "if \"c\" in target_adapter_name:\n",
+    "    prompt += \"<think>\\nExplication :\\n\""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 10,
+   "id": "74e4a114",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "max_new_tokens: int = 1024\n",
+    "do_sample: bool = True\n",
+    "temperature: float = 0.7\n",
+    "top_p: float = 0.9\n",
+    "top_k: int = 50\n",
+    "repetition_penalty: float = 1.1"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 11,
+   "id": "6d73a27e",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "inputs = tokenizer(\n",
+    "    prompt,\n",
+    "    return_tensors=\"pt\",\n",
+    "    padding=True,\n",
+    "    truncation=True\n",
+    ").to(model.device)\n",
+    "\n",
+    "default_generation_kwargs = {\n",
+    "    \"max_new_tokens\": max_new_tokens,\n",
+    "    \"do_sample\": do_sample,\n",
+    "    \"temperature\": temperature,\n",
+    "    \"top_p\": top_p,\n",
+    "    \"top_k\": top_k,\n",
+    "    \"repetition_penalty\": repetition_penalty,\n",
+    "    \"eos_token_id\": tokenizer.eos_token_id,\n",
+    "}\n",
+    "\n",
+    "outputs = model.generate(**inputs, **default_generation_kwargs)\n",
+    "generated_text = tokenizer.decode(outputs[0], skip_special_tokens=False)"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 12,
+   "id": "8f5c58a9",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "──────────────────────────────── Model Output ────────────────────────────────\n"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/plain": [
+       "\n",
+       "\n"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
+    {
+     "data": {
+      "text/plain": [
+       "Generated Text:\n",
+       "Message:\n",
+       "putain mais elle est vraiment grand remplacée vot ville\n",
+       "\n",
+       "Analyse:\n",
+       "<think>\n",
+       "Explication :\n",
+       "**Sujet du message :** Remplacement d'une personne dans une situation.\n",
+       "\n",
+       "**Résumé et explication :** L'auteur exprime son indignation ou sa surprise face à un remplacement, en utilisant le terme \"grand\" qui pourrait indiquer une grande importance. Le contexte n'est pas précisé.\n",
+       "</think>\n",
+       "<think>\n",
+       "Tons :\n",
+       "Les tons perçus sont :\n",
+       "\n",
+       "1. **Indigné (certitude élevée)** - L'utilisation de « putain » montre une frustration claire face au remplacement.\n",
+       "2. **Surpris (certitude modérée)** - L'expression « mais elle est vraiment » suggère une surprise quant à la situation.\n",
+       "3. **Informel (certitude élevée)** - Le langage familier et l'absence de formalité renforcent ce ton.\n",
+       "\n",
+       "Doutes sur la signification exacte des termes, notamment « grand », pouvant indiquer une opinion personnelle.\n",
+       "</think>\n",
+       "<think>\n",
+       "Labels :\n",
+       "S0/H0/V0/R0/A0/I2\n",
+       "\n",
+       "Justification :\n",
+       "Le message contient une vulgarité légère avec un ton informel, sans allusion sexuelle ni violence, et manifeste une critique moqueuse ou désapprobatrice sans intention hostile directe.\n",
+       "</think>\n",
+       "<think>\n",
+       "Score de toxicité :\n",
+       "4/10\n",
+       "\n",
+       "Justification :\n",
+       "Vulgarité et condescendance présentes, mais sans haine explicite ni incitation grave. Ton moqueur peut heurter certaines sensibilités, mais reste limité.\n",
+       "</think>\n",
+       "\n",
+       "En conclusion, ce message est-il toxique ?\n",
+       "\n",
+       "non<|im_end|>\n"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    }
+   ],
+   "source": [
+    "console.rule(\"[bold magenta]Model Output\")\n",
+    "console.print('\\n')\n",
+    "console.print(Panel.fit(generated_text, title=\"Generated Text\", style=\"bold white on black\"))"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "SJTU",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.10.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
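
A closing note on one design choice in the notebook: `tokenizer.padding_side = 'left'` has no visible effect in the single-prompt example, but it is what makes batched generation work with a decoder-only model like Qwen3, since generation continues from each sequence's last token and right padding would leave pad tokens between the prompt and the generation. Below is a minimal sketch of batched scoring, assuming `model` and `tokenizer` are loaded as in the notebook; the example messages and the slicing helper are illustrative, not part of the commit.

```python
# Batched inference sketch: why left padding matters.
# Assumes `model` and `tokenizer` exist as set up in example_use.ipynb.
messages = [
    "Je suis vraiment déçu par ce film, c'était nul !",   # hypothetical examples
    "Merci beaucoup pour ton aide, c'est super gentil.",
]
prompts = [f"Message:\n{m}\n\nAnalyse:\n<think>\nExplication :\n" for m in messages]

# With padding_side='left', shorter prompts are padded on the left, so every
# sequence ends at its real last token and generation continues correctly.
batch = tokenizer(prompts, return_tensors="pt", padding=True, truncation=True).to(model.device)

outputs = model.generate(
    **batch,
    max_new_tokens=1024,
    do_sample=True,
    temperature=0.7,
    top_p=0.9,
    eos_token_id=tokenizer.eos_token_id,
)

# Decode only the newly generated part of each sequence
new_tokens = outputs[:, batch["input_ids"].shape[1]:]
for message, analysis in zip(messages, tokenizer.batch_decode(new_tokens, skip_special_tokens=True)):
    print(message, "->", analysis[:80], "...")
```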