flozi00 committed (verified)
Commit 98049ce · Parent: 15e23a9

Update README.md

Files changed (1): README.md +122 -0
README.md CHANGED
@@ -17,3 +17,125 @@ configs:
   - split: train
     path: data/train-*
 ---
+
+```python
+import os
+
+import datasets
+import torch
+from transformers import ModernBertForSequenceClassification, pipeline
+
+# Index of the GPU this worker owns; also selects its slice of the data below.
+_GPU_ID = os.getenv("CUDA_VISIBLE_DEVICES", "0")
+
+
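+# Build a text-classification pipeline around the edu scorer: bf16 weights,
+# SDPA attention, and torch.compile for faster batched inference
+# (reference_compile=False, since the whole model is compiled below anyway).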
+def load_model(gpu_index=0):
+    model = ModernBertForSequenceClassification.from_pretrained(
+        "flozi00/GermanEduScorer-ModernBERT-base",
+        reference_compile=False,
+        attn_implementation="sdpa",
+    ).to(torch.bfloat16)
+
+    model = torch.compile(model, dynamic=True, mode="max-autotune")
+
+    pipe = pipeline(
+        "text-classification",
+        model=model,
+        tokenizer="flozi00/GermanEduScorer-ModernBERT-base",
+        device=gpu_index,
+        torch_dtype=torch.bfloat16,
+    )
+
+    return pipe
+
+
+pipe0 = load_model(0)
+tokenizer_kwargs = {"truncation": True}  # unused; truncation is passed per call below
+
+# Documents containing these phrases are dropped before pushing.
+BAD_WORDS = [
+    "Sofort lieferbar",  # "available for immediate delivery" - a shop-page marker
+]
+
+
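+# Run the pipeline over a list of texts and parse each predicted label into an
+# integer edu score.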
+def process_chunk(pipe, texts):
+    if not texts:
+        return []
+    return [
+        int(x["label"])
+        for x in pipe(
+            texts,
+            batch_size=256,
+            truncation=True,
+            max_length=1024,
+        )
+    ]
+
+
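+# `map_edu` runs under `map(batched=True)`, so example["text"] is a list of
+# documents; the raw text is mirrored into "content" and renamed back to
+# "text" after filtering.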
+def classification_wrapper(text_list: list):
+    return process_chunk(pipe0, text_list)
+
+
+def map_edu(example):
+    example["content"] = example["text"]
+    example["label"] = classification_wrapper(example["text"])
+    return example
+
+
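+# Process the German (deu_Latn) FineWeb-2 training shards in four sets of
+# parquet files, fetched directly from the Hub by URL.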
+for SET_ID in ["0", "1", "2", "3"]:
+    base_url = "https://huggingface.co/datasets/HuggingFaceFW/fineweb-2/resolve/main/data/deu_Latn/train/"
+    data_files = {
+        "train": [base_url + f"00{SET_ID}_0000{i}.parquet" for i in range(10)]
+        + [base_url + f"00{SET_ID}_000{i}.parquet" for i in range(10, 38)]
+    }
+
+    fineweb = datasets.load_dataset(
+        "parquet",
+        data_files=data_files,
+        split="train",
+        num_proc=4,
+        cache_dir=f"./cache_fineweb_{SET_ID}",
+    )
+
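+    # Each of four workers (identified by _GPU_ID) scores its own quarter of
+    # the split, in chunks of 100k rows.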
+    chunk_size = 100_000
+    part_size = len(fineweb) // 4
+    total_samples = part_size * (int(_GPU_ID) + 1)
+    output_path = f"fineweb2_edu_4up_german_split_{int(SET_ID)+1}-of-4"
+
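+    # Resumable loop: every chunk is checkpointed to disk, so a restarted run
+    # skips work that is already done.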
+    for i in range(part_size * int(_GPU_ID), total_samples, chunk_size):
+        end_idx = min(i + chunk_size, total_samples)
+        checkpoint_path = f"chunks/{output_path}_chunk_{i}"
+
+        # Try to load an existing chunk; success means it was already processed.
+        try:
+            datasets.load_from_disk(checkpoint_path)
+            print(f"Chunk {i} to {end_idx} already processed, skipping...")
+            continue
+        except Exception:
+            print(f"Processing chunk {i} to {end_idx} of {total_samples}")
+
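+        # Score the chunk and keep only documents rated 4 or higher.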
+        chunk = fineweb.select(range(i, end_idx))
+        processed_chunk = chunk.map(
+            map_edu,
+            remove_columns=chunk.column_names,
+            batch_size=1024,
+            batched=True,
+        ).filter(lambda x: x["label"] >= 4, num_proc=8)
+        processed_chunk = processed_chunk.rename_column("content", "text")
+
+        processed_chunk.save_to_disk(checkpoint_path)
+        print(f"Saved checkpoint to {checkpoint_path}")
+
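+        # From worker 0, periodically merge all checkpoints, filter out spam
+        # phrases and extreme lengths, and push the running result to the Hub.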
+        if i % 1_000_000 == 0 and _GPU_ID == "0" and i > 0:
+            sets_to_push = []
+            # Load every checkpoint folder written so far.
+            for folder in os.listdir("chunks"):
+                sets_to_push.append(datasets.load_from_disk(f"chunks/{folder}"))
+            state_ds = datasets.concatenate_datasets(sets_to_push)
+            for bad_word in BAD_WORDS:
+                state_ds = state_ds.filter(
+                    lambda x: bad_word not in x["text"], num_proc=8
+                )
+            # Keep documents between roughly 1k and 100k characters.
+            state_ds = state_ds.filter(
+                lambda x: len(x["text"]) > 1024 and len(x["text"]) <= 100_000,
+                num_proc=8,
+            )
+            state_ds.push_to_hub("Fineweb2-German-Eduscore-4andMore")
+
+```
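
Once a push has completed, the filtered split can be loaded like any Hub dataset. A minimal sketch, assuming the push above lands under the committer's namespace as flozi00/Fineweb2-German-Eduscore-4andMore:

```python
import datasets

# Repo id is an assumption based on the push_to_hub() call in the script above.
ds = datasets.load_dataset("flozi00/Fineweb2-German-Eduscore-4andMore", split="train")
print(len(ds), ds[0]["text"][:200])
```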