Commit b4c35f6 · ruff format
Parent(s): f5e30d5

Files changed (1): gpt_oss_minimal.py (+3 −3)
--- a/gpt_oss_minimal.py
+++ b/gpt_oss_minimal.py
@@ -34,12 +34,12 @@ Usage:
 import argparse
 import os
 import sys
+from datetime import datetime

 import torch
 from datasets import Dataset, load_dataset
 from huggingface_hub import DatasetCard, get_token, login
 from transformers import AutoModelForCausalLM, AutoTokenizer
-from datetime import datetime

 # Enable fast downloads
 os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"
@@ -197,7 +197,7 @@ def main():
     # Process each example
     results = []
     generation_start_time = datetime.now().isoformat()
-
+
     for i, example in enumerate(dataset):
         print(f"[{i + 1}/{len(dataset)}] Processing...")

@@ -253,7 +253,7 @@ def main():

     print(f"Pushing to {args.output_dataset}...")
     output_dataset.push_to_hub(args.output_dataset, token=token)
-
+
     # Create and push dataset card
     print("Creating dataset card...")
     card_content = create_dataset_card(
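For reference, the import block of gpt_oss_minimal.py after this commit would read as below; this is reconstructed from the first hunk only, with the module docstring and the rest of the file omitted. The two remaining hunks touch only blank lines inside main(), apparently stripping trailing whitespace.

# Standard-library imports, now grouped together (datetime moved up from below the third-party block)
import argparse
import os
import sys
from datetime import datetime

# Third-party imports
import torch
from datasets import Dataset, load_dataset
from huggingface_hub import DatasetCard, get_token, login
from transformers import AutoModelForCausalLM, AutoTokenizer

# Enable fast downloads
os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"

The net effect is the usual isort-style ordering: standard-library imports (argparse, os, sys, datetime) in the first group, third-party imports (torch, datasets, huggingface_hub, transformers) in the second.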