ptrdvn committed
Commit 5fe7d4d (verified) · 1 parent: 98786ba

Update README.md

Files changed (1):
  1. README.md (+35, -18)
README.md CHANGED
@@ -36,16 +36,25 @@ def get_lang_preds(source_text, target_text):
         "target_predlang_conf": target_conf
     }
 
-def filter_file(file_path, num_proc=12):
-    time.sleep(1.0)
-    ds = load_dataset("json", data_files={"train": get_url(file_path)}, split="train")
-    amount_before = len(ds)
+def filter_file(file_path, num_proc=24):
+    time.sleep(2.0)
+    temp_file = "./temp.jsonl"
+    os.system(f"wget {get_url(file_path)} -O {temp_file}")
+    ds = load_dataset("json", data_files={"train": temp_file}, split="train")
     ds = ds.map(lambda x: get_lang_preds(x["source_text"], x["target_text"]), num_proc=num_proc)
     ds = ds.filter(lambda x: bool(x["source_lang"] == x["source_predlang_id"]) and bool(x["target_lang"] == x["target_predlang_id"]), num_proc=num_proc)
-    amount_after = len(ds)
-    return ds, amount_before, amount_after
+    return ds
 
 PARQUET_FOLDER = "./datasets/"
+api = HfApi(token="hf_YOUR_HF_TOKEN")
+
+def upload_folder():
+    api.upload_folder(
+        folder_path=PARQUET_FOLDER,
+        path_in_repo="data/",  # Upload to a specific folder
+        repo_id="ptrdvn/opus_lid_filtered",
+        repo_type="dataset",
+    )
 
 def save_parquet(ds, file_path):
     ds.to_parquet(
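A note on the hunk above: the token value is a placeholder (`hf_YOUR_HF_TOKEN`). If you adapt the script, one option is to read the token from the environment instead of hardcoding it; a minimal sketch, assuming the token has been exported as `HF_TOKEN` (that variable name is an assumption, not something this commit sets up):

```python
import os
from huggingface_hub import HfApi

# Assumes the token was exported beforehand, e.g.  export HF_TOKEN=hf_xxx
api = HfApi(token=os.environ.get("HF_TOKEN"))

PARQUET_FOLDER = "./datasets/"

def upload_folder():
    # Same upload call as in the updated README: push the local parquet
    # folder into the data/ prefix of the filtered-dataset repo.
    api.upload_folder(
        folder_path=PARQUET_FOLDER,
        path_in_repo="data/",
        repo_id="ptrdvn/opus_lid_filtered",
        repo_type="dataset",
    )
```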
@@ -56,10 +65,13 @@ get_url = lambda file_path: f"https://huggingface.co/datasets/MaLA-LM/mala-opus-
 
 def main():
 
-    api = HfApi(token="YOUR_TOKEN_HERE")
     fs = HfFileSystem()
 
-    lang_pairs = fs.ls("datasets/MaLA-LM/mala-opus-dedup-2410", detail=False)
+    lang_pairs = sorted(fs.ls("datasets/MaLA-LM/mala-opus-dedup-2410", detail=False))
+    prev_lang_pairs = sorted(fs.ls("datasets/ptrdvn/opus_lid_filtered/data", detail=False))
+    last_lang_pair = prev_lang_pairs[-1].split("/")[-1]
+    print(last_lang_pair)
+    lang_pairs = [x for x in lang_pairs if x.split("/")[-1] >= last_lang_pair]
 
     data_amounts = []
 
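The hunk above makes `main()` resumable: it lists the language-pair folders already uploaded to the output repo and drops everything that sorts strictly before the last one. A toy illustration of that comparison (the paths below are invented, not real repo contents):

```python
# Toy version of the resume filter added in main(); paths are made up.
lang_pairs = sorted([
    "datasets/MaLA-LM/mala-opus-dedup-2410/aaa-bbb",
    "datasets/MaLA-LM/mala-opus-dedup-2410/ccc-ddd",
    "datasets/MaLA-LM/mala-opus-dedup-2410/eee-fff",
])
prev_lang_pairs = sorted([
    "datasets/ptrdvn/opus_lid_filtered/data/aaa-bbb",
    "datasets/ptrdvn/opus_lid_filtered/data/ccc-ddd",
])

last_lang_pair = prev_lang_pairs[-1].split("/")[-1]  # "ccc-ddd"
todo = [x for x in lang_pairs if x.split("/")[-1] >= last_lang_pair]
print(todo)  # keeps "ccc-ddd" and "eee-fff"
```

Note that the `>=` comparison keeps the last already-uploaded pair in the work list, so a pair whose upload was interrupted is re-filtered rather than skipped.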
 
@@ -67,25 +79,30 @@ def main():
         file_paths = fs.ls(lang_pair,detail=False)
         num_uploads = 0
         for file_path in file_paths:
-            ds, amount_before, amount_after = filter_file(file_path)
-            data_amounts.append({"file_path": file_path, "amount_before": amount_before, "amount_after": amount_after})
+            ds = filter_file(file_path)
             if len(ds) > 0:
                 save_parquet(ds, file_path)
 
-            # Upload to huggingface if there are more than 50 parquet files to mitigate rate limits
-            if len(glob("./datasets/*/*.parquet")) > 50:
+            # Upload to huggingface if there are more than 20 parquet files to mitigate rate limits
+            if len(glob("./datasets/*/*.parquet")) > 20:
                 print("Sleeping for 10 seconds")
                 time.sleep(10)
-                print("Uploading folder")
-                api.upload_folder(folder_path=PARQUET_FOLDER, path_in_repo="data/", repo_id="ptrdvn/opus_lid_filtered", repo_type="dataset")
+                print("Uploading folder", file_path.split("/")[-2])
+                upload_folder()
                 os.system("rm -rf " + PARQUET_FOLDER)
 
-            os.system("rm -rf ~/.cache/huggingface")
+            os.system("rm -rf ~/.cache/huggingface/datasets")
             print("Finished with", file_path.split("/")[-2] + ".parquet")
 
-    api.upload_folder(folder_path=PARQUET_FOLDER, path_in_repo="data/", repo_id="ptrdvn/opus_lid_filtered", repo_type="dataset")
-    Dataset.from_list(data_amounts).push_to_hub("ptrdvn/opus_lid_filtered_data_amounts")
+    upload_folder()
 
-main()
+while True:
+    try:
+        main()
+    except Exception as e:
+        print("MAIN LOOP ERROR")
+        print(e)
+        time.sleep(180)
+        upload_folder()
 
 ```
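For context on the hunks above: the diff only shows the tail of `get_lang_preds`, yet the `ds.filter` call depends on the keys it returns (`source_predlang_id`, `target_predlang_id` and the two confidence fields). A rough sketch of what such a helper could look like with a fastText-style LID model; the model path, label format, and everything not visible in the diff are assumptions, not the author's actual code:

```python
import fasttext  # assumes the fasttext package and a locally downloaded LID model

# Hypothetical model file; GlotLID-style models emit labels like "__label__eng_Latn".
lid_model = fasttext.load_model("./lid_model.bin")

def get_lang_preds(source_text, target_text):
    # fastText predicts one line at a time, so flatten newlines before predicting.
    (src_label,), src_probs = lid_model.predict(source_text.replace("\n", " "))
    (tgt_label,), tgt_probs = lid_model.predict(target_text.replace("\n", " "))
    return {
        "source_predlang_id": src_label.removeprefix("__label__"),
        "source_predlang_conf": float(src_probs[0]),
        "target_predlang_id": tgt_label.removeprefix("__label__"),
        "target_predlang_conf": float(tgt_probs[0]),
    }
```

The returned keys line up with the ones the filter compares against the dataset's `source_lang` and `target_lang` columns.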
 