"""Build train/valid JSONL splits from the commonsense relation triples
hosted at https://home.ttic.edu/~kgimpel/comsense_resources/.

Each output line holds one relation type with its positive and negative
(head, tail) pairs.
"""
import json
import os
import gzip

import requests
import pandas as pd

urls = {
    'dev1': 'https://home.ttic.edu/~kgimpel/comsense_resources/dev1.txt.gz',
    'dev2': 'https://home.ttic.edu/~kgimpel/comsense_resources/dev2.txt.gz',
    'test': 'https://home.ttic.edu/~kgimpel/comsense_resources/test.txt.gz'
}


def wget(url, cache_dir: str = './cache'):
    """Download a gzipped file, decompress it into the cache directory, and
    return the path to the decompressed file."""
    os.makedirs(cache_dir, exist_ok=True)
    filename = os.path.basename(url)
    path = os.path.join(cache_dir, filename)
    path_decompressed = path.replace('.gz', '')
    # Skip the download if the decompressed file is already cached.
    if os.path.exists(path_decompressed):
        return path_decompressed
    with open(path, 'wb') as f:
        r = requests.get(url)
        r.raise_for_status()
        f.write(r.content)
    with gzip.open(path, 'rb') as f_read:
        with open(path_decompressed, 'wb') as f_write:
            f_write.write(f_read.read())
    os.remove(path)
    return path_decompressed


def read_file(file_name):
    """Parse a tab-separated file of (relation, head, tail, flag) rows and
    split it into positive (flag == 1) and negative (flag == 0) triples."""
    with open(file_name) as f_reader:
        df = pd.DataFrame(
            [i.split('\t') for i in f_reader.read().split('\n') if len(i) > 0],
            columns=['relation', 'head', 'tail', 'flag'])
    df_positive = df[df['flag'] == '1'].drop(columns=['flag'])
    df_negative = df[df['flag'] == '0'].drop(columns=['flag'])
    return df_positive, df_negative


if __name__ == '__main__':
    os.makedirs('dataset', exist_ok=True)

    # Training split: test + dev1, grouped by relation type.
    test_p, test_n = read_file(wget(urls['test']))
    dev1_p, dev1_n = read_file(wget(urls['dev1']))
    train_p = pd.concat([test_p, dev1_p])
    train_n = pd.concat([test_n, dev1_n])
    with open('dataset/train.jsonl', 'w') as f:
        for relation, df_p in train_p.groupby('relation'):
            # Skip relations with fewer than two positive pairs.
            if len(df_p) < 2:
                continue
            df_n = train_n[train_n['relation'] == relation]
            f.write(json.dumps({
                'relation_type': relation,
                'positives': df_p[['head', 'tail']].to_numpy().tolist(),
                'negatives': df_n[['head', 'tail']].to_numpy().tolist()
            }) + '\n')

    # Validation split: dev2, grouped by relation type.
    dev2_p, dev2_n = read_file(wget(urls['dev2']))
    with open('dataset/valid.jsonl', 'w') as f:
        for relation, df_p in dev2_p.groupby('relation'):
            if len(df_p) < 2:
                continue
            df_n = dev2_n[dev2_n['relation'] == relation]
            f.write(json.dumps({
                'relation_type': relation,
                'positives': df_p[['head', 'tail']].to_numpy().tolist(),
                'negatives': df_n[['head', 'tail']].to_numpy().tolist()
            }) + '\n')