klamike committed on
Commit
e736a92
·
verified ·
1 Parent(s): 27f74ad

Add files using upload-large-folder tool

Browse files
PGLearn-Small-14_ieee-nminus1.py ADDED
@@ -0,0 +1,427 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+ from dataclasses import dataclass
3
+ from pathlib import Path
4
+ import json
5
+ import shutil
6
+
7
+ import datasets as hfd
8
+ import h5py
9
+ import pgzip as gzip
10
+ import pyarrow as pa
11
+
12
+ # ┌──────────────┐
13
+ # │ Metadata │
14
+ # └──────────────┘
15
+
16
@dataclass
class CaseSizes:
    """Component counts describing the size of the power grid case."""
    n_bus: int     # number of buses
    n_load: int    # number of loads
    n_gen: int     # number of generators
    n_branch: int  # number of branches (lines/transformers)
22
+
23
+ CASENAME = "14_ieee-nminus1"
24
+ SIZES = CaseSizes(n_bus=14, n_load=11, n_gen=5, n_branch=20)
25
+ NUM_TRAIN = 317691
26
+ NUM_TEST = 79423
27
+ NUM_INFEASIBLE = 102886
28
+ SPLITFILES = {}
29
+
30
+ URL = "https://huggingface.co/datasets/PGLearn/PGLearn-Small-14_ieee-nminus1"
31
+ DESCRIPTION = """\
32
+ The 14_ieee-nminus1 PGLearn optimal power flow dataset, part of the PGLearn-Small collection. \
33
+ """
34
+ VERSION = hfd.Version("1.0.0")
35
+ DEFAULT_CONFIG_DESCRIPTION="""\
36
+ This configuration contains feasible input, primal solution, and dual solution data \
37
+ for the ACOPF and DCOPF formulations on the {case} system. For case data, \
38
+ download the case.json.gz file from the `script` branch of the repository. \
39
+ https://huggingface.co/datasets/PGLearn/PGLearn-Small-14_ieee-nminus1/blob/script/case.json.gz
40
+ """
41
+ USE_ML4OPF_WARNING = """
42
+ ================================================================================================
43
+ Loading PGLearn-Small-14_ieee-nminus1 through the `datasets.load_dataset` function may be slow.
44
+
45
+ Consider using ML4OPF to directly convert to `torch.Tensor`; for more info see:
46
+ https://github.com/AI4OPT/ML4OPF?tab=readme-ov-file#manually-loading-data
47
+
48
+ Or, use `huggingface_hub.snapshot_download` and an HDF5 reader; for more info see:
49
+ https://huggingface.co/datasets/PGLearn/PGLearn-Small-14_ieee-nminus1#downloading-individual-files
50
+ ================================================================================================
51
+ """
52
+ CITATION = """\
53
+ @article{klamkinpglearn,
54
+ title={{PGLearn - An Open-Source Learning Toolkit for Optimal Power Flow}},
55
+ author={Klamkin, Michael and Tanneau, Mathieu and Van Hentenryck, Pascal},
56
+ year={2025},
57
+ }\
58
+ """
59
+
60
+ IS_COMPRESSED = True
61
+
62
+ # ┌──────────────────┐
63
+ # │ Formulations │
64
+ # └──────────────────┘
65
+
66
def acopf_features(sizes: CaseSizes, primal: bool, dual: bool, meta: bool):
    """Assemble the ACOPF feature schema from the selected solution components."""
    parts = []
    if primal:
        parts.append(acopf_primal_features(sizes))
    if dual:
        parts.append(acopf_dual_features(sizes))
    if meta:
        parts.append({f"ACOPF/{key}": feat for key, feat in META_FEATURES.items()})
    merged = {}
    for part in parts:
        merged.update(part)
    return merged
72
+
73
def dcopf_features(sizes: CaseSizes, primal: bool, dual: bool, meta: bool):
    """Assemble the DCOPF feature schema from the selected solution components."""
    parts = []
    if primal:
        parts.append(dcopf_primal_features(sizes))
    if dual:
        parts.append(dcopf_dual_features(sizes))
    if meta:
        parts.append({f"DCOPF/{key}": feat for key, feat in META_FEATURES.items()})
    merged = {}
    for part in parts:
        merged.update(part)
    return merged
79
+
80
def socopf_features(sizes: CaseSizes, primal: bool, dual: bool, meta: bool):
    """Assemble the SOCOPF feature schema from the selected solution components."""
    parts = []
    if primal:
        parts.append(socopf_primal_features(sizes))
    if dual:
        parts.append(socopf_dual_features(sizes))
    if meta:
        parts.append({f"SOCOPF/{key}": feat for key, feat in META_FEATURES.items()})
    merged = {}
    for part in parts:
        merged.update(part)
    return merged
86
+
87
+ FORMULATIONS_TO_FEATURES = {
88
+ "ACOPF": acopf_features,
89
+ "DCOPF": dcopf_features,
90
+ "SOCOPF": socopf_features,
91
+ }
92
+
93
+ # ┌───────────────────┐
94
+ # │ BuilderConfig │
95
+ # └───────────────────┘
96
+
97
class PGLearnSmall14_ieeeNminus1Config(hfd.BuilderConfig):
    """BuilderConfig for PGLearn-Small-14_ieee-nminus1.
    By default, primal solution data, metadata, input, casejson, are included for the train and test splits.

    To modify the default configuration, pass attributes of this class to `datasets.load_dataset`:

    Attributes:
        formulations (list[str]): The formulation(s) to include, e.g. ["ACOPF", "DCOPF"]
        primal (bool, optional): Include primal solution data. Defaults to True.
        dual (bool, optional): Include dual solution data. Defaults to False.
        meta (bool, optional): Include metadata. Defaults to True.
        input (bool, optional): Include input data. Defaults to True.
        casejson (bool, optional): Include case.json data. Defaults to True.
        train (bool, optional): Include training samples. Defaults to True.
        test (bool, optional): Include testing samples. Defaults to True.
        infeasible (bool, optional): Include infeasible samples. Defaults to False.
    """
    def __init__(self,
        formulations: list[str],
        primal: bool=True, dual: bool=False, meta: bool=True, input: bool = True, casejson: bool=True,
        train: bool=True, test: bool=True, infeasible: bool=False,
        compressed: bool=IS_COMPRESSED, **kwargs
    ):
        # Zero-argument super() is equivalent to the explicit two-argument form.
        super().__init__(version=VERSION, **kwargs)

        self.case = CASENAME
        self.formulations = formulations

        self.primal = primal
        self.dual = dual
        self.meta = meta
        self.input = input
        self.casejson = casejson

        self.train = train
        self.test = test
        self.infeasible = infeasible

        # Suffix appended to every remote filename when the data is gzip-compressed.
        self.gz_ext = ".gz" if compressed else ""

    @property
    def size(self):
        """Static component counts (buses/loads/gens/branches) for this case."""
        return SIZES

    @property
    def features(self):
        """`datasets.Features` schema implied by the selected data components."""
        features = {}
        if self.casejson: features.update(case_features())
        if self.input: features.update(input_features(SIZES))
        for formulation in self.formulations:
            features.update(FORMULATIONS_TO_FEATURES[formulation](SIZES, self.primal, self.dual, self.meta))
        return hfd.Features(features)

    @property
    def splits(self):
        """Split metadata (name and example count) for each enabled split."""
        splits: dict[hfd.Split, dict[str, str | int]] = {}
        if self.train:
            splits[hfd.Split.TRAIN] = {
                "name": "train",
                "num_examples": NUM_TRAIN
            }
        if self.test:
            splits[hfd.Split.TEST] = {
                "name": "test",
                "num_examples": NUM_TEST
            }
        if self.infeasible:
            splits[hfd.Split("infeasible")] = {
                "name": "infeasible",
                "num_examples": NUM_INFEASIBLE
            }
        return splits

    @property
    def urls(self):
        """Relative URLs of the files to download, keyed by split (plus "case")."""
        urls: dict[str, str | list] = {"train": [], "test": [], "infeasible": []}

        if self.casejson:
            urls["case"] = "case.json" + self.gz_ext

        split_names = [name for name, enabled in (
            ("train", self.train), ("test", self.test), ("infeasible", self.infeasible),
        ) if enabled]
        # Solution components selected by the config; one .h5 file per component.
        components = [name for name, enabled in (
            ("primal", self.primal), ("dual", self.dual), ("meta", self.meta),
        ) if enabled]

        for split in split_names:
            if self.input: urls[split].append(f"{split}/input.h5" + self.gz_ext)
            for formulation in self.formulations:
                for component in components:
                    filename = f"{split}/{formulation}/{component}.h5" + self.gz_ext
                    # SPLITFILES maps a logical filename to its list of shard
                    # files when the file was uploaded in pieces.
                    urls[split].append(SPLITFILES.get(filename, filename))
        return urls
202
+
203
+ # ┌────────────────────┐
204
+ # │ DatasetBuilder │
205
+ # └────────────────────┘
206
+
207
class PGLearnSmall14_ieeeNminus1(hfd.ArrowBasedBuilder):
    """DatasetBuilder for PGLearn-Small-14_ieee-nminus1.
    The main interface is `datasets.load_dataset` with `trust_remote_code=True`, e.g.

    ```python
    from datasets import load_dataset
    ds = load_dataset("PGLearn/PGLearn-Small-14_ieee-nminus1", trust_remote_code=True,
        # modify the default configuration by passing kwargs
        formulations=["DCOPF"],
        dual=False,
        meta=False,
    )
    ```
    """

    # Number of rows per Arrow table yielded by `_generate_tables`.
    DEFAULT_WRITER_BATCH_SIZE = 10000
    BUILDER_CONFIG_CLASS = PGLearnSmall14_ieeeNminus1Config
    DEFAULT_CONFIG_NAME=CASENAME
    # Single default configuration: all formulations, primal+dual+meta+input,
    # feasible train/test splits only, no embedded case.json.
    BUILDER_CONFIGS = [
        PGLearnSmall14_ieeeNminus1Config(
            name=CASENAME, description=DEFAULT_CONFIG_DESCRIPTION.format(case=CASENAME),
            formulations=list(FORMULATIONS_TO_FEATURES.keys()),
            primal=True, dual=True, meta=True, input=True, casejson=False,
            train=True, test=True, infeasible=False,
        )
    ]

    def _info(self):
        """Dataset metadata; schema and splits derive from the active config."""
        return hfd.DatasetInfo(
            features=self.config.features, splits=self.config.splits,
            description=DESCRIPTION + self.config.description,
            homepage=URL, citation=CITATION,
        )

    def _split_generators(self, dl_manager: hfd.DownloadManager):
        """Download the configured files and build one generator per enabled split."""
        # Loading via `datasets` is slow compared to direct HDF5 access; warn users.
        hfd.logging.get_logger().warning(USE_ML4OPF_WARNING)

        filepaths = dl_manager.download_and_extract(self.config.urls)

        splits: list[hfd.SplitGenerator] = []
        if self.config.train:
            splits.append(hfd.SplitGenerator(
                name=hfd.Split.TRAIN,
                gen_kwargs=dict(case_file=filepaths.get("case", None), data_files=tuple(filepaths["train"]), n_samples=NUM_TRAIN),
            ))
        if self.config.test:
            splits.append(hfd.SplitGenerator(
                name=hfd.Split.TEST,
                gen_kwargs=dict(case_file=filepaths.get("case", None), data_files=tuple(filepaths["test"]), n_samples=NUM_TEST),
            ))
        if self.config.infeasible:
            splits.append(hfd.SplitGenerator(
                name=hfd.Split("infeasible"),
                gen_kwargs=dict(case_file=filepaths.get("case", None), data_files=tuple(filepaths["infeasible"]), n_samples=NUM_INFEASIBLE),
            ))
        return splits

    def _generate_tables(self, case_file: str | None, data_files: tuple[hfd.utils.track.tracked_str | list[hfd.utils.track.tracked_str]], n_samples: int):
        """Yield (key, pyarrow.Table) batches assembled from the split's HDF5 files.

        Args:
            case_file: Path to the (possibly gzipped) case.json, or None.
            data_files: Downloaded HDF5 files; an element may be a list of shards.
            n_samples: Total number of examples in this split.
        """
        # Re-serialize the case JSON once; the same string is repeated on every row.
        case_data: str | None = json.dumps(json.load(open_maybe_gzip_cat(case_file))) if case_file is not None else None
        data: dict[str, h5py.File] = {}
        for file in data_files:
            v = h5py.File(open_maybe_gzip_cat(file), "r")
            # Key each file by "<formulation>/<stem>" (e.g. "ACOPF/primal"),
            # reconstructed from the download's origin path.
            if isinstance(file, list):
                # Sharded file: path components come from the first shard's origin.
                k = "/".join(Path(file[0].get_origin()).parts[-3:-1]).split(".")[0]
            else:
                k = "/".join(Path(file.get_origin()).parts[-2:]).split(".")[0]
            data[k] = v
        # Input files are keyed "input" (no split prefix) to match the schema.
        for k in list(data.keys()):
            if "/input" in k: data[k.split("/", 1)[1]] = data.pop(k)

        batch_size = self._writer_batch_size or self.DEFAULT_WRITER_BATCH_SIZE
        for i in range(0, n_samples, batch_size):
            # The final batch may be smaller than batch_size.
            effective_batch_size = min(batch_size, n_samples - i)

            # Slice every HDF5 dataset whose key appears in the configured schema.
            sample_data = {
                f"{dk}/{k}":
                hfd.features.features.numpy_to_pyarrow_listarray(v[i:i + effective_batch_size, ...])
                for dk, d in data.items() for k, v in d.items() if f"{dk}/{k}" in self.config.features
            }

            if case_data is not None:
                sample_data["case/json"] = pa.array([case_data] * effective_batch_size)

            yield i, pa.Table.from_pydict(sample_data)

        for f in data.values():
            f.close()
294
+
295
+ # ┌──────────────┐
296
+ # │ Features │
297
+ # └──────────────┘
298
+
299
+ FLOAT_TYPE = "float32"
300
+ INT_TYPE = "int64"
301
+ BOOL_TYPE = "bool"
302
+ STRING_TYPE = "string"
303
+
304
def case_features():
    """Feature schema for the JSON-serialized reference case data."""
    # FIXME: better way to share schema of case data -- need to treat jagged arrays
    return {
        "case/json": hfd.Value(STRING_TYPE),
    }
309
+
310
+ META_FEATURES = {
311
+ "meta/seed": hfd.Value(dtype=INT_TYPE),
312
+ "meta/formulation": hfd.Value(dtype=STRING_TYPE),
313
+ "meta/primal_objective_value": hfd.Value(dtype=FLOAT_TYPE),
314
+ "meta/dual_objective_value": hfd.Value(dtype=FLOAT_TYPE),
315
+ "meta/primal_status": hfd.Value(dtype=STRING_TYPE),
316
+ "meta/dual_status": hfd.Value(dtype=STRING_TYPE),
317
+ "meta/termination_status": hfd.Value(dtype=STRING_TYPE),
318
+ "meta/build_time": hfd.Value(dtype=FLOAT_TYPE),
319
+ "meta/extract_time": hfd.Value(dtype=FLOAT_TYPE),
320
+ "meta/solve_time": hfd.Value(dtype=FLOAT_TYPE),
321
+ }
322
+
323
def input_features(sizes: CaseSizes):
    """Feature schema for the sampled OPF input data."""
    features = {}
    for name, length, dtype in (
        ("pd", sizes.n_load, FLOAT_TYPE),
        ("qd", sizes.n_load, FLOAT_TYPE),
        ("gen_status", sizes.n_gen, BOOL_TYPE),
        ("branch_status", sizes.n_branch, BOOL_TYPE),
    ):
        features[f"input/{name}"] = hfd.Sequence(length=length, feature=hfd.Value(dtype=dtype))
    features["input/seed"] = hfd.Value(dtype=INT_TYPE)
    return features
331
+
332
def acopf_primal_features(sizes: CaseSizes):
    """Feature schema for ACOPF primal solution variables."""
    lengths = {
        "vm": sizes.n_bus, "va": sizes.n_bus,
        "pg": sizes.n_gen, "qg": sizes.n_gen,
        "pf": sizes.n_branch, "pt": sizes.n_branch,
        "qf": sizes.n_branch, "qt": sizes.n_branch,
    }
    return {
        f"ACOPF/primal/{name}": hfd.Sequence(length=length, feature=hfd.Value(dtype=FLOAT_TYPE))
        for name, length in lengths.items()
    }
343
def acopf_dual_features(sizes: CaseSizes):
    """Feature schema for ACOPF dual solution variables."""
    lengths = {
        "kcl_p": sizes.n_bus, "kcl_q": sizes.n_bus, "vm": sizes.n_bus,
        "pg": sizes.n_gen, "qg": sizes.n_gen,
        "ohm_pf": sizes.n_branch, "ohm_pt": sizes.n_branch,
        "ohm_qf": sizes.n_branch, "ohm_qt": sizes.n_branch,
        "pf": sizes.n_branch, "pt": sizes.n_branch,
        "qf": sizes.n_branch, "qt": sizes.n_branch,
        "va_diff": sizes.n_branch, "sm_fr": sizes.n_branch, "sm_to": sizes.n_branch,
    }
    features = {
        f"ACOPF/dual/{name}": hfd.Sequence(length=length, feature=hfd.Value(dtype=FLOAT_TYPE))
        for name, length in lengths.items()
    }
    # The slack bus dual is a scalar, not a per-component vector.
    features["ACOPF/dual/slack_bus"] = hfd.Value(dtype=FLOAT_TYPE)
    return features
363
def dcopf_primal_features(sizes: CaseSizes):
    """Feature schema for DCOPF primal solution variables."""
    lengths = {"va": sizes.n_bus, "pg": sizes.n_gen, "pf": sizes.n_branch}
    return {
        f"DCOPF/primal/{name}": hfd.Sequence(length=length, feature=hfd.Value(dtype=FLOAT_TYPE))
        for name, length in lengths.items()
    }
369
def dcopf_dual_features(sizes: CaseSizes):
    """Feature schema for DCOPF dual solution variables."""
    lengths = {
        "kcl_p": sizes.n_bus, "pg": sizes.n_gen,
        "ohm_pf": sizes.n_branch, "pf": sizes.n_branch, "va_diff": sizes.n_branch,
    }
    features = {
        f"DCOPF/dual/{name}": hfd.Sequence(length=length, feature=hfd.Value(dtype=FLOAT_TYPE))
        for name, length in lengths.items()
    }
    # The slack bus dual is a scalar, not a per-component vector.
    features["DCOPF/dual/slack_bus"] = hfd.Value(dtype=FLOAT_TYPE)
    return features
378
def socopf_primal_features(sizes: CaseSizes):
    """Feature schema for SOCOPF primal solution variables."""
    lengths = {
        "w": sizes.n_bus,
        "pg": sizes.n_gen, "qg": sizes.n_gen,
        "pf": sizes.n_branch, "pt": sizes.n_branch,
        "qf": sizes.n_branch, "qt": sizes.n_branch,
        "wr": sizes.n_branch, "wi": sizes.n_branch,
    }
    return {
        f"SOCOPF/primal/{name}": hfd.Sequence(length=length, feature=hfd.Value(dtype=FLOAT_TYPE))
        for name, length in lengths.items()
    }
390
def socopf_dual_features(sizes: CaseSizes):
    """Feature schema for SOCOPF dual solution variables."""
    def _seq(length):
        # Fixed-length float vector feature.
        return hfd.Sequence(length=length, feature=hfd.Value(dtype=FLOAT_TYPE))

    features = {}
    for name, length in (
        ("kcl_p", sizes.n_bus), ("kcl_q", sizes.n_bus), ("w", sizes.n_bus),
        ("pg", sizes.n_gen), ("qg", sizes.n_gen),
        ("ohm_pf", sizes.n_branch), ("ohm_pt", sizes.n_branch),
        ("ohm_qf", sizes.n_branch), ("ohm_qt", sizes.n_branch),
    ):
        features[f"SOCOPF/dual/{name}"] = _seq(length)
    # Conic constraint duals are matrices (one row per branch).
    features["SOCOPF/dual/jabr"] = hfd.Array2D(shape=(sizes.n_branch, 4), dtype=FLOAT_TYPE)
    features["SOCOPF/dual/sm_fr"] = hfd.Array2D(shape=(sizes.n_branch, 3), dtype=FLOAT_TYPE)
    features["SOCOPF/dual/sm_to"] = hfd.Array2D(shape=(sizes.n_branch, 3), dtype=FLOAT_TYPE)
    for name in ("va_diff", "wr", "wi", "pf", "pt", "qf", "qt"):
        features[f"SOCOPF/dual/{name}"] = _seq(sizes.n_branch)
    return features
412
+
413
+ # ┌───────────────┐
414
+ # │ Utilities │
415
+ # └───────────────┘
416
+
417
+ def open_maybe_gzip_cat(path: str | list):
418
+ if isinstance(path, list):
419
+ dest = Path(path[0]).parent.with_suffix(".h5")
420
+ if not dest.exists():
421
+ with open(dest, "wb") as dest_f:
422
+ for piece in path:
423
+ with open(piece, "rb") as piece_f:
424
+ shutil.copyfileobj(piece_f, dest_f)
425
+ shutil.rmtree(Path(piece).parent)
426
+ path = dest.as_posix()
427
+ return gzip.open(path, "rb") if path.endswith(".gz") else open(path, "rb")
README.md ADDED
@@ -0,0 +1,293 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: cc-by-sa-4.0
3
+ tags:
4
+ - energy
5
+ - optimization
6
+ - optimal_power_flow
7
+ - power_grid
8
+ pretty_name: PGLearn Optimal Power Flow (14_ieee, N-1)
9
+ task_categories:
10
+ - tabular-regression
11
+ dataset_info:
12
+ config_name: 14_ieee-nminus1
13
+ features:
14
+ - name: input/pd
15
+ sequence: float32
16
+ length: 11
17
+ - name: input/qd
18
+ sequence: float32
19
+ length: 11
20
+ - name: input/gen_status
21
+ sequence: bool
22
+ length: 5
23
+ - name: input/branch_status
24
+ sequence: bool
25
+ length: 20
26
+ - name: input/seed
27
+ dtype: int64
28
+ - name: ACOPF/primal/vm
29
+ sequence: float32
30
+ length: 14
31
+ - name: ACOPF/primal/va
32
+ sequence: float32
33
+ length: 14
34
+ - name: ACOPF/primal/pg
35
+ sequence: float32
36
+ length: 5
37
+ - name: ACOPF/primal/qg
38
+ sequence: float32
39
+ length: 5
40
+ - name: ACOPF/primal/pf
41
+ sequence: float32
42
+ length: 20
43
+ - name: ACOPF/primal/pt
44
+ sequence: float32
45
+ length: 20
46
+ - name: ACOPF/primal/qf
47
+ sequence: float32
48
+ length: 20
49
+ - name: ACOPF/primal/qt
50
+ sequence: float32
51
+ length: 20
52
+ - name: ACOPF/dual/kcl_p
53
+ sequence: float32
54
+ length: 14
55
+ - name: ACOPF/dual/kcl_q
56
+ sequence: float32
57
+ length: 14
58
+ - name: ACOPF/dual/vm
59
+ sequence: float32
60
+ length: 14
61
+ - name: ACOPF/dual/pg
62
+ sequence: float32
63
+ length: 5
64
+ - name: ACOPF/dual/qg
65
+ sequence: float32
66
+ length: 5
67
+ - name: ACOPF/dual/ohm_pf
68
+ sequence: float32
69
+ length: 20
70
+ - name: ACOPF/dual/ohm_pt
71
+ sequence: float32
72
+ length: 20
73
+ - name: ACOPF/dual/ohm_qf
74
+ sequence: float32
75
+ length: 20
76
+ - name: ACOPF/dual/ohm_qt
77
+ sequence: float32
78
+ length: 20
79
+ - name: ACOPF/dual/pf
80
+ sequence: float32
81
+ length: 20
82
+ - name: ACOPF/dual/pt
83
+ sequence: float32
84
+ length: 20
85
+ - name: ACOPF/dual/qf
86
+ sequence: float32
87
+ length: 20
88
+ - name: ACOPF/dual/qt
89
+ sequence: float32
90
+ length: 20
91
+ - name: ACOPF/dual/va_diff
92
+ sequence: float32
93
+ length: 20
94
+ - name: ACOPF/dual/sm_fr
95
+ sequence: float32
96
+ length: 20
97
+ - name: ACOPF/dual/sm_to
98
+ sequence: float32
99
+ length: 20
100
+ - name: ACOPF/dual/slack_bus
101
+ dtype: float32
102
+ - name: ACOPF/meta/seed
103
+ dtype: int64
104
+ - name: ACOPF/meta/formulation
105
+ dtype: string
106
+ - name: ACOPF/meta/primal_objective_value
107
+ dtype: float32
108
+ - name: ACOPF/meta/dual_objective_value
109
+ dtype: float32
110
+ - name: ACOPF/meta/primal_status
111
+ dtype: string
112
+ - name: ACOPF/meta/dual_status
113
+ dtype: string
114
+ - name: ACOPF/meta/termination_status
115
+ dtype: string
116
+ - name: ACOPF/meta/build_time
117
+ dtype: float32
118
+ - name: ACOPF/meta/extract_time
119
+ dtype: float32
120
+ - name: ACOPF/meta/solve_time
121
+ dtype: float32
122
+ - name: DCOPF/primal/va
123
+ sequence: float32
124
+ length: 14
125
+ - name: DCOPF/primal/pg
126
+ sequence: float32
127
+ length: 5
128
+ - name: DCOPF/primal/pf
129
+ sequence: float32
130
+ length: 20
131
+ - name: DCOPF/dual/kcl_p
132
+ sequence: float32
133
+ length: 14
134
+ - name: DCOPF/dual/pg
135
+ sequence: float32
136
+ length: 5
137
+ - name: DCOPF/dual/ohm_pf
138
+ sequence: float32
139
+ length: 20
140
+ - name: DCOPF/dual/pf
141
+ sequence: float32
142
+ length: 20
143
+ - name: DCOPF/dual/va_diff
144
+ sequence: float32
145
+ length: 20
146
+ - name: DCOPF/dual/slack_bus
147
+ dtype: float32
148
+ - name: DCOPF/meta/seed
149
+ dtype: int64
150
+ - name: DCOPF/meta/formulation
151
+ dtype: string
152
+ - name: DCOPF/meta/primal_objective_value
153
+ dtype: float32
154
+ - name: DCOPF/meta/dual_objective_value
155
+ dtype: float32
156
+ - name: DCOPF/meta/primal_status
157
+ dtype: string
158
+ - name: DCOPF/meta/dual_status
159
+ dtype: string
160
+ - name: DCOPF/meta/termination_status
161
+ dtype: string
162
+ - name: DCOPF/meta/build_time
163
+ dtype: float32
164
+ - name: DCOPF/meta/extract_time
165
+ dtype: float32
166
+ - name: DCOPF/meta/solve_time
167
+ dtype: float32
168
+ - name: SOCOPF/primal/w
169
+ sequence: float32
170
+ length: 14
171
+ - name: SOCOPF/primal/pg
172
+ sequence: float32
173
+ length: 5
174
+ - name: SOCOPF/primal/qg
175
+ sequence: float32
176
+ length: 5
177
+ - name: SOCOPF/primal/pf
178
+ sequence: float32
179
+ length: 20
180
+ - name: SOCOPF/primal/pt
181
+ sequence: float32
182
+ length: 20
183
+ - name: SOCOPF/primal/qf
184
+ sequence: float32
185
+ length: 20
186
+ - name: SOCOPF/primal/qt
187
+ sequence: float32
188
+ length: 20
189
+ - name: SOCOPF/primal/wr
190
+ sequence: float32
191
+ length: 20
192
+ - name: SOCOPF/primal/wi
193
+ sequence: float32
194
+ length: 20
195
+ - name: SOCOPF/dual/kcl_p
196
+ sequence: float32
197
+ length: 14
198
+ - name: SOCOPF/dual/kcl_q
199
+ sequence: float32
200
+ length: 14
201
+ - name: SOCOPF/dual/w
202
+ sequence: float32
203
+ length: 14
204
+ - name: SOCOPF/dual/pg
205
+ sequence: float32
206
+ length: 5
207
+ - name: SOCOPF/dual/qg
208
+ sequence: float32
209
+ length: 5
210
+ - name: SOCOPF/dual/ohm_pf
211
+ sequence: float32
212
+ length: 20
213
+ - name: SOCOPF/dual/ohm_pt
214
+ sequence: float32
215
+ length: 20
216
+ - name: SOCOPF/dual/ohm_qf
217
+ sequence: float32
218
+ length: 20
219
+ - name: SOCOPF/dual/ohm_qt
220
+ sequence: float32
221
+ length: 20
222
+ - name: SOCOPF/dual/jabr
223
+ dtype:
224
+ array2_d:
225
+ shape:
226
+ - 20
227
+ - 4
228
+ dtype: float32
229
+ - name: SOCOPF/dual/sm_fr
230
+ dtype:
231
+ array2_d:
232
+ shape:
233
+ - 20
234
+ - 3
235
+ dtype: float32
236
+ - name: SOCOPF/dual/sm_to
237
+ dtype:
238
+ array2_d:
239
+ shape:
240
+ - 20
241
+ - 3
242
+ dtype: float32
243
+ - name: SOCOPF/dual/va_diff
244
+ sequence: float32
245
+ length: 20
246
+ - name: SOCOPF/dual/wr
247
+ sequence: float32
248
+ length: 20
249
+ - name: SOCOPF/dual/wi
250
+ sequence: float32
251
+ length: 20
252
+ - name: SOCOPF/dual/pf
253
+ sequence: float32
254
+ length: 20
255
+ - name: SOCOPF/dual/pt
256
+ sequence: float32
257
+ length: 20
258
+ - name: SOCOPF/dual/qf
259
+ sequence: float32
260
+ length: 20
261
+ - name: SOCOPF/dual/qt
262
+ sequence: float32
263
+ length: 20
264
+ - name: SOCOPF/meta/seed
265
+ dtype: int64
266
+ - name: SOCOPF/meta/formulation
267
+ dtype: string
268
+ - name: SOCOPF/meta/primal_objective_value
269
+ dtype: float32
270
+ - name: SOCOPF/meta/dual_objective_value
271
+ dtype: float32
272
+ - name: SOCOPF/meta/primal_status
273
+ dtype: string
274
+ - name: SOCOPF/meta/dual_status
275
+ dtype: string
276
+ - name: SOCOPF/meta/termination_status
277
+ dtype: string
278
+ - name: SOCOPF/meta/build_time
279
+ dtype: float32
280
+ - name: SOCOPF/meta/extract_time
281
+ dtype: float32
282
+ - name: SOCOPF/meta/solve_time
283
+ dtype: float32
284
+ splits:
285
+ - name: train
286
+ num_bytes: 1569115561
287
+ num_examples: 317691
288
+ - name: test
289
+ num_bytes: 392280126
290
+ num_examples: 79423
291
+ download_size: 1487536971
292
+ dataset_size: 1961395687
293
+ ---
case.json.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f7d0b8c1acaa6f3900f53ce7544606758a266bbbfd60dd58896393cc5a033da3
3
+ size 12865
config.toml ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Name of the reference PGLib case. Must be a valid PGLib case name.
2
+ pglib_case = "pglib_opf_case14_ieee"
3
+ floating_point_type = "Float32"
4
+
5
+ [sampler]
6
+ # data sampler options
7
+ [sampler.load]
8
+ noise_type = "ScaledUniform"
9
+ l = 0.7 # Lower bound of base load factor
10
+ u = 1.1 # Upper bound of base load factor
11
+ sigma = 0.20 # Relative (multiplicative) noise level.
12
+
13
+ [sampler.status]
14
+ type = "Nminus1"
15
+
16
+ [OPF]
17
+
18
+ [OPF.ACOPF]
19
+ type = "ACOPF"
20
+ solver.name = "Ipopt"
21
+ solver.attributes.tol = 1e-6
22
+ solver.attributes.linear_solver = "ma27"
23
+
24
+ [OPF.DCOPF]
25
+ # Formulation/solver options
26
+ type = "DCOPF"
27
+ solver.name = "HiGHS"
28
+
29
+ [OPF.SOCOPF]
30
+ type = "SOCOPF"
31
+ solver.name = "Clarabel"
32
+ # Tight tolerances
33
+ solver.attributes.tol_gap_abs = 1e-6
34
+ solver.attributes.tol_gap_rel = 1e-6
35
+ solver.attributes.tol_feas = 1e-6
36
+ solver.attributes.tol_infeas_rel = 1e-6
37
+ solver.attributes.tol_ktratio = 1e-6
38
+ # Reduced accuracy settings
39
+ solver.attributes.reduced_tol_gap_abs = 1e-6
40
+ solver.attributes.reduced_tol_gap_rel = 1e-6
41
+ solver.attributes.reduced_tol_feas = 1e-6
42
+ solver.attributes.reduced_tol_infeas_abs = 1e-6
43
+ solver.attributes.reduced_tol_infeas_rel = 1e-6
44
+ solver.attributes.reduced_tol_ktratio = 1e-6
infeasible/ACOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:46242fcc5483c3f0cf91417b8b38a1e3631b7f3ac0cc42e0ace99d64573dd76e
3
+ size 91280412
infeasible/ACOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c94b83fbba22c57204efcef52d961de08821ab85ea76f7a2a4b6b863c4745c3d
3
+ size 3336518
infeasible/ACOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:23fa9125509e406977828eb515105d41cb650dcba1a842e392b42d36e4fc6a53
3
+ size 40933977
infeasible/DCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:179a4cf70845c81160dc6ff633cdcd3205884902e669431ebc03ce4c14ed395b
3
+ size 455632
infeasible/DCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7c50929a61b034ece9d791881363fb41f7e49f5cdec9b12a3404e4fa7bc365c3
3
+ size 3139823
infeasible/DCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:11dd8f582287e399e42876f2dde041db898aea7874abc3bd0dc972bb04e2c357
3
+ size 4969539
infeasible/SOCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:19a13a4fc652289adf80f3a28b5d2669d70b846f0583e1d11d908ecdffc478db
3
+ size 180954467
infeasible/SOCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2d90516284b4b7e7b8cb0efe2a4cc3dace052eb3154336913ad9e3817366020e
3
+ size 3164174
infeasible/SOCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:96fe2f0d33978adcdd108851ce8957d19cee5ffd4a3b8d6118a3dbb5dd695da4
3
+ size 2966905
infeasible/input.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9d81efc24ad25e24dac62e67bb01cad7f9c0705dd32a931eee4d95a3aa147b79
3
+ size 8429925
test/ACOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a2adce03d6bb1d83b00da126285462c8b7ec1064397ec8c71c14446b92d9e4c2
3
+ size 70940403
test/ACOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:42d15485862bda86d92e005245d5bcb81741822870af0f30b01696b34b31125b
3
+ size 2589801
test/ACOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8457e0f8b3f0523bec07434204ed1fa3d51d3e6990983d4d8434a0699c9e46e7
3
+ size 31308841
test/DCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e25fd88b5f1851de28eaae195043526434dca3f54f50f1af25c6554b4c35460b
3
+ size 256949
test/DCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e2c6a490a2678acf7ac814d6ae09c16102761d6f611f4e85c1f7aa93fe8573c5
3
+ size 2551559
test/DCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:976e3026ce647617c4939acef817ff6ee077695820cb4d18831ce18f3f45dd8f
3
+ size 9244043
test/SOCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aba5ccd9d792ab50fb1baa2c62f20ffbae895516687656f7fe9efe2317002184
3
+ size 131792217
test/SOCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:33dc13dfc9c4e967b738667fe1ebded71a06b01b81440b77ecdb554f2bd86cf2
3
+ size 2684223
test/SOCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9eb205585430c078997997b9bff4470d24f18f913d7d0c40ee02f1411f9f168a
3
+ size 39646865
test/input.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d9178f136504c6ba17d035237a17c13fb311b4a55794a0e0105561855cd9ff62
3
+ size 6539997
train/ACOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:30ad2c925dbe8d85dae055303b0447a8e77694e870c96da98233478ab5299858
3
+ size 283745709
train/ACOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4ae14b201628849fe0e55950804170925f6f20cb9a67396cd2293c5efd269d85
3
+ size 10314225
train/ACOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7b501dd3a98c4beeb1e76d38c4e0946ceabddffa711c4577fb070a3f2e71876d
3
+ size 125223474
train/DCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6309b5a12f426ff17e4007bc4c2352ebf5273f5f62c8cb421d5b1ff6e8b83c2e
3
+ size 1022573
train/DCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9196cf94552ab742d080f40d1ad831b9998d45eb3ce359d007e6cb1ed7cc4f0b
3
+ size 10155431
train/DCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b30399f49b9c82825ece7fe655b8ec036e359e62ac44f277d23ea73e37cd9ae7
3
+ size 36971358
train/SOCOPF/dual.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4a15b92c93c8249c2b0a90911cd3526bee6123b7918effb1cc4dcfd0f948e467
3
+ size 527138398
train/SOCOPF/meta.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c19e3e80543edc919cebd1d1ddff82138f6182c7064d94e4db5c61aae012f92a
3
+ size 10693683
train/SOCOPF/primal.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:30fbb96447b3d657daa18eba2ec853cf6ba2874081a2b449bf8a261ef6243697
3
+ size 158573133
train/input.h5.gz ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2adb48da0dd6997bc2360702deec2b1571dc394a4a05768e1dcb3920509d0fc8
3
+ size 26144089