diff --git a/6470_rte/test-00053-of-00075.parquet b/6470_rte/test-00053-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..6b0eb4975062e77afbeb05c2db1d834c2a65d399 --- /dev/null +++ b/6470_rte/test-00053-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:86ac05d829760dc17fda8004398ed33ff5197edffaaaec878d8b790b7130e4ef +size 492977115 diff --git a/6470_rte/test-00054-of-00075.parquet b/6470_rte/test-00054-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..93bb03d28fb936591e2b31b87febd7f9709f4c99 --- /dev/null +++ b/6470_rte/test-00054-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:df4aafffb871134a3851a069de31a3d1c23f33109229b077a79262272bba409e +size 492855168 diff --git a/6470_rte/test-00055-of-00075.parquet b/6470_rte/test-00055-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..c153e8f25465626cbb0af5bc9af65ee38882b1f7 --- /dev/null +++ b/6470_rte/test-00055-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f22cf005704caf4f80699b8d42bc86d51f11de393bc595e1ddd65644cebf9751 +size 493031332 diff --git a/6470_rte/test-00056-of-00075.parquet b/6470_rte/test-00056-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..b2977202dd07f3102bfd04d1f6d7497e280e226f --- /dev/null +++ b/6470_rte/test-00056-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d65f394f156a73275ce30cde12e3b2370ebcba648f45edab61139c7092542309 +size 492951346 diff --git a/6470_rte/test-00057-of-00075.parquet b/6470_rte/test-00057-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..809bcb6f283094b3e17f18d34db2e0bdcecc9c87 --- /dev/null +++ b/6470_rte/test-00057-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a79337392cda3f43ab432adcc7e5c43465df94bae52af896f6202046ee8c2c18 +size 492886870 diff --git a/6470_rte/test-00058-of-00075.parquet b/6470_rte/test-00058-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..6a0d6408d3b715b6439059e63358d9cea96e1baf --- /dev/null +++ b/6470_rte/test-00058-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:534e3efd445c7b37bfa2a85e0390571b673abf45e717f144707879570ffe7e6d +size 492939578 diff --git a/6470_rte/test-00059-of-00075.parquet b/6470_rte/test-00059-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..6e91421645c3427bc4d08848a694e2e93b26c0e3 --- /dev/null +++ b/6470_rte/test-00059-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1fe62b6dcd00360f1314e9499a2c3c22381bec52540e571a409d075a4b22e34d +size 493037032 diff --git a/6470_rte/test-00060-of-00075.parquet b/6470_rte/test-00060-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..f5749dd09873cf34996e8b5217c334f27c8f6e0f --- /dev/null +++ b/6470_rte/test-00060-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:33d3581b210698f362bb4e4ee932ea93cea8ddc9e7e29775364e6f4c6735517d +size 492930855 diff --git a/6470_rte/test-00061-of-00075.parquet b/6470_rte/test-00061-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..37f05de419fa6bda3bb895171d92b883828bcfb3 --- /dev/null +++ b/6470_rte/test-00061-of-00075.parquet @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:37f8c249b89a022761ae4ea44906e4f50117272703b1bd96769ce0740854d950 +size 492925622 diff --git a/6470_rte/test-00062-of-00075.parquet b/6470_rte/test-00062-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..6139be7e9d91d2f3741278c7b60e70afe410032b --- /dev/null +++ b/6470_rte/test-00062-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:046789c143103c82faf3caf8d574176ecfb76f76824e6a912df64bef8533d2d1 +size 492910007 diff --git a/6470_rte/test-00063-of-00075.parquet b/6470_rte/test-00063-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..15c83bead21b72715bf91a38f8fd6f5269dedae5 --- /dev/null +++ b/6470_rte/test-00063-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3c0b36d8c45c3c6e4ae7557a40803147d600e44ff1f54b9c4f5c1fc58676b044 +size 493108978 diff --git a/6470_rte/test-00064-of-00075.parquet b/6470_rte/test-00064-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..739f39a47398aa39903d8af9d4e49716a13949d1 --- /dev/null +++ b/6470_rte/test-00064-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5414a53486874fb5bafe147d7721a6ccfeb08be1357b00d0c8a17a19f3a9e177 +size 492911906 diff --git a/6470_rte/test-00065-of-00075.parquet b/6470_rte/test-00065-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..7aafbcb6b249ead2a1053b07d587315d16c865cc --- /dev/null +++ b/6470_rte/test-00065-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:597784a5ca3d5a74e7fad1a1bed5d2c8e90b6ee69af17ad4dc08676d633e94f4 +size 492925317 diff --git a/6470_rte/test-00066-of-00075.parquet b/6470_rte/test-00066-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..85c070f752a0a3cc49394500708ead95ff8fc637 --- /dev/null +++ b/6470_rte/test-00066-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6e997c90a743b7938825edf3f00f9d47a53656a2bf97c439b74daa5f5e013de5 +size 492921962 diff --git a/6470_rte/test-00067-of-00075.parquet b/6470_rte/test-00067-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..20c1b22139770ab45088433c12d42c2f579355f7 --- /dev/null +++ b/6470_rte/test-00067-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d8f8aa391efd01e012c22dd36978198e47621ff8c0b71cd5dac70fa2582455b1 +size 492869293 diff --git a/6470_rte/test-00068-of-00075.parquet b/6470_rte/test-00068-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..2b41310f901c009e9869d848b2282dca72675ffc --- /dev/null +++ b/6470_rte/test-00068-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:09e7c4f58b4534e54edc9f4b070d41f4e4c1b2f5e4132cadfad248cf75f60ca8 +size 493019632 diff --git a/6470_rte/test-00069-of-00075.parquet b/6470_rte/test-00069-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..e3d5a10a8b5936fe4e043212470d533b1affd155 --- /dev/null +++ b/6470_rte/test-00069-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3dcdcdf167efa0c4c9da28a9c0f3024b78c8bf143a93c939e7ad563f238f2141 +size 492813169 diff --git a/6470_rte/test-00070-of-00075.parquet b/6470_rte/test-00070-of-00075.parquet new file mode 100644 index 
0000000000000000000000000000000000000000..235fcf20a51bab3dd58f0a0dfdbf16029b6197be --- /dev/null +++ b/6470_rte/test-00070-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:821d5a8b5c47d61f0dd08b1176538ded8443a6607c4bfceaf76144b1072ba8c4 +size 492926960 diff --git a/6470_rte/test-00071-of-00075.parquet b/6470_rte/test-00071-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..245d44c8b7845101e44c7430e141d913ea428d92 --- /dev/null +++ b/6470_rte/test-00071-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:08edaff41e630094e951d4c044c1e13993a24bbb7e4f285fe75a76fc95790b55 +size 492839396 diff --git a/6470_rte/test-00072-of-00075.parquet b/6470_rte/test-00072-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..26dbc4a9f29222ac8b68d4f7fecd6d95399afc55 --- /dev/null +++ b/6470_rte/test-00072-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b5e08015251f116ffe52746fccfa0376a0de39cee35072a7baa1ee029b63d477 +size 493084853 diff --git a/6470_rte/test-00073-of-00075.parquet b/6470_rte/test-00073-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..6dccf249d4c40a7731ce4cc354b4491bbcc5f5d4 --- /dev/null +++ b/6470_rte/test-00073-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dc49b3c0f810c264af876d588636aa826ad95947c6b12156ec296333c398d2e0 +size 492997438 diff --git a/6470_rte/test-00074-of-00075.parquet b/6470_rte/test-00074-of-00075.parquet new file mode 100644 index 0000000000000000000000000000000000000000..c9f02ef1d38c983a16f343602eeb1e3e665a471e --- /dev/null +++ b/6470_rte/test-00074-of-00075.parquet @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2c4abea55aeda06c10d362a23875bc2d39f95496035ed174867fe97e567039e4 +size 492958698 diff --git a/PGLearn-Large-6470_rte.py b/PGLearn-Large-6470_rte.py deleted file mode 100644 index aec28ad34ea280d38e0328e0db903757091659e8..0000000000000000000000000000000000000000 --- a/PGLearn-Large-6470_rte.py +++ /dev/null @@ -1,429 +0,0 @@ -from __future__ import annotations -from dataclasses import dataclass -from pathlib import Path -import json -import shutil - -import datasets as hfd -import h5py -import pgzip as gzip -import pyarrow as pa - -# ┌──────────────┐ -# │ Metadata │ -# └──────────────┘ - -@dataclass -class CaseSizes: - n_bus: int - n_load: int - n_gen: int - n_branch: int - -CASENAME = "6470_rte" -SIZES = CaseSizes(n_bus=6470, n_load=3670, n_gen=761, n_branch=9005) -NUM_TRAIN = 73912 -NUM_TEST = 18478 -NUM_INFEASIBLE = 7628 -SPLITFILES = { - "train/SOCOPF/dual.h5.gz": ["train/SOCOPF/dual/xaa", "train/SOCOPF/dual/xab"], -} - -URL = "https://huggingface.co/datasets/PGLearn/PGLearn-Large-6470_rte" -DESCRIPTION = """\ -The 6470_rte PGLearn optimal power flow dataset, part of the PGLearn-Large collection. \ -""" -VERSION = hfd.Version("1.0.0") -DEFAULT_CONFIG_DESCRIPTION="""\ -This configuration contains feasible input, primal solution, and dual solution data \ -for the ACOPF, DCOPF, and SOCOPF formulations on the {case} system. For case data, \ -download the case.json.gz file from the `script` branch of the repository. 
\ -https://huggingface.co/datasets/PGLearn/PGLearn-Large-6470_rte/blob/script/case.json.gz -""" -USE_ML4OPF_WARNING = """ -================================================================================================ - Loading PGLearn-Large-6470_rte through the `datasets.load_dataset` function may be slow. - - Consider using ML4OPF to directly convert to `torch.Tensor`; for more info see: - https://github.com/AI4OPT/ML4OPF?tab=readme-ov-file#manually-loading-data - - Or, use `huggingface_hub.snapshot_download` and an HDF5 reader; for more info see: - https://huggingface.co/datasets/PGLearn/PGLearn-Large-6470_rte#downloading-individual-files -================================================================================================ -""" -CITATION = """\ -@article{klamkinpglearn, - title={{PGLearn - An Open-Source Learning Toolkit for Optimal Power Flow}}, - author={Klamkin, Michael and Tanneau, Mathieu and Van Hentenryck, Pascal}, - year={2025}, -}\ -""" - -IS_COMPRESSED = True - -# ┌──────────────────┐ -# │ Formulations │ -# └──────────────────┘ - -def acopf_features(sizes: CaseSizes, primal: bool, dual: bool, meta: bool): - features = {} - if primal: features.update(acopf_primal_features(sizes)) - if dual: features.update(acopf_dual_features(sizes)) - if meta: features.update({f"ACOPF/{k}": v for k, v in META_FEATURES.items()}) - return features - -def dcopf_features(sizes: CaseSizes, primal: bool, dual: bool, meta: bool): - features = {} - if primal: features.update(dcopf_primal_features(sizes)) - if dual: features.update(dcopf_dual_features(sizes)) - if meta: features.update({f"DCOPF/{k}": v for k, v in META_FEATURES.items()}) - return features - -def socopf_features(sizes: CaseSizes, primal: bool, dual: bool, meta: bool): - features = {} - if primal: features.update(socopf_primal_features(sizes)) - if dual: features.update(socopf_dual_features(sizes)) - if meta: features.update({f"SOCOPF/{k}": v for k, v in META_FEATURES.items()}) - return features - -FORMULATIONS_TO_FEATURES = { - "ACOPF": acopf_features, - "DCOPF": dcopf_features, - "SOCOPF": socopf_features, -} - -# ┌───────────────────┐ -# │ BuilderConfig │ -# └───────────────────┘ - -class PGLearnLarge6470_rteConfig(hfd.BuilderConfig): - """BuilderConfig for PGLearn-Large-6470_rte. - By default, primal solution data, metadata, input, casejson, are included for the train and test splits. - - To modify the default configuration, pass attributes of this class to `datasets.load_dataset`: - - Attributes: - formulations (list[str]): The formulation(s) to include, e.g. ["ACOPF", "DCOPF"] - primal (bool, optional): Include primal solution data. Defaults to True. - dual (bool, optional): Include dual solution data. Defaults to False. - meta (bool, optional): Include metadata. Defaults to True. - input (bool, optional): Include input data. Defaults to True. - casejson (bool, optional): Include case.json data. Defaults to True. - train (bool, optional): Include training samples. Defaults to True. - test (bool, optional): Include testing samples. Defaults to True. - infeasible (bool, optional): Include infeasible samples. Defaults to False. 
- """ - def __init__(self, - formulations: list[str], - primal: bool=True, dual: bool=False, meta: bool=True, input: bool = True, casejson: bool=True, - train: bool=True, test: bool=True, infeasible: bool=False, - compressed: bool=IS_COMPRESSED, **kwargs - ): - super(PGLearnLarge6470_rteConfig, self).__init__(version=VERSION, **kwargs) - - self.case = CASENAME - self.formulations = formulations - - self.primal = primal - self.dual = dual - self.meta = meta - self.input = input - self.casejson = casejson - - self.train = train - self.test = test - self.infeasible = infeasible - - self.gz_ext = ".gz" if compressed else "" - - @property - def size(self): - return SIZES - - @property - def features(self): - features = {} - if self.casejson: features.update(case_features()) - if self.input: features.update(input_features(SIZES)) - for formulation in self.formulations: - features.update(FORMULATIONS_TO_FEATURES[formulation](SIZES, self.primal, self.dual, self.meta)) - return hfd.Features(features) - - @property - def splits(self): - splits: dict[hfd.Split, dict[str, str | int]] = {} - if self.train: - splits[hfd.Split.TRAIN] = { - "name": "train", - "num_examples": NUM_TRAIN - } - if self.test: - splits[hfd.Split.TEST] = { - "name": "test", - "num_examples": NUM_TEST - } - if self.infeasible: - splits[hfd.Split("infeasible")] = { - "name": "infeasible", - "num_examples": NUM_INFEASIBLE - } - return splits - - @property - def urls(self): - urls: dict[str, None | str | list] = { - "case": None, "train": [], "test": [], "infeasible": [], - } - - if self.casejson: - urls["case"] = f"case.json" + self.gz_ext - else: - urls.pop("case") - - split_names = [] - if self.train: split_names.append("train") - if self.test: split_names.append("test") - if self.infeasible: split_names.append("infeasible") - - for split in split_names: - if self.input: urls[split].append(f"{split}/input.h5" + self.gz_ext) - for formulation in self.formulations: - if self.primal: - filename = f"{split}/{formulation}/primal.h5" + self.gz_ext - if filename in SPLITFILES: urls[split].append(SPLITFILES[filename]) - else: urls[split].append(filename) - if self.dual: - filename = f"{split}/{formulation}/dual.h5" + self.gz_ext - if filename in SPLITFILES: urls[split].append(SPLITFILES[filename]) - else: urls[split].append(filename) - if self.meta: - filename = f"{split}/{formulation}/meta.h5" + self.gz_ext - if filename in SPLITFILES: urls[split].append(SPLITFILES[filename]) - else: urls[split].append(filename) - return urls - -# ┌────────────────────┐ -# │ DatasetBuilder │ -# └────────────────────┘ - -class PGLearnLarge6470_rte(hfd.ArrowBasedBuilder): - """DatasetBuilder for PGLearn-Large-6470_rte. - The main interface is `datasets.load_dataset` with `trust_remote_code=True`, e.g. 
- - ```python - from datasets import load_dataset - ds = load_dataset("PGLearn/PGLearn-Large-6470_rte", trust_remote_code=True, - # modify the default configuration by passing kwargs - formulations=["DCOPF"], - dual=False, - meta=False, - ) - ``` - """ - - DEFAULT_WRITER_BATCH_SIZE = 10000 - BUILDER_CONFIG_CLASS = PGLearnLarge6470_rteConfig - DEFAULT_CONFIG_NAME=CASENAME - BUILDER_CONFIGS = [ - PGLearnLarge6470_rteConfig( - name=CASENAME, description=DEFAULT_CONFIG_DESCRIPTION.format(case=CASENAME), - formulations=list(FORMULATIONS_TO_FEATURES.keys()), - primal=True, dual=True, meta=True, input=True, casejson=False, - train=True, test=True, infeasible=False, - ) - ] - - def _info(self): - return hfd.DatasetInfo( - features=self.config.features, splits=self.config.splits, - description=DESCRIPTION + self.config.description, - homepage=URL, citation=CITATION, - ) - - def _split_generators(self, dl_manager: hfd.DownloadManager): - hfd.logging.get_logger().warning(USE_ML4OPF_WARNING) - - filepaths = dl_manager.download_and_extract(self.config.urls) - - splits: list[hfd.SplitGenerator] = [] - if self.config.train: - splits.append(hfd.SplitGenerator( - name=hfd.Split.TRAIN, - gen_kwargs=dict(case_file=filepaths.get("case", None), data_files=tuple(filepaths["train"]), n_samples=NUM_TRAIN), - )) - if self.config.test: - splits.append(hfd.SplitGenerator( - name=hfd.Split.TEST, - gen_kwargs=dict(case_file=filepaths.get("case", None), data_files=tuple(filepaths["test"]), n_samples=NUM_TEST), - )) - if self.config.infeasible: - splits.append(hfd.SplitGenerator( - name=hfd.Split("infeasible"), - gen_kwargs=dict(case_file=filepaths.get("case", None), data_files=tuple(filepaths["infeasible"]), n_samples=NUM_INFEASIBLE), - )) - return splits - - def _generate_tables(self, case_file: str | None, data_files: tuple[hfd.utils.track.tracked_str | list[hfd.utils.track.tracked_str]], n_samples: int): - case_data: str | None = json.dumps(json.load(open_maybe_gzip_cat(case_file))) if case_file is not None else None - data: dict[str, h5py.File] = {} - for file in data_files: - v = h5py.File(open_maybe_gzip_cat(file), "r") - if isinstance(file, list): - k = "/".join(Path(file[0].get_origin()).parts[-3:-1]).split(".")[0] - else: - k = "/".join(Path(file.get_origin()).parts[-2:]).split(".")[0] - data[k] = v - for k in list(data.keys()): - if "/input" in k: data[k.split("/", 1)[1]] = data.pop(k) - - batch_size = self._writer_batch_size or self.DEFAULT_WRITER_BATCH_SIZE - for i in range(0, n_samples, batch_size): - effective_batch_size = min(batch_size, n_samples - i) - - sample_data = { - f"{dk}/{k}": - hfd.features.features.numpy_to_pyarrow_listarray(v[i:i + effective_batch_size, ...]) - for dk, d in data.items() for k, v in d.items() if f"{dk}/{k}" in self.config.features - } - - if case_data is not None: - sample_data["case/json"] = pa.array([case_data] * effective_batch_size) - - yield i, pa.Table.from_pydict(sample_data) - - for f in data.values(): - f.close() - -# ┌──────────────┐ -# │ Features │ -# └──────────────┘ - -FLOAT_TYPE = "float32" -INT_TYPE = "int64" -BOOL_TYPE = "bool" -STRING_TYPE = "string" - -def case_features(): - # FIXME: better way to share schema of case data -- need to treat jagged arrays - return { - "case/json": hfd.Value(STRING_TYPE), - } - -META_FEATURES = { - "meta/seed": hfd.Value(dtype=INT_TYPE), - "meta/formulation": hfd.Value(dtype=STRING_TYPE), - "meta/primal_objective_value": hfd.Value(dtype=FLOAT_TYPE), - "meta/dual_objective_value": hfd.Value(dtype=FLOAT_TYPE), - 
"meta/primal_status": hfd.Value(dtype=STRING_TYPE), - "meta/dual_status": hfd.Value(dtype=STRING_TYPE), - "meta/termination_status": hfd.Value(dtype=STRING_TYPE), - "meta/build_time": hfd.Value(dtype=FLOAT_TYPE), - "meta/extract_time": hfd.Value(dtype=FLOAT_TYPE), - "meta/solve_time": hfd.Value(dtype=FLOAT_TYPE), -} - -def input_features(sizes: CaseSizes): - return { - "input/pd": hfd.Sequence(length=sizes.n_load, feature=hfd.Value(dtype=FLOAT_TYPE)), - "input/qd": hfd.Sequence(length=sizes.n_load, feature=hfd.Value(dtype=FLOAT_TYPE)), - "input/gen_status": hfd.Sequence(length=sizes.n_gen, feature=hfd.Value(dtype=BOOL_TYPE)), - "input/branch_status": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=BOOL_TYPE)), - "input/seed": hfd.Value(dtype=INT_TYPE), - } - -def acopf_primal_features(sizes: CaseSizes): - return { - "ACOPF/primal/vm": hfd.Sequence(length=sizes.n_bus, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/primal/va": hfd.Sequence(length=sizes.n_bus, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/primal/pg": hfd.Sequence(length=sizes.n_gen, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/primal/qg": hfd.Sequence(length=sizes.n_gen, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/primal/pf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/primal/pt": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/primal/qf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/primal/qt": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - } -def acopf_dual_features(sizes: CaseSizes): - return { - "ACOPF/dual/kcl_p": hfd.Sequence(length=sizes.n_bus, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/kcl_q": hfd.Sequence(length=sizes.n_bus, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/vm": hfd.Sequence(length=sizes.n_bus, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/pg": hfd.Sequence(length=sizes.n_gen, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/qg": hfd.Sequence(length=sizes.n_gen, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/ohm_pf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/ohm_pt": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/ohm_qf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/ohm_qt": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/pf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/pt": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/qf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/qt": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/va_diff": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/sm_fr": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/sm_to": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "ACOPF/dual/slack_bus": hfd.Value(dtype=FLOAT_TYPE), - } -def dcopf_primal_features(sizes: CaseSizes): - return { - "DCOPF/primal/va": hfd.Sequence(length=sizes.n_bus, feature=hfd.Value(dtype=FLOAT_TYPE)), - "DCOPF/primal/pg": hfd.Sequence(length=sizes.n_gen, feature=hfd.Value(dtype=FLOAT_TYPE)), - "DCOPF/primal/pf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - } -def 
dcopf_dual_features(sizes: CaseSizes): - return { - "DCOPF/dual/kcl_p": hfd.Sequence(length=sizes.n_bus, feature=hfd.Value(dtype=FLOAT_TYPE)), - "DCOPF/dual/pg": hfd.Sequence(length=sizes.n_gen, feature=hfd.Value(dtype=FLOAT_TYPE)), - "DCOPF/dual/ohm_pf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "DCOPF/dual/pf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "DCOPF/dual/va_diff": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "DCOPF/dual/slack_bus": hfd.Value(dtype=FLOAT_TYPE), - } -def socopf_primal_features(sizes: CaseSizes): - return { - "SOCOPF/primal/w": hfd.Sequence(length=sizes.n_bus, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/primal/pg": hfd.Sequence(length=sizes.n_gen, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/primal/qg": hfd.Sequence(length=sizes.n_gen, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/primal/pf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/primal/pt": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/primal/qf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/primal/qt": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/primal/wr": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/primal/wi": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - } -def socopf_dual_features(sizes: CaseSizes): - return { - "SOCOPF/dual/kcl_p": hfd.Sequence(length=sizes.n_bus, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/kcl_q": hfd.Sequence(length=sizes.n_bus, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/w": hfd.Sequence(length=sizes.n_bus, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/pg": hfd.Sequence(length=sizes.n_gen, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/qg": hfd.Sequence(length=sizes.n_gen, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/ohm_pf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/ohm_pt": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/ohm_qf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/ohm_qt": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/jabr": hfd.Array2D(shape=(sizes.n_branch, 4), dtype=FLOAT_TYPE), - "SOCOPF/dual/sm_fr": hfd.Array2D(shape=(sizes.n_branch, 3), dtype=FLOAT_TYPE), - "SOCOPF/dual/sm_to": hfd.Array2D(shape=(sizes.n_branch, 3), dtype=FLOAT_TYPE), - "SOCOPF/dual/va_diff": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/wr": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/wi": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/pf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/pt": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/qf": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - "SOCOPF/dual/qt": hfd.Sequence(length=sizes.n_branch, feature=hfd.Value(dtype=FLOAT_TYPE)), - } - -# ┌───────────────┐ -# │ Utilities │ -# └───────────────┘ - -def open_maybe_gzip_cat(path: str | list): - if isinstance(path, list): - dest = Path(path[0]).parent.with_suffix(".h5") - if not dest.exists(): - with open(dest, "wb") as 
dest_f: - for piece in path: - with open(piece, "rb") as piece_f: - shutil.copyfileobj(piece_f, dest_f) - shutil.rmtree(Path(piece).parent) - path = dest.as_posix() - return gzip.open(path, "rb") if path.endswith(".gz") else open(path, "rb") diff --git a/README.md b/README.md index 1dfc44b19010af20cc4b9d69e9d46022907e9293..097133531024d6bf71f56919f95615d1061492f2 100644 --- a/README.md +++ b/README.md @@ -288,6 +288,14 @@ dataset_info: - name: test num_bytes: 37010930475 num_examples: 18478 - download_size: 160191271379 + download_size: 185038153518 dataset_size: 185054652373 +configs: +- config_name: 6470_rte + data_files: + - split: train + path: 6470_rte/train-* + - split: test + path: 6470_rte/test-* + default: true --- diff --git a/case.json.gz b/case.json.gz deleted file mode 100644 index e435369e02206200024131f7c9295ad833caeb1f..0000000000000000000000000000000000000000 --- a/case.json.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2a5bcccfae90e14f99b4556bd737be8c8a32409848255413030b7fd44c582538 -size 5183519 diff --git a/config.toml b/config.toml deleted file mode 100644 index abee7644f9156d29f48f93e5ca3ac7ed367e8d66..0000000000000000000000000000000000000000 --- a/config.toml +++ /dev/null @@ -1,42 +0,0 @@ -# Name of the reference PGLib case. Must be a valid PGLib case name. -pglib_case = "pglib_opf_case6470_rte" -floating_point_type = "Float32" - -[sampler] -# data sampler options -[sampler.load] -noise_type = "ScaledUniform" -l = 0.6 # Lower bound of base load factor -u = 1.0 # Upper bound of base load factor -sigma = 0.20 # Relative (multiplicative) noise level. - - -[OPF] - -[OPF.ACOPF] -type = "ACOPF" -solver.name = "Ipopt" -solver.attributes.tol = 1e-6 -solver.attributes.linear_solver = "ma27" - -[OPF.DCOPF] -# Formulation/solver options -type = "DCOPF" -solver.name = "HiGHS" - -[OPF.SOCOPF] -type = "SOCOPF" -solver.name = "Clarabel" -# Tight tolerances -solver.attributes.tol_gap_abs = 1e-6 -solver.attributes.tol_gap_rel = 1e-6 -solver.attributes.tol_feas = 1e-6 -solver.attributes.tol_infeas_rel = 1e-6 -solver.attributes.tol_ktratio = 1e-6 -# Reduced accuracy settings -solver.attributes.reduced_tol_gap_abs = 1e-6 -solver.attributes.reduced_tol_gap_rel = 1e-6 -solver.attributes.reduced_tol_feas = 1e-6 -solver.attributes.reduced_tol_infeas_abs = 1e-6 -solver.attributes.reduced_tol_infeas_rel = 1e-6 -solver.attributes.reduced_tol_ktratio = 1e-6 diff --git a/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-45.out b/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-45.out deleted file mode 100644 index 89d8d5451aff4a506963fa013428cfef9e97aec1..0000000000000000000000000000000000000000 --- a/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-45.out +++ /dev/null @@ -1,9 +0,0 @@ ---------------------------------------- -Begin Slurm Prolog: May-03-2025 23:20:55 -Job ID: 4261837 -User ID: mklamkin3 -Account: gts-phentenryck3-coda20 -Job name: OPF -Partition: cpu-small -QOS: embers ---------------------------------------- diff --git a/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-46.out b/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-46.out deleted file mode 100644 index 762026969cfa6dc5a1735886f9c44d6c106096fc..0000000000000000000000000000000000000000 --- a/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-46.out +++ /dev/null @@ -1,9 +0,0 @@ ---------------------------------------- -Begin Slurm Prolog: May-03-2025 23:20:55 -Job ID: 4261838 -User ID: mklamkin3 -Account: gts-phentenryck3-coda20 -Job name: OPF -Partition: cpu-small -QOS: embers 
---------------------------------------- diff --git a/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-47.out b/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-47.out deleted file mode 100644 index 2ee9de688f7b9019421c17216c17d7473a2f3c46..0000000000000000000000000000000000000000 --- a/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-47.out +++ /dev/null @@ -1,9 +0,0 @@ ---------------------------------------- -Begin Slurm Prolog: May-03-2025 23:20:55 -Job ID: 4261839 -User ID: mklamkin3 -Account: gts-phentenryck3-coda20 -Job name: OPF -Partition: cpu-small -QOS: embers ---------------------------------------- diff --git a/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-48.out b/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-48.out deleted file mode 100644 index cabaefa092516bc9ef77304a2c9493e5c6e32333..0000000000000000000000000000000000000000 --- a/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-48.out +++ /dev/null @@ -1,9 +0,0 @@ ---------------------------------------- -Begin Slurm Prolog: May-03-2025 23:20:55 -Job ID: 4261840 -User ID: mklamkin3 -Account: gts-phentenryck3-coda20 -Job name: OPF -Partition: cpu-small -QOS: embers ---------------------------------------- diff --git a/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-49.out b/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-49.out deleted file mode 100644 index 9f5c9d99579c906fd6180c9ac670a2eebc4f748c..0000000000000000000000000000000000000000 --- a/data/pglearn/9241_pegase/slurm/logs/OPF.4261836-49.out +++ /dev/null @@ -1,9 +0,0 @@ ---------------------------------------- -Begin Slurm Prolog: May-03-2025 23:20:55 -Job ID: 4261836 -User ID: mklamkin3 -Account: gts-phentenryck3-coda20 -Job name: OPF -Partition: cpu-small -QOS: embers ---------------------------------------- diff --git a/infeasible/ACOPF/dual.h5.gz b/infeasible/ACOPF/dual.h5.gz deleted file mode 100644 index a0b793feeeafbe4872c3ffc7ae4464731083773f..0000000000000000000000000000000000000000 --- a/infeasible/ACOPF/dual.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9cc7721d1899210d8f2511ca281d56adce608033cbfe4552aa3ef0c92ba10c8a -size 3279205926 diff --git a/infeasible/ACOPF/meta.h5.gz b/infeasible/ACOPF/meta.h5.gz deleted file mode 100644 index 528cae564135bc65fc75d414f54eb581dd7cae44..0000000000000000000000000000000000000000 --- a/infeasible/ACOPF/meta.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:65191c0dd3463a074d8dbdce83589601dd6c4321973761e1f3c72b41beec632f -size 287007 diff --git a/infeasible/ACOPF/primal.h5.gz b/infeasible/ACOPF/primal.h5.gz deleted file mode 100644 index faa994bb812f773be984f32c7900f98e470f7800..0000000000000000000000000000000000000000 --- a/infeasible/ACOPF/primal.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:53314e3232ade5ae54ed226cb786fc92f3adec30d90340fd05edf513525670dc -size 1383280253 diff --git a/infeasible/DCOPF/dual.h5.gz b/infeasible/DCOPF/dual.h5.gz deleted file mode 100644 index f1dd2d109c83478501526ad7c40ac8390b5420bf..0000000000000000000000000000000000000000 --- a/infeasible/DCOPF/dual.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5d9299d6ea96f792f752deaee790f4168fc2b31b112a3288756f761435624f1e -size 163514808 diff --git a/infeasible/DCOPF/meta.h5.gz b/infeasible/DCOPF/meta.h5.gz deleted file mode 100644 index 916db8d6304955b825599ea72aba49277b5fe6d4..0000000000000000000000000000000000000000 --- a/infeasible/DCOPF/meta.h5.gz +++ /dev/null @@ 
-1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2273bc34d1cfe93d8722c9234a34247892b29f57bd616d177b2c3f451b3da2c9 -size 273341 diff --git a/infeasible/DCOPF/primal.h5.gz b/infeasible/DCOPF/primal.h5.gz deleted file mode 100644 index 0e9242e14e2f8c5d17eae50b7109a5eca05ed8f3..0000000000000000000000000000000000000000 --- a/infeasible/DCOPF/primal.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e369fddc2117691d30efed97fd2a12b952927f04fc5fd35afe7e81c354e5e2f1 -size 208432520 diff --git a/infeasible/SOCOPF/dual.h5.gz b/infeasible/SOCOPF/dual.h5.gz deleted file mode 100644 index 7972bdbdf776b69fe1e06eb27c5e752854a142c4..0000000000000000000000000000000000000000 --- a/infeasible/SOCOPF/dual.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f9fbf1d778c89225ba88e14d433020f331e6e43c94bb20a561610118ea65abdb -size 5698644882 diff --git a/infeasible/SOCOPF/meta.h5.gz b/infeasible/SOCOPF/meta.h5.gz deleted file mode 100644 index c514469ae7bd5abc458a3d85bba90457b865a368..0000000000000000000000000000000000000000 --- a/infeasible/SOCOPF/meta.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:bb75a457b358fb1b76de1a64ba5f28c406747334199062f38b562a68c039241e -size 269976 diff --git a/infeasible/SOCOPF/primal.h5.gz b/infeasible/SOCOPF/primal.h5.gz deleted file mode 100644 index ce1c3085c977cb8fe22cfd17ccc7c0b3df796024..0000000000000000000000000000000000000000 --- a/infeasible/SOCOPF/primal.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:22e5a544f9015a6109806cc6d3a664990775e3f4384a9580334f73efb7887edb -size 1472197148 diff --git a/infeasible/input.h5.gz b/infeasible/input.h5.gz deleted file mode 100644 index 35badc2eab9958feee6aa2f97d824f722df6b7d3..0000000000000000000000000000000000000000 --- a/infeasible/input.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:1becbfc0458db85fa688dc34282bdeddb91b8650a872925eaf15b49e80e0f14a -size 202109735 diff --git a/test/ACOPF/dual.h5.gz b/test/ACOPF/dual.h5.gz deleted file mode 100644 index 8b526e22da5a3a0dbb3ed3d1d12d04000e263eb6..0000000000000000000000000000000000000000 --- a/test/ACOPF/dual.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a9a741e9f8664c2a72ed97e07d243fc978f9f7089b2ce88a25c949e2bd42074d -size 7298150074 diff --git a/test/ACOPF/meta.h5.gz b/test/ACOPF/meta.h5.gz deleted file mode 100644 index f084668a9719b649ffbc0defdebff63de16e981c..0000000000000000000000000000000000000000 --- a/test/ACOPF/meta.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8932327179af34efc8b6c0f6e98087646ebcf25f5ed24bea20f7542955a8c860 -size 634973 diff --git a/test/ACOPF/primal.h5.gz b/test/ACOPF/primal.h5.gz deleted file mode 100644 index ea34eb5467ddabf2fbd95feeeaab94b6f11341c7..0000000000000000000000000000000000000000 --- a/test/ACOPF/primal.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2d621b285d1a0293b401060507e39255d3aef972aa5936e8618ad38235f1c765 -size 3303038711 diff --git a/test/DCOPF/dual.h5.gz b/test/DCOPF/dual.h5.gz deleted file mode 100644 index 44dc7f79725c436d51222a850eac6420c53f3fee..0000000000000000000000000000000000000000 --- a/test/DCOPF/dual.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:5622255fb05d1015d6634745818fac8801a4c30e60d5c6ef4a42da6e339df021 -size 
758845639 diff --git a/test/DCOPF/meta.h5.gz b/test/DCOPF/meta.h5.gz deleted file mode 100644 index 5271edc7f8c58b4b6216d1ac0e03a83ae60200ab..0000000000000000000000000000000000000000 --- a/test/DCOPF/meta.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:cd0007122327925991cbe8edf7c97231e9b10edc528eb3087249f9fd582f1006 -size 630592 diff --git a/test/DCOPF/primal.h5.gz b/test/DCOPF/primal.h5.gz deleted file mode 100644 index c4a64afbff95b90bb8a01763e2414f6144dd273e..0000000000000000000000000000000000000000 --- a/test/DCOPF/primal.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d98b173559ae2cb7cb49faaa26a5a45edac129056008c53e50a672b97fdebf82 -size 985626601 diff --git a/test/SOCOPF/dual.h5.gz b/test/SOCOPF/dual.h5.gz deleted file mode 100644 index ae4fe920b222fd801109fb4b8f8daaac85eb62db..0000000000000000000000000000000000000000 --- a/test/SOCOPF/dual.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:82e4b2b56e2cfeedac285f510be53af3569fa8757bc5e3e9dafa3097cfcef9a1 -size 13493024102 diff --git a/test/SOCOPF/meta.h5.gz b/test/SOCOPF/meta.h5.gz deleted file mode 100644 index 6d4cf3f16749edda69409f0ff04f591122f54086..0000000000000000000000000000000000000000 --- a/test/SOCOPF/meta.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:60ce99a04e8e4aaf8e631db27fbfd9d8261675e92923f88dad2133f8d21bd314 -size 635115 diff --git a/test/SOCOPF/primal.h5.gz b/test/SOCOPF/primal.h5.gz deleted file mode 100644 index ef9a790b8238919ffe5fabde674e2ae2b2510993..0000000000000000000000000000000000000000 --- a/test/SOCOPF/primal.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:950a28e7c0176ce623ca9063b82fdcb16a7225a93d2f27cad738d9d6d2e482dc -size 4082946361 diff --git a/test/input.h5.gz b/test/input.h5.gz deleted file mode 100644 index c1ef2f52ca658e116dd6949a748fa9f796a04533..0000000000000000000000000000000000000000 --- a/test/input.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:eaab5fe6642793cc7b8733cc8008b31f75e6463b6035ffaf3bfa7b8aa019d834 -size 489639546 diff --git a/train/ACOPF/dual.h5.gz b/train/ACOPF/dual.h5.gz deleted file mode 100644 index 11fcdf414beb302b2e3032934f79ed640ccbfbcc..0000000000000000000000000000000000000000 --- a/train/ACOPF/dual.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4d72cc0f17693e9a0160e4a3b1e1ce1b7070da95fb6e7265aa827fbe78029a7f -size 29192858819 diff --git a/train/ACOPF/meta.h5.gz b/train/ACOPF/meta.h5.gz deleted file mode 100644 index 785f8591c33f7ff629cfd0398468878fd523af54..0000000000000000000000000000000000000000 --- a/train/ACOPF/meta.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:a94933714ce872f3d7f60a76bb22f7bbdb2796d46b2ecbaecf44e3d1e3308d1e -size 2501217 diff --git a/train/ACOPF/primal.h5.gz b/train/ACOPF/primal.h5.gz deleted file mode 100644 index 0249e8aaa99929217ec58cdd8ef4ff905b378ed4..0000000000000000000000000000000000000000 --- a/train/ACOPF/primal.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d7b4102860f1f03cdb43faa9cf034711f27106e9895d07b7a94f1a8ea55f19a7 -size 13212119576 diff --git a/train/DCOPF/dual.h5.gz b/train/DCOPF/dual.h5.gz deleted file mode 100644 index 81ed9ce173e955dea40db5db2e0a9808d4ad27f0..0000000000000000000000000000000000000000 --- a/train/DCOPF/dual.h5.gz +++ 
/dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:019dda851b04a1cfcb187b7f6668513194f2ec0633f1520d23c91301d2297610 -size 3035867618 diff --git a/train/DCOPF/meta.h5.gz b/train/DCOPF/meta.h5.gz deleted file mode 100644 index 9a6dbda2a1249a8ca8d880a7251d48a94927f2e5..0000000000000000000000000000000000000000 --- a/train/DCOPF/meta.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:f9d247bd5e2a773a4e7ee88662287103b0edae450913c13ffcd12697ebc30757 -size 2483519 diff --git a/train/DCOPF/primal.h5.gz b/train/DCOPF/primal.h5.gz deleted file mode 100644 index 2490dee1a0e24f6e7b89d3729c701e7aec7bc124..0000000000000000000000000000000000000000 --- a/train/DCOPF/primal.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d906eb60589acba1c5cb6f52ffeb0a8984d381589393994ee067d4208ebdf06a -size 3942441475 diff --git a/train/SOCOPF/dual/xaa b/train/SOCOPF/dual/xaa deleted file mode 100644 index a3ee6a2928594db87b13f70841601f0df53cad21..0000000000000000000000000000000000000000 --- a/train/SOCOPF/dual/xaa +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2731b3b034dc779e4e4db2754667fc2469f2c08bb7b69e0d32782c03fb9ae80b -size 32212254720 diff --git a/train/SOCOPF/dual/xab b/train/SOCOPF/dual/xab deleted file mode 100644 index d6ebbb517652e8d7a3f73c1b552ae5b25228a313..0000000000000000000000000000000000000000 --- a/train/SOCOPF/dual/xab +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:2988ce95261da86249dcaab4b7e80856b0f9a46a1df8ca0170ed1537a63fbae9 -size 29884772448 diff --git a/train/SOCOPF/meta.h5.gz b/train/SOCOPF/meta.h5.gz deleted file mode 100644 index c50395d91312d08aa29dec4769a18146e6d7ef78..0000000000000000000000000000000000000000 --- a/train/SOCOPF/meta.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:348e97750d34ece492075d3eacb1a33a69d647c85f258379b03bb19b27bd9e9f -size 2499071 diff --git a/train/SOCOPF/primal.h5.gz b/train/SOCOPF/primal.h5.gz deleted file mode 100644 index 599704e778b09a67604a97a8ccc98eac5bd2ed32..0000000000000000000000000000000000000000 --- a/train/SOCOPF/primal.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:8e57551e2555cf290b3bd715963bd725f2a8f53d0990d1c33d3d504916ac706d -size 16331772453 diff --git a/train/input.h5.gz b/train/input.h5.gz deleted file mode 100644 index dcb9b76765f16257f76b7b460a42481b216e2d54..0000000000000000000000000000000000000000 --- a/train/input.h5.gz +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e775db2dca0c2a9c6333b7be5610ed6e1e25383bcab3a7cdafa6f7c5f57ad1fe -size 1958528749
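After this change the repository reads as a plain parquet dataset: the loading script `PGLearn-Large-6470_rte.py` is removed and the `configs` block added to `README.md` points the default `6470_rte` config at the `6470_rte/train-*` and `6470_rte/test-*` shards, so `trust_remote_code` is no longer needed. A minimal loading sketch follows; the column names in the comments are assumptions based on the feature keys defined in the removed script, so inspect `ds.features` on a real download:

```python
from datasets import load_dataset

# The "6470_rte" config is the default per the new README.md `configs` entry,
# so no config name or trust_remote_code argument is required. streaming=True
# avoids materializing the ~493 MB-per-shard parquet files up front.
ds = load_dataset("PGLearn/PGLearn-Large-6470_rte", split="test", streaming=True)

sample = next(iter(ds))
print(sorted(sample)[:5])  # inspect the actual column names
# Assumed columns, mirroring the removed script's feature schema:
#   len(sample["input/pd"]) == 3670 (n_load)
#   len(sample["ACOPF/primal/pg"]) == 761 (n_gen)
```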
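The removed script's `USE_ML4OPF_WARNING` also recommends `huggingface_hub.snapshot_download` plus an HDF5 reader. That route targets the gzipped HDF5 layout this commit deletes from the branch (`test/DCOPF/primal.h5.gz` and friends), so the sketch below only applies to revisions that still ship those files; the paths and dataset keys are taken from the removed script, and anything beyond that is an assumption:

```python
import gzip

import h5py
from huggingface_hub import snapshot_download

# Fetch only the files of interest; pass revision=... to target a commit that
# still contains the gzipped HDF5 layout removed by this change.
local_dir = snapshot_download(
    repo_id="PGLearn/PGLearn-Large-6470_rte",
    repo_type="dataset",
    allow_patterns=["test/DCOPF/*"],
)

# h5py accepts a seekable file object, mirroring what the removed
# open_maybe_gzip_cat helper did with pgzip.
with gzip.open(f"{local_dir}/test/DCOPF/primal.h5.gz", "rb") as f, h5py.File(f, "r") as h5:
    print(list(h5.keys()))  # expected: "pg", "pf", "va" per the DCOPF primal schema
```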