Modalities: Image, Text
Formats: Parquet
Size: < 1K
Libraries: Datasets, pandas
Commit 221fcfd (parent: c2f21de), committed by lhoestq (HF staff)

Convert dataset to Parquet (#1)
- Convert dataset to Parquet (94ba78a0f5c1573c81dd6ba5327044938ea38007)
- Delete data file (47d9c3dd25e4e08499b1cef355a3bdf51ed1ce5b)
- Delete data file (7f4f46812f99df193a466bc3cbf9c7706fa3ea38)
- Delete loading script (fb866f9ef976198db28135808f739374cd940b7b)
- Delete data file (230d5ac618f5b2e73dc1c0df8e8599955daa7ecd)
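For context, the deleted loading script and train.jsonl further down show how each row pairs an image and a conditioning image with a caption. Below is a minimal sketch of how an equivalent Parquet shard could be rebuilt locally from those files; the conversion in this commit was performed automatically, so the local paths and the use of `Dataset.to_parquet` here are assumptions for illustration, not the exact procedure.

```python
# Illustrative sketch only (not the exact conversion performed in this commit).
# Assumes train.jsonl, images/ and conditioning_images/ have been downloaded
# and extracted locally, matching the files deleted in this commit.
import pandas as pd
from datasets import Dataset, Features, Image, Value

metadata = pd.read_json("train.jsonl", lines=True)

features = Features(
    {
        "image": Image(),
        "conditioning_image": Image(),
        "text": Value("string"),
    }
)

def gen():
    for _, row in metadata.iterrows():
        # Embed the raw PNG bytes so the Parquet file is self-contained,
        # mirroring what the deleted loading script yielded per example.
        yield {
            "image": {"path": row["image"], "bytes": open(row["image"], "rb").read()},
            "conditioning_image": {
                "path": row["conditioning_image"],
                "bytes": open(row["conditioning_image"], "rb").read(),
            },
            "text": row["text"],
        }

ds = Dataset.from_generator(gen, features=features)
ds.to_parquet("data/train-00000-of-00001.parquet")
```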

README.md ADDED
@@ -0,0 +1,21 @@
+---
+dataset_info:
+  features:
+  - name: image
+    dtype: image
+  - name: conditioning_image
+    dtype: image
+  - name: text
+    dtype: string
+  splits:
+  - name: train
+    num_bytes: 92595.0
+    num_examples: 10
+  download_size: 95645
+  dataset_size: 92595.0
+configs:
+- config_name: default
+  data_files:
+  - split: train
+    path: data/train-*
+---
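With the card above in place, the `default` config resolves `data/train-*` to the new Parquet shard, so the dataset loads without a loading script. A minimal usage sketch (the printed values are taken from the metadata and JSONL shown in this commit):

```python
from datasets import load_dataset

# The "default" config declared in the YAML above points at data/train-*.parquet.
ds = load_dataset("hf-internal-testing/fill10", split="train")

print(ds.num_rows)        # 10
print(ds.column_names)    # ['image', 'conditioning_image', 'text']
print(ds[0]["text"])      # "light coral circle with white background"
ds[0]["image"]            # decoded as a PIL.Image.Image
```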
conditioning_images.zip → data/train-00000-of-00001.parquet RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:6beda7f2f2f9e9cc965b49654b1c6304304f8fb951f89fc9d16b6a9c71c8598a
-size 25048
+oid sha256:0d0bfda6dcf271b69b81a2ae5c1538a16d0cf957ec0d9e630b368088c7ead520
+size 95645
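Because the shard is plain Parquet, it can also be read directly, for example with pandas. This sketch assumes huggingface_hub and an fsspec-capable pandas are installed so the hf:// protocol resolves; otherwise download the file first.

```python
import pandas as pd

# Read the shard added in this commit without going through the datasets library.
df = pd.read_parquet(
    "hf://datasets/hf-internal-testing/fill10/data/train-00000-of-00001.parquet"
)

print(len(df))           # 10
print(list(df.columns))  # ['image', 'conditioning_image', 'text']
# The image columns are stored as structs, so each cell is a dict with
# 'bytes' (raw PNG) and 'path' keys rather than a decoded image.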
fill10.py DELETED
@@ -1,101 +0,0 @@
-import pandas as pd
-from huggingface_hub import hf_hub_url
-import datasets
-import os
-
-_VERSION = datasets.Version("0.0.5")
-
-_DESCRIPTION = "TODO"
-_HOMEPAGE = "TODO"
-_LICENSE = "TODO"
-_CITATION = "TODO"
-
-_FEATURES = datasets.Features(
-    {
-        "image": datasets.Image(),
-        "conditioning_image": datasets.Image(),
-        "text": datasets.Value("string"),
-    },
-)
-
-METADATA_URL = hf_hub_url(
-    "hf-internal-testing/fill10",
-    filename="train.jsonl",
-    repo_type="dataset",
-)
-
-IMAGES_URL = hf_hub_url(
-    "hf-internal-testing/fill10",
-    filename="images.zip",
-    repo_type="dataset",
-)
-
-CONDITIONING_IMAGES_URL = hf_hub_url(
-    "hf-internal-testing/fill10",
-    filename="conditioning_images.zip",
-    repo_type="dataset",
-)
-
-_DEFAULT_CONFIG = datasets.BuilderConfig(name="default", version=_VERSION)
-
-
-class Fill50k(datasets.GeneratorBasedBuilder):
-    BUILDER_CONFIGS = [_DEFAULT_CONFIG]
-    DEFAULT_CONFIG_NAME = "default"
-
-    def _info(self):
-        return datasets.DatasetInfo(
-            description=_DESCRIPTION,
-            features=_FEATURES,
-            supervised_keys=None,
-            homepage=_HOMEPAGE,
-            license=_LICENSE,
-            citation=_CITATION,
-        )
-
-    def _split_generators(self, dl_manager):
-        metadata_path = dl_manager.download(METADATA_URL)
-        images_dir = dl_manager.download_and_extract(IMAGES_URL)
-        conditioning_images_dir = dl_manager.download_and_extract(
-            CONDITIONING_IMAGES_URL
-        )
-
-        return [
-            datasets.SplitGenerator(
-                name=datasets.Split.TRAIN,
-                # These kwargs will be passed to _generate_examples
-                gen_kwargs={
-                    "metadata_path": metadata_path,
-                    "images_dir": images_dir,
-                    "conditioning_images_dir": conditioning_images_dir,
-                },
-            ),
-        ]
-
-    def _generate_examples(self, metadata_path, images_dir, conditioning_images_dir):
-        metadata = pd.read_json(metadata_path, lines=True)
-
-        for _, row in metadata.iterrows():
-            text = row["text"]
-
-            image_path = row["image"]
-            image_path = os.path.join(images_dir, image_path)
-            image = open(image_path, "rb").read()
-
-            conditioning_image_path = row["conditioning_image"]
-            conditioning_image_path = os.path.join(
-                conditioning_images_dir, row["conditioning_image"]
-            )
-            conditioning_image = open(conditioning_image_path, "rb").read()
-
-            yield row["image"], {
-                "text": text,
-                "image": {
-                    "path": image_path,
-                    "bytes": image,
-                },
-                "conditioning_image": {
-                    "path": conditioning_image_path,
-                    "bytes": conditioning_image,
-                },
-            }
images.zip DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:e671f5002ac91469cc282c9fa1ea41ccf4cd3d1a21596cd68360638e17993a9e
-size 21998
train.jsonl DELETED
@@ -1,10 +0,0 @@
-{"text": "light coral circle with white background", "image": "images/1.png", "conditioning_image": "conditioning_images/1.png"}
-{"text": "aqua circle with light pink background", "image": "images/2.png", "conditioning_image": "conditioning_images/2.png"}
-{"text": "cornflower blue circle with light golden rod yellow background", "image": "images/3.png", "conditioning_image": "conditioning_images/3.png"}
-{"text": "light slate gray circle with blue background", "image": "images/4.png", "conditioning_image": "conditioning_images/4.png"}
-{"text": "light golden rod yellow circle with turquoise background", "image": "images/5.png", "conditioning_image": "conditioning_images/5.png"}
-{"text": "crimson circle with papaya whip background", "image": "images/6.png", "conditioning_image": "conditioning_images/6.png"}
-{"text": "aqua circle with slate blue background", "image": "images/7.png", "conditioning_image": "conditioning_images/7.png"}
-{"text": "dark magenta circle with cyan background", "image": "images/8.png", "conditioning_image": "conditioning_images/8.png"}
-{"text": "papaya whip circle with corn silk background", "image": "images/9.png", "conditioning_image": "conditioning_images/9.png"}
-{"text": "silver circle with powder blue background", "image": "images/10.png", "conditioning_image": "conditioning_images/10.png"}