adafactor
- README.md +30 -15
- dataset_folder.py → dataset_fromfolder.py +0 -0
- samples/sdxs_192x320_0.jpg +0 -0
- samples/sdxs_192x384_0.jpg +0 -0
- samples/sdxs_256x320_0.jpg +0 -0
- samples/sdxs_256x384_0.jpg +0 -0
- samples/sdxs_320x192_0.jpg +0 -0
- samples/sdxs_320x256_0.jpg +0 -0
- samples/sdxs_320x320_0.jpg +0 -0
- samples/sdxs_320x384_0.jpg +0 -0
- samples/sdxs_384x192_0.jpg +0 -0
- samples/sdxs_384x256_0.jpg +0 -0
- samples/sdxs_384x320_0.jpg +0 -0
- samples/sdxs_384x384_0.jpg +0 -0
- train.py +5 -4
README.md
CHANGED
````diff
@@ -5,32 +5,47 @@ license: apache-2.0
 Quick installation guide
 Update the system and install git-lfs:
 
-```
-apt
+```
+apt update
+apt install git-lfs
+```
 Upgrade pip and install the required packages:
 
-```
-pip install
+```
+python -m pip install --upgrade pip
+pip install -r requirements.txt
+```
 Configure git-credentials:
 
-```
+```
+git config --global credential.helper store
+```
 Clone the repository:
 
-```
-
+```
+git clone https://huggingface.co/AiArtLab/sdxs
+cd sdxs/
+```
 Prepare the dataset:
 
-```
+```
+mkdir datasets
 cd datasets
-git clone https://huggingface.co/datasets/AiArtLab/imagenet-1kk
+git clone https://huggingface.co/datasets/AiArtLab/imagenet-1kk
+```
 Log in to the services:
 
-```
-
+```
+huggingface-cli login
+wandb login
+```
 Configure accelerate for multi-GPU training:
 
-```
-
-
+```
+accelerate config
+```
+Start training!
 
-```
+```
+nohup accelerate launch train.py &
+```
````
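The two CLI logins in the new README (`huggingface-cli login`, `wandb login`) can also be done programmatically. A minimal sketch using the public `huggingface_hub` and `wandb` APIs; this is illustrative and not part of the repository:

```python
# Programmatic equivalents of `huggingface-cli login` and `wandb login`.
from huggingface_hub import login
import wandb

login()        # prompts for a Hugging Face access token
wandb.login()  # prompts for a Weights & Biases API key
```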
dataset_folder.py → dataset_fromfolder.py
RENAMED
File without changes
samples/sdxs_192x320_0.jpg
CHANGED
samples/sdxs_192x384_0.jpg
CHANGED
samples/sdxs_256x320_0.jpg
CHANGED
samples/sdxs_256x384_0.jpg
CHANGED
samples/sdxs_320x192_0.jpg
CHANGED
samples/sdxs_320x256_0.jpg
CHANGED
samples/sdxs_320x320_0.jpg
CHANGED
samples/sdxs_320x384_0.jpg
CHANGED
samples/sdxs_384x192_0.jpg
CHANGED
samples/sdxs_384x256_0.jpg
CHANGED
samples/sdxs_384x320_0.jpg
CHANGED
samples/sdxs_384x384_0.jpg
CHANGED
train.py
CHANGED
```diff
@@ -22,12 +22,12 @@ from diffusers.models.attention_processor import AttnProcessor2_0
 
 # --------------------------- Parameters ---------------------------
 save_path = "datasets/dataset384_temp" #"datasets/dataset384" #"datasets/imagenet-1kk" #"datasets/siski576" #"datasets/siski384" #"datasets/siski64" #"datasets/mnist"
-batch_size =
+batch_size = 33 #555 #35 #7
 base_learning_rate = 0 #5e-5 #8e-5
 min_learning_rate = 2.5e-5 #2e-5
-num_epochs =
+num_epochs = 5 #36 #18
 project = "sdxs"
-use_wandb =
+use_wandb = True
 limit = 0
 save_model = True
 checkpoints_folder = ""
```
```diff
@@ -266,6 +266,7 @@ if lowram:
 from transformers.optimization import Adafactor
 
 # [1] Create a dictionary of optimizers (one per parameter)
+base_learning_rate = 0
 optimizer_dict = {
     p: Adafactor(
         [p],
```
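The diff shows only the first lines of `optimizer_dict`, so the intent (one Adafactor instance per parameter) is easy to miss. Below is a minimal sketch of that pattern, assuming the per-parameter optimizers are stepped from PyTorch's post-accumulate-grad hooks, a common low-memory setup consistent with the `lowram` guard in the hunk header; the function name, keyword arguments, and hook wiring are assumptions, not the repository's exact code:

```python
import torch
from transformers.optimization import Adafactor

def build_per_param_adafactor(model: torch.nn.Module) -> dict:
    # One Adafactor instance per parameter. With lr=None and relative_step=True,
    # Adafactor derives its own step size, which matches the diff setting
    # base_learning_rate = 0 right before the dict (the external LR is unused).
    optimizer_dict = {
        p: Adafactor([p], lr=None, scale_parameter=True,
                     relative_step=True, warmup_init=False)
        for p in model.parameters() if p.requires_grad
    }

    # Step each optimizer as soon as its gradient is accumulated, then free the
    # gradient, so all gradients never coexist in memory. Requires PyTorch 2.1+.
    def make_hook(param: torch.nn.Parameter):
        def hook(*_):
            optimizer_dict[param].step()
            optimizer_dict[param].zero_grad(set_to_none=True)
        return hook

    for p in optimizer_dict:
        p.register_post_accumulate_grad_hook(make_hook(p))
    return optimizer_dict
```

Note that stepping inside the hook replaces the usual `optimizer.step()` call in the training loop, so it does not compose naturally with gradient accumulation; the sketch ignores that interaction.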
```diff
@@ -476,7 +477,7 @@ for epoch in range(start_epoch, start_epoch + num_epochs):
 
     for step, (latents, embeddings) in enumerate(dataloader):
         with accelerator.accumulate(unet):
-            if step ==
+            if save_model == False and step == 3:
                 used_gb = torch.cuda.max_memory_allocated() / 1024**3
                 print(f"Step {step}: {used_gb:.2f} GB")
             # Forward pass
```