Upload folder using huggingface_hub
- v2/log.txt +0 -0
- v2/model.pt +3 -0
- v2/vocab.alphabet +1 -0
- v3/8-aae-17_12_2024/log.txt +0 -0
- v3/8-aae-17_12_2024/model.pt +3 -0
- v3/8-laae-19_12_2024/log.txt +73 -0
- v3/8-laae-19_12_2024/vocab.alphabet +1 -0
- v3/8-laae-large_17_12_2024/log.txt +0 -0
- v3/8-vae-17_12_2024/log.txt +0 -0
- v3/8-vae-17_12_2024/model.pt +3 -0
v2/log.txt
ADDED
The diff for this file is too large to render.
v2/model.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:60ad956750f5cb8843e9301be0d314dad53e1a26f25eb00d014a6a93d8ca0fa4
+size 17422074
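
The three added lines are a Git LFS pointer, not the weights themselves: the repository stores only the spec version, the SHA-256 oid and the byte size, while the real v2/model.pt sits in LFS storage. A minimal sketch of fetching and opening the file, assuming a placeholder repo id and that the .pt file is a torch-serialized checkpoint (suggested by the extension and the training logs, not confirmed by this diff):

from huggingface_hub import hf_hub_download
import torch

# Placeholder repo id; substitute the actual model repository.
path = hf_hub_download(repo_id="someuser/password-autoencoders", filename="v2/model.pt")

# Assumption: the checkpoint is torch-serialized; its exact layout (state dict,
# full module, extra metadata) is not visible from the pointer alone.
checkpoint = torch.load(path, map_location="cpu")
print(type(checkpoint))
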
v2/vocab.alphabet
ADDED
@@ -0,0 +1 @@
+ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"'`!^@#$%&.,?:;~-+*=_/\|[]{}()<>
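
vocab.alphabet is a single line holding the model's character set, which appears to be the printable ASCII characters minus the space. A minimal sketch of turning it into the char-to-index tables a character-level encoder/decoder would use; whether the training code prepends special tokens (padding, end-of-sequence, unknown) is not visible in this diff, so none are assumed here:

# Build lookup tables from the one-line alphabet file.
with open("v2/vocab.alphabet", encoding="utf-8") as f:
    alphabet = f.read().rstrip("\n")

char2idx = {ch: i for i, ch in enumerate(alphabet)}
idx2char = {i: ch for ch, i in char2idx.items()}

# Round-trip an 8-character password through the mapping.
encoded = [char2idx[c] for c in "Passw0rd"]
decoded = "".join(idx2char[i] for i in encoded)
assert decoded == "Passw0rd"
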
v3/8-aae-17_12_2024/log.txt
CHANGED
The diff for this file is too large to render.
v3/8-aae-17_12_2024/model.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e0f43a8232b0bfcd5cdecd21240e75ad82fe91855e4e55fb45bd26ddd24ac62e
+size 4504504
v3/8-laae-19_12_2024/log.txt
ADDED
@@ -0,0 +1,73 @@
+Namespace(alphabet=None, b1=0.5, b2=0.999, batch_size=16384, dim_d=512, dim_emb=64, dim_h=256, dim_z=64, dropout=0.3, epochs=20, lambda_adv=10.0, lambda_kl=0.0, lambda_p=0.1, load_model='', log_interval=100, lr=0.0005, max_len=8, model_type='aae', nlayers=1, no_cuda=False, noise=[0.0, 0.0, 0.0], save_dir='/mnt/hdd4/julia_dir/leo/pae/out/v3/8-laae-19_12_2024', train='/mnt/hdd4/julia_dir/leo/data/hashes.org-list/8/train.txt', valid='/mnt/hdd4/julia_dir/leo/data/hashes.org-list/8/valid.txt')
+# train on cuda device
+# vocab save /mnt/hdd4/julia_dir/leo/pae/out/v3/8-laae-19_12_2024/vocab.alphabet
+# 168000000 train passwords was loaded
+# 42553894 valid passwords was loaded
+# model aae with parameters was init: 1124580
+--------------------------------------------------------------------------------
+| epoch 1 | 100/ 10254 batches | rec 33.27, adv 0.70, |lvar| 0.64, loss_d 1.41, loss 40.37,
+| epoch 1 | 200/ 10254 batches | rec 30.43, adv 0.71, |lvar| 0.51, loss_d 1.40, loss 37.54,
+| epoch 1 | 300/ 10254 batches | rec 28.55, adv 0.72, |lvar| 0.31, loss_d 1.38, loss 35.78,
+| epoch 1 | 400/ 10254 batches | rec 27.20, adv 0.74, |lvar| 0.45, loss_d 1.37, loss 34.63,
+| epoch 1 | 500/ 10254 batches | rec 25.98, adv 0.73, |lvar| 0.30, loss_d 1.36, loss 33.36,
+| epoch 1 | 600/ 10254 batches | rec 25.45, adv 0.75, |lvar| 0.50, loss_d 1.36, loss 32.95,
+| epoch 1 | 700/ 10254 batches | rec 24.91, adv 0.75, |lvar| 0.44, loss_d 1.35, loss 32.46,
+| epoch 1 | 800/ 10254 batches | rec 24.63, adv 0.75, |lvar| 0.42, loss_d 1.35, loss 32.18,
+| epoch 1 | 900/ 10254 batches | rec 24.41, adv 0.75, |lvar| 0.68, loss_d 1.35, loss 31.98,
+| epoch 1 | 1000/ 10254 batches | rec 24.27, adv 0.75, |lvar| 1.04, loss_d 1.36, loss 31.91,
+| epoch 1 | 1100/ 10254 batches | rec 24.11, adv 0.76, |lvar| 1.25, loss_d 1.35, loss 31.79,
+| epoch 1 | 1200/ 10254 batches | rec 23.96, adv 0.75, |lvar| 2.21, loss_d 1.36, loss 31.67,
+| epoch 1 | 1300/ 10254 batches | rec 23.73, adv 0.74, |lvar| 3.40, loss_d 1.36, loss 31.52,
+| epoch 1 | 1400/ 10254 batches | rec 23.47, adv 0.74, |lvar| 4.55, loss_d 1.37, loss 31.30,
+| epoch 1 | 1500/ 10254 batches | rec 23.34, adv 0.73, |lvar| 5.71, loss_d 1.38, loss 31.24,
+| epoch 1 | 1600/ 10254 batches | rec 22.74, adv 0.73, |lvar| 7.08, loss_d 1.38, loss 30.73,
+| epoch 1 | 1700/ 10254 batches | rec 22.14, adv 0.72, |lvar| 9.20, loss_d 1.38, loss 30.24,
+| epoch 1 | 1800/ 10254 batches | rec 22.55, adv 0.72, |lvar| 10.71, loss_d 1.38, loss 30.81,
+| epoch 1 | 1900/ 10254 batches | rec 21.01, adv 0.71, |lvar| 12.71, loss_d 1.38, loss 29.43,
+| epoch 1 | 2000/ 10254 batches | rec 20.41, adv 0.72, |lvar| 14.06, loss_d 1.37, loss 29.00,
+| epoch 1 | 2100/ 10254 batches | rec 19.82, adv 0.72, |lvar| 15.74, loss_d 1.37, loss 28.58,
+| epoch 1 | 2200/ 10254 batches | rec 19.24, adv 0.72, |lvar| 17.34, loss_d 1.37, loss 28.16,
+| epoch 1 | 2300/ 10254 batches | rec 18.77, adv 0.72, |lvar| 18.73, loss_d 1.37, loss 27.85,
+| epoch 1 | 2400/ 10254 batches | rec 18.37, adv 0.72, |lvar| 19.88, loss_d 1.37, loss 27.57,
+| epoch 1 | 2500/ 10254 batches | rec 18.06, adv 0.72, |lvar| 20.69, loss_d 1.37, loss 27.35,
+| epoch 1 | 2600/ 10254 batches | rec 17.65, adv 0.72, |lvar| 21.89, loss_d 1.37, loss 27.05,
+| epoch 1 | 2700/ 10254 batches | rec 17.16, adv 0.72, |lvar| 23.65, loss_d 1.38, loss 26.68,
+| epoch 1 | 2800/ 10254 batches | rec 16.70, adv 0.72, |lvar| 24.96, loss_d 1.38, loss 26.35,
+| epoch 1 | 2900/ 10254 batches | rec 16.16, adv 0.71, |lvar| 26.46, loss_d 1.38, loss 25.94,
+| epoch 1 | 3000/ 10254 batches | rec 16.29, adv 0.71, |lvar| 27.74, loss_d 1.38, loss 26.16,
+| epoch 1 | 3100/ 10254 batches | rec 15.18, adv 0.71, |lvar| 29.46, loss_d 1.38, loss 25.21,
+| epoch 1 | 3200/ 10254 batches | rec 14.78, adv 0.71, |lvar| 30.43, loss_d 1.38, loss 24.93,
+| epoch 1 | 3300/ 10254 batches | rec 14.34, adv 0.71, |lvar| 31.57, loss_d 1.38, loss 24.61,
+| epoch 1 | 3400/ 10254 batches | rec 14.04, adv 0.71, |lvar| 32.53, loss_d 1.38, loss 24.41,
+| epoch 1 | 3500/ 10254 batches | rec 13.73, adv 0.71, |lvar| 33.37, loss_d 1.38, loss 24.18,
+| epoch 1 | 3600/ 10254 batches | rec 13.45, adv 0.71, |lvar| 34.05, loss_d 1.38, loss 23.97,
+| epoch 1 | 3700/ 10254 batches | rec 13.17, adv 0.71, |lvar| 34.66, loss_d 1.37, loss 23.78,
+| epoch 1 | 3800/ 10254 batches | rec 12.96, adv 0.71, |lvar| 35.27, loss_d 1.37, loss 23.63,
+| epoch 1 | 3900/ 10254 batches | rec 12.75, adv 0.71, |lvar| 35.85, loss_d 1.37, loss 23.47,
+| epoch 1 | 4000/ 10254 batches | rec 12.54, adv 0.71, |lvar| 36.29, loss_d 1.37, loss 23.31,
+| epoch 1 | 4100/ 10254 batches | rec 12.36, adv 0.72, |lvar| 36.68, loss_d 1.37, loss 23.18,
+| epoch 1 | 4200/ 10254 batches | rec 12.16, adv 0.72, |lvar| 37.06, loss_d 1.37, loss 23.02,
+| epoch 1 | 4300/ 10254 batches | rec 12.02, adv 0.72, |lvar| 37.38, loss_d 1.37, loss 22.93,
+| epoch 1 | 4400/ 10254 batches | rec 11.87, adv 0.72, |lvar| 37.69, loss_d 1.37, loss 22.81,
+| epoch 1 | 4500/ 10254 batches | rec 11.75, adv 0.72, |lvar| 38.06, loss_d 1.37, loss 22.74,
+| epoch 1 | 4600/ 10254 batches | rec 11.60, adv 0.72, |lvar| 38.38, loss_d 1.37, loss 22.63,
+| epoch 1 | 4700/ 10254 batches | rec 11.50, adv 0.72, |lvar| 38.62, loss_d 1.37, loss 22.54,
+| epoch 1 | 4800/ 10254 batches | rec 11.36, adv 0.72, |lvar| 38.99, loss_d 1.37, loss 22.43,
+| epoch 1 | 4900/ 10254 batches | rec 11.26, adv 0.72, |lvar| 39.16, loss_d 1.37, loss 22.36,
+| epoch 1 | 5000/ 10254 batches | rec 11.13, adv 0.72, |lvar| 39.38, loss_d 1.37, loss 22.26,
+| epoch 1 | 5100/ 10254 batches | rec 11.00, adv 0.72, |lvar| 39.66, loss_d 1.37, loss 22.14,
+| epoch 1 | 5200/ 10254 batches | rec 10.93, adv 0.72, |lvar| 39.86, loss_d 1.37, loss 22.11,
+| epoch 1 | 5300/ 10254 batches | rec 10.86, adv 0.72, |lvar| 40.07, loss_d 1.37, loss 22.05,
+| epoch 1 | 5400/ 10254 batches | rec 10.68, adv 0.72, |lvar| 40.31, loss_d 1.37, loss 21.90,
+| epoch 1 | 5500/ 10254 batches | rec 10.63, adv 0.72, |lvar| 40.53, loss_d 1.37, loss 21.88,
+| epoch 1 | 5600/ 10254 batches | rec 10.52, adv 0.72, |lvar| 40.74, loss_d 1.37, loss 21.79,
+| epoch 1 | 5700/ 10254 batches | rec 10.47, adv 0.72, |lvar| 40.87, loss_d 1.37, loss 21.75,
+| epoch 1 | 5800/ 10254 batches | rec 10.38, adv 0.72, |lvar| 41.08, loss_d 1.37, loss 21.67,
+| epoch 1 | 5900/ 10254 batches | rec 10.25, adv 0.72, |lvar| 41.33, loss_d 1.37, loss 21.57,
+| epoch 1 | 6000/ 10254 batches | rec 10.22, adv 0.72, |lvar| 41.41, loss_d 1.37, loss 21.55,
+| epoch 1 | 6100/ 10254 batches | rec 10.13, adv 0.72, |lvar| 41.63, loss_d 1.37, loss 21.48,
+| epoch 1 | 6200/ 10254 batches | rec 10.05, adv 0.72, |lvar| 41.88, loss_d 1.37, loss 21.43,
+| epoch 1 | 6300/ 10254 batches | rec 9.93, adv 0.72, |lvar| 42.27, loss_d 1.37, loss 21.34,
+| epoch 1 | 6400/ 10254 batches | rec 9.74, adv 0.72, |lvar| 42.82, loss_d 1.37, loss 21.19,
+| epoch 1 | 6500/ 10254 batches | rec 9.57, adv 0.72, |lvar| 43.37, loss_d 1.37, loss 21.07,
+| epoch 1 | 6600/ 10254 batches | rec 9.40, adv 0.72, |lvar| 43.77, loss_d 1.37, loss 20.94,
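
The new log opens with the full argparse Namespace for this run (model_type='aae', max_len=8, batch_size=16384, dim_z=64, lambda_adv=10.0, lr=0.0005, with 168000000 training and 42553894 validation passwords loaded) and then reports metrics every 100 batches: rec, adv, |lvar|, loss_d and loss, presumably the reconstruction, adversarial, latent log-variance, discriminator and total terms defined in the training code. A minimal sketch, assuming only the line format shown above, for pulling those numbers out of a log.txt so the curves can be plotted:

import re

PATTERN = re.compile(
    r"epoch\s+(\d+)\s*\|\s*(\d+)/\s*(\d+) batches.*?"
    r"rec ([\d.]+), adv ([\d.]+), \|lvar\| ([\d.]+), loss_d ([\d.]+), loss ([\d.]+)"
)

def parse_log(path):
    rows = []
    with open(path) as f:
        for line in f:
            m = PATTERN.search(line)
            if m is None:
                continue  # skip the Namespace header, comment lines and separators
            epoch, batch, n_batches, rec, adv, lvar, loss_d, loss = m.groups()
            rows.append({
                "epoch": int(epoch),
                "batch": int(batch),
                "rec": float(rec),
                "adv": float(adv),
                "lvar": float(lvar),
                "loss_d": float(loss_d),
                "loss": float(loss),
            })
    return rows

rows = parse_log("v3/8-laae-19_12_2024/log.txt")
# In the excerpt above, rec falls from 33.27 at batch 100 to 9.40 at batch 6600.
print(rows[0]["rec"], rows[-1]["rec"])
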
v3/8-laae-19_12_2024/vocab.alphabet
ADDED
@@ -0,0 +1 @@
+ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"'`!^@#$%&.,?:;~-+*=_/\|[]{}()<>
v3/8-laae-large_17_12_2024/log.txt
CHANGED
The diff for this file is too large to render.
v3/8-vae-17_12_2024/log.txt
CHANGED
The diff for this file is too large to render.
v3/8-vae-17_12_2024/model.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a2d5faed19d5e5175f4708b3e732ffe563ecc4052408273f772c3e64cd769a58
+size 4367876
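
Each of the model.pt pointers above records the SHA-256 oid and byte size of the real file, which is enough to check that a downloaded copy is intact. A minimal sketch, with a placeholder local path, that verifies a file against the v3/8-vae-17_12_2024/model.pt pointer:

import hashlib
import os

def matches_pointer(local_path, expected_oid, expected_size):
    """Return True if the file's SHA-256 and byte size match the LFS pointer."""
    digest = hashlib.sha256()
    with open(local_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return (digest.hexdigest() == expected_oid
            and os.path.getsize(local_path) == expected_size)

# oid and size copied from the pointer above; the local path is a placeholder
# for wherever the real weights were downloaded to.
print(matches_pointer(
    "model.pt",
    "a2d5faed19d5e5175f4708b3e732ffe563ecc4052408273f772c3e64cd769a58",
    4367876,
))
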