EC2 Default User committed
Commit 74526fe · 1 parent: 679d8ed

Fixing Bindings

Files changed (44)
  1. build/torch27-cxx11-cu118-x86_64-linux/activation/__init__.py +18 -0
  2. build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc +0 -0
  3. build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc +0 -0
  4. build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc +0 -0
  5. build/torch27-cxx11-cu118-x86_64-linux/activation/{_activation_be5bedb_dirty.abi3.so → _activation_20250917153858.abi3.so} +2 -2
  6. build/torch27-cxx11-cu118-x86_64-linux/activation/_ops.py +3 -3
  7. build/torch27-cxx11-cu118-x86_64-linux/activation/layers.py +51 -0
  8. build/torch27-cxx11-cu126-x86_64-linux/activation/__init__.py +18 -0
  9. build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc +0 -0
  10. build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc +0 -0
  11. build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc +0 -0
  12. build/torch27-cxx11-cu126-x86_64-linux/activation/{_activation_be5bedb_dirty.abi3.so → _activation_20250917153858.abi3.so} +2 -2
  13. build/torch27-cxx11-cu126-x86_64-linux/activation/_ops.py +3 -3
  14. build/torch27-cxx11-cu126-x86_64-linux/activation/layers.py +51 -0
  15. build/torch27-cxx11-cu128-x86_64-linux/activation/__init__.py +18 -0
  16. build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc +0 -0
  17. build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc +0 -0
  18. build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc +0 -0
  19. build/torch27-cxx11-cu128-x86_64-linux/activation/{_activation_be5bedb_dirty.abi3.so → _activation_20250917153858.abi3.so} +2 -2
  20. build/torch27-cxx11-cu128-x86_64-linux/activation/_ops.py +3 -3
  21. build/torch27-cxx11-cu128-x86_64-linux/activation/layers.py +51 -0
  22. build/torch28-cxx11-cu126-x86_64-linux/activation/__init__.py +18 -0
  23. build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc +0 -0
  24. build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc +0 -0
  25. build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc +0 -0
  26. build/torch28-cxx11-cu126-x86_64-linux/activation/{_activation_be5bedb_dirty.abi3.so → _activation_20250917153858.abi3.so} +2 -2
  27. build/torch28-cxx11-cu126-x86_64-linux/activation/_ops.py +3 -3
  28. build/torch28-cxx11-cu126-x86_64-linux/activation/layers.py +51 -0
  29. build/torch28-cxx11-cu128-x86_64-linux/activation/__init__.py +18 -0
  30. build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc +0 -0
  31. build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc +0 -0
  32. build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc +0 -0
  33. build/torch28-cxx11-cu128-x86_64-linux/activation/_activation_20250917153858.abi3.so +3 -0
  34. build/torch28-cxx11-cu128-x86_64-linux/activation/_activation_be5bedb_dirty.abi3.so +0 -3
  35. build/torch28-cxx11-cu128-x86_64-linux/activation/_ops.py +3 -3
  36. build/torch28-cxx11-cu128-x86_64-linux/activation/layers.py +51 -0
  37. build/torch28-cxx11-cu129-x86_64-linux/activation/__init__.py +18 -0
  38. build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc +0 -0
  39. build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc +0 -0
  40. build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc +0 -0
  41. build/torch28-cxx11-cu129-x86_64-linux/activation/_activation_20250917153858.abi3.so +3 -0
  42. build/torch28-cxx11-cu129-x86_64-linux/activation/_activation_be5bedb_dirty.abi3.so +0 -3
  43. build/torch28-cxx11-cu129-x86_64-linux/activation/_ops.py +3 -3
  44. build/torch28-cxx11-cu129-x86_64-linux/activation/layers.py +51 -0
build/torch27-cxx11-cu118-x86_64-linux/activation/__init__.py CHANGED
@@ -30,6 +30,20 @@ def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0)
     return out
 
 
+def gelu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu(out, x)
+    return out
+
+def silu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu(out, x)
+    return out
+
+
+def gelu_tanh(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh(out, x)
+    return out
+
+
 def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
     ops.gelu_fast(out, x)
     return out
@@ -47,11 +61,15 @@ def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
 
 __all__ = [
     "silu_and_mul",
+    "mul_and_silu",
     "gelu_and_mul",
     "gelu_tanh_and_mul",
     "fatrelu_and_mul",
     "gelu_fast",
     "gelu_new",
     "gelu_quick",
+    "gelu_tanh",
+    "silu",
+    "gelu",
     "layers",
 ]
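
For reference, a minimal usage sketch of the three new element-wise bindings. This example is not part of the commit; it assumes one of these build directories is importable as the `activation` package and that a CUDA device is available.

    import torch
    import activation  # assumption: a build/<variant>/ directory is on PYTHONPATH as `activation`

    x = torch.randn(8, 4096, device="cuda", dtype=torch.float16)

    # Each new binding fills a caller-allocated tensor in place and also returns it.
    out = torch.empty_like(x)
    activation.silu(out, x)       # SiLU: x * sigmoid(x)

    out = torch.empty_like(x)
    activation.gelu(out, x)       # GELU

    out = torch.empty_like(x)
    activation.gelu_tanh(out, x)  # GELU with tanh approximation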
build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc and b/build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc differ
 
build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc and b/build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc differ
 
build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc and b/build/torch27-cxx11-cu118-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc differ
 
build/torch27-cxx11-cu118-x86_64-linux/activation/{_activation_be5bedb_dirty.abi3.so → _activation_20250917153858.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:aee7c6869a9e318ad81cb84460c58ca0dac2dc85f4ed739b12fe57641f766332
-size 2546984
+oid sha256:618cdba5f19eabc1f9c1d33e130ef03ab1b11b52f1e7b00b73f2a10d5cf1e62f
+size 2773664
build/torch27-cxx11-cu118-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_be5bedb_dirty
-ops = torch.ops._activation_be5bedb_dirty
+from . import _activation_20250917153858
+ops = torch.ops._activation_20250917153858
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_be5bedb_dirty::{op_name}"
+    return f"_activation_20250917153858::{op_name}"
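
The rename also shows up in how the ops are addressed. A small sketch of what the regenerated _ops.py exposes (the `activation` import path is an assumption, as above):

    import torch
    from activation._ops import ops, add_op_namespace_prefix

    # `ops` is the torch.ops namespace of the rebuilt extension, so the native
    # kernels are reachable as ops.silu, ops.gelu, ops.gelu_tanh, etc.
    x = torch.randn(4, 128, device="cuda", dtype=torch.float16)
    out = torch.empty_like(x)
    ops.silu(out, x)

    # The helper yields the fully qualified "namespace::op" string, for use
    # wherever PyTorch expects a qualified op name.
    assert add_op_namespace_prefix("silu") == "_activation_20250917153858::silu"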
build/torch27-cxx11-cu118-x86_64-linux/activation/layers.py CHANGED
@@ -23,6 +23,57 @@ class SiluAndMul(nn.Module):
         ops.silu_and_mul(out, x)
         return out
 
+class Silu(nn.Module):
+    """An activation function for SiLU.
+
+    The function computes x -> silu(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.silu(out, x)
+        return out
+
+class Gelu(nn.Module):
+    """An activation function for GELU.
+
+    The function computes x -> gelu(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.gelu(out, x)
+        return out
+
+class GeluTanh(nn.Module):
+    """An activation function for GELU with `tanh` approximation.
+
+    The function computes x -> gelu_tanh(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.gelu_tanh(out, x)
+        return out
+
 
 class MulAndSilu(nn.Module):
     """An activation function for SwiGLU.
build/torch27-cxx11-cu126-x86_64-linux/activation/__init__.py CHANGED
@@ -30,6 +30,20 @@ def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0)
     return out
 
 
+def gelu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu(out, x)
+    return out
+
+def silu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu(out, x)
+    return out
+
+
+def gelu_tanh(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh(out, x)
+    return out
+
+
 def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
     ops.gelu_fast(out, x)
     return out
@@ -47,11 +61,15 @@ def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
 
 __all__ = [
     "silu_and_mul",
+    "mul_and_silu",
     "gelu_and_mul",
     "gelu_tanh_and_mul",
    "fatrelu_and_mul",
     "gelu_fast",
     "gelu_new",
     "gelu_quick",
+    "gelu_tanh",
+    "silu",
+    "gelu",
     "layers",
 ]
build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc and b/build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc differ
 
build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc and b/build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc differ
 
build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc and b/build/torch27-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc differ
 
build/torch27-cxx11-cu126-x86_64-linux/activation/{_activation_be5bedb_dirty.abi3.so → _activation_20250917153858.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f075a6e0d47a2d382d16291b1c5d7d1d98111e2bbc5891b14b627e3c1778b699
-size 2621536
+oid sha256:87ee9280b670b3323378c17d75ee7082f419987a568769fe8479bf509ee6c245
+size 2852232
build/torch27-cxx11-cu126-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_be5bedb_dirty
-ops = torch.ops._activation_be5bedb_dirty
+from . import _activation_20250917153858
+ops = torch.ops._activation_20250917153858
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_be5bedb_dirty::{op_name}"
+    return f"_activation_20250917153858::{op_name}"
build/torch27-cxx11-cu126-x86_64-linux/activation/layers.py CHANGED
@@ -23,6 +23,57 @@ class SiluAndMul(nn.Module):
         ops.silu_and_mul(out, x)
         return out
 
+class Silu(nn.Module):
+    """An activation function for SiLU.
+
+    The function computes x -> silu(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.silu(out, x)
+        return out
+
+class Gelu(nn.Module):
+    """An activation function for GELU.
+
+    The function computes x -> gelu(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.gelu(out, x)
+        return out
+
+class GeluTanh(nn.Module):
+    """An activation function for GELU with `tanh` approximation.
+
+    The function computes x -> gelu_tanh(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.gelu_tanh(out, x)
+        return out
+
 
 class MulAndSilu(nn.Module):
     """An activation function for SwiGLU.
build/torch27-cxx11-cu128-x86_64-linux/activation/__init__.py CHANGED
@@ -30,6 +30,20 @@ def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0)
     return out
 
 
+def gelu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu(out, x)
+    return out
+
+def silu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu(out, x)
+    return out
+
+
+def gelu_tanh(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh(out, x)
+    return out
+
+
 def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
     ops.gelu_fast(out, x)
     return out
@@ -47,11 +61,15 @@ def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
 
 __all__ = [
     "silu_and_mul",
+    "mul_and_silu",
     "gelu_and_mul",
     "gelu_tanh_and_mul",
     "fatrelu_and_mul",
     "gelu_fast",
     "gelu_new",
     "gelu_quick",
+    "gelu_tanh",
+    "silu",
+    "gelu",
     "layers",
 ]
build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc and b/build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc differ
 
build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc and b/build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc differ
 
build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc and b/build/torch27-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc differ
 
build/torch27-cxx11-cu128-x86_64-linux/activation/{_activation_be5bedb_dirty.abi3.so → _activation_20250917153858.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cc2406aa2fa09dd7bc1fd5e87cdcdf55edfc7e0853fad5f977e2500e08fa8899
-size 3565432
+oid sha256:28ca9a3e35c49ae49694d7c6c77f85f3664622cad9c857bf13dfbf3bc144ae1b
+size 4127912
build/torch27-cxx11-cu128-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_be5bedb_dirty
-ops = torch.ops._activation_be5bedb_dirty
+from . import _activation_20250917153858
+ops = torch.ops._activation_20250917153858
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_be5bedb_dirty::{op_name}"
+    return f"_activation_20250917153858::{op_name}"
build/torch27-cxx11-cu128-x86_64-linux/activation/layers.py CHANGED
@@ -23,6 +23,57 @@ class SiluAndMul(nn.Module):
         ops.silu_and_mul(out, x)
         return out
 
+class Silu(nn.Module):
+    """An activation function for SiLU.
+
+    The function computes x -> silu(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.silu(out, x)
+        return out
+
+class Gelu(nn.Module):
+    """An activation function for GELU.
+
+    The function computes x -> gelu(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.gelu(out, x)
+        return out
+
+class GeluTanh(nn.Module):
+    """An activation function for GELU with `tanh` approximation.
+
+    The function computes x -> gelu_tanh(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.gelu_tanh(out, x)
+        return out
+
 
 class MulAndSilu(nn.Module):
     """An activation function for SwiGLU.
build/torch28-cxx11-cu126-x86_64-linux/activation/__init__.py CHANGED
@@ -30,6 +30,20 @@ def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0)
     return out
 
 
+def gelu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu(out, x)
+    return out
+
+def silu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu(out, x)
+    return out
+
+
+def gelu_tanh(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh(out, x)
+    return out
+
+
 def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
     ops.gelu_fast(out, x)
     return out
@@ -47,11 +61,15 @@ def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
 
 __all__ = [
     "silu_and_mul",
+    "mul_and_silu",
     "gelu_and_mul",
     "gelu_tanh_and_mul",
     "fatrelu_and_mul",
     "gelu_fast",
     "gelu_new",
     "gelu_quick",
+    "gelu_tanh",
+    "silu",
+    "gelu",
     "layers",
 ]
build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc and b/build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc differ
 
build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc and b/build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc differ
 
build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc and b/build/torch28-cxx11-cu126-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc differ
 
build/torch28-cxx11-cu126-x86_64-linux/activation/{_activation_be5bedb_dirty.abi3.so → _activation_20250917153858.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c88e87951b92ea55313ef79a34d284cb2a23713d3bdafee735caa4fc955b9dcb
-size 2610616
+oid sha256:0fcd47dd765bba10bb09f65388f6c1b357b117b2611c17bae5bf8214499a9e39
+size 2837224
build/torch28-cxx11-cu126-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_be5bedb_dirty
-ops = torch.ops._activation_be5bedb_dirty
+from . import _activation_20250917153858
+ops = torch.ops._activation_20250917153858
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_be5bedb_dirty::{op_name}"
+    return f"_activation_20250917153858::{op_name}"
build/torch28-cxx11-cu126-x86_64-linux/activation/layers.py CHANGED
@@ -23,6 +23,57 @@ class SiluAndMul(nn.Module):
         ops.silu_and_mul(out, x)
         return out
 
+class Silu(nn.Module):
+    """An activation function for SiLU.
+
+    The function computes x -> silu(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.silu(out, x)
+        return out
+
+class Gelu(nn.Module):
+    """An activation function for GELU.
+
+    The function computes x -> gelu(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.gelu(out, x)
+        return out
+
+class GeluTanh(nn.Module):
+    """An activation function for GELU with `tanh` approximation.
+
+    The function computes x -> gelu_tanh(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.gelu_tanh(out, x)
+        return out
+
 
 class MulAndSilu(nn.Module):
     """An activation function for SwiGLU.
build/torch28-cxx11-cu128-x86_64-linux/activation/__init__.py CHANGED
@@ -30,6 +30,20 @@ def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0)
     return out
 
 
+def gelu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu(out, x)
+    return out
+
+def silu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu(out, x)
+    return out
+
+
+def gelu_tanh(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh(out, x)
+    return out
+
+
 def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
     ops.gelu_fast(out, x)
     return out
@@ -47,11 +61,15 @@ def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
 
 __all__ = [
     "silu_and_mul",
+    "mul_and_silu",
     "gelu_and_mul",
     "gelu_tanh_and_mul",
     "fatrelu_and_mul",
     "gelu_fast",
     "gelu_new",
     "gelu_quick",
+    "gelu_tanh",
+    "silu",
+    "gelu",
     "layers",
 ]
build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc and b/build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc differ
 
build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc and b/build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc differ
 
build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc and b/build/torch28-cxx11-cu128-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc differ
 
build/torch28-cxx11-cu128-x86_64-linux/activation/_activation_20250917153858.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0e6d88c71eebabc842f6a566de7cfaf24d3d90a30572eae584a3b51dcb7e838e
+size 4117000
build/torch28-cxx11-cu128-x86_64-linux/activation/_activation_be5bedb_dirty.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:cf784c7ab178c476fc6268efe820b1948c7c5b8f049c046c851b03067da5dd59
-size 3558616
build/torch28-cxx11-cu128-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_be5bedb_dirty
-ops = torch.ops._activation_be5bedb_dirty
+from . import _activation_20250917153858
+ops = torch.ops._activation_20250917153858
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_be5bedb_dirty::{op_name}"
+    return f"_activation_20250917153858::{op_name}"
build/torch28-cxx11-cu128-x86_64-linux/activation/layers.py CHANGED
@@ -23,6 +23,57 @@ class SiluAndMul(nn.Module):
         ops.silu_and_mul(out, x)
         return out
 
+class Silu(nn.Module):
+    """An activation function for SiLU.
+
+    The function computes x -> silu(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.silu(out, x)
+        return out
+
+class Gelu(nn.Module):
+    """An activation function for GELU.
+
+    The function computes x -> gelu(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.gelu(out, x)
+        return out
+
+class GeluTanh(nn.Module):
+    """An activation function for GELU with `tanh` approximation.
+
+    The function computes x -> gelu_tanh(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.gelu_tanh(out, x)
+        return out
+
 
 class MulAndSilu(nn.Module):
     """An activation function for SwiGLU.
build/torch28-cxx11-cu129-x86_64-linux/activation/__init__.py CHANGED
@@ -30,6 +30,20 @@ def fatrelu_and_mul(out: torch.Tensor, x: torch.Tensor, threshold: float = 0.0)
     return out
 
 
+def gelu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu(out, x)
+    return out
+
+def silu(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.silu(out, x)
+    return out
+
+
+def gelu_tanh(out: torch.Tensor, x: torch.Tensor) -> None:
+    ops.gelu_tanh(out, x)
+    return out
+
+
 def gelu_fast(out: torch.Tensor, x: torch.Tensor) -> None:
     ops.gelu_fast(out, x)
     return out
@@ -47,11 +61,15 @@ def gelu_quick(out: torch.Tensor, x: torch.Tensor) -> None:
 
 __all__ = [
     "silu_and_mul",
+    "mul_and_silu",
     "gelu_and_mul",
     "gelu_tanh_and_mul",
     "fatrelu_and_mul",
     "gelu_fast",
     "gelu_new",
     "gelu_quick",
+    "gelu_tanh",
+    "silu",
+    "gelu",
     "layers",
 ]
build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc and b/build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/__init__.cpython-313.pyc differ
 
build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc and b/build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/_ops.cpython-313.pyc differ
 
build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc CHANGED
Binary files a/build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc and b/build/torch28-cxx11-cu129-x86_64-linux/activation/__pycache__/layers.cpython-313.pyc differ
 
build/torch28-cxx11-cu129-x86_64-linux/activation/_activation_20250917153858.abi3.so ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f3c1b86db31b04bd5fe75b0c9d6915ba2766a2456ea9bd1a20f2d75c4b1acf35
+size 4154880
build/torch28-cxx11-cu129-x86_64-linux/activation/_activation_be5bedb_dirty.abi3.so DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9e7cca3169eea8cbd67c61706d102548e49aadc936f8c2943efef3e7c4c0ee0d
-size 3592400
build/torch28-cxx11-cu129-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_be5bedb_dirty
-ops = torch.ops._activation_be5bedb_dirty
+from . import _activation_20250917153858
+ops = torch.ops._activation_20250917153858
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_be5bedb_dirty::{op_name}"
+    return f"_activation_20250917153858::{op_name}"
build/torch28-cxx11-cu129-x86_64-linux/activation/layers.py CHANGED
@@ -23,6 +23,57 @@ class SiluAndMul(nn.Module):
         ops.silu_and_mul(out, x)
         return out
 
+class Silu(nn.Module):
+    """An activation function for SiLU.
+
+    The function computes x -> silu(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.silu(out, x)
+        return out
+
+class Gelu(nn.Module):
+    """An activation function for GELU.
+
+    The function computes x -> gelu(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.gelu(out, x)
+        return out
+
+class GeluTanh(nn.Module):
+    """An activation function for GELU with `tanh` approximation.
+
+    The function computes x -> gelu_tanh(x).
+
+    Shapes:
+        x: (num_tokens, d) or (batch_size, seq_len, d)
+        return: (num_tokens, d) or (batch_size, seq_len, d)
+    """
+
+    can_torch_compile: bool = True
+
+    def forward(self, x: torch.Tensor):
+        out = torch.empty_like(x)
+        ops.gelu_tanh(out, x)
+        return out
+
 
 class MulAndSilu(nn.Module):
     """An activation function for SwiGLU.