Commit f72121c
Parent: afd2a56

chore(poly-norm): update README and build artifacts

Files changed:
- README.md +23 -1
- build/torch26-cxx11-rocm62-x86_64-linux/activation/{_activation_44e9845_dirty.abi3.so → _activation_afd2a56_dirty.abi3.so} +1 -1
- build/torch26-cxx11-rocm62-x86_64-linux/activation/_ops.py +3 -3
- build/torch26-cxx11-rocm62-x86_64-linux/activation/layers.py +1 -1
- build/torch27-cxx11-rocm63-x86_64-linux/activation/{_activation_44e9845_dirty.abi3.so → _activation_afd2a56_dirty.abi3.so} +1 -1
- build/torch27-cxx11-rocm63-x86_64-linux/activation/_ops.py +3 -3
- build/torch27-cxx11-rocm63-x86_64-linux/activation/layers.py +1 -1
README.md CHANGED
@@ -1,4 +1,26 @@
 ---
 tags:
 - kernel
----
+---
+
+# Activation
+
+Activation is a Python package that contains custom CUDA-based activation kernels, primarily targeting AMD GPUs.
+
+Currently implemented:
+- [PolyNorm](https://arxiv.org/html/2411.03884v1)
+
+## Usage
+
+```python
+import torch
+from kernels import get_kernel
+
+activation = get_kernel("motif-technologies/activation")
+
+torch.set_default_device("cuda")
+poly_norm = activation.layers.PolyNorm(eps=1e-6)
+x = torch.randn(10, 10)
+
+print(poly_norm(x))
+```
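The usage snippet added to the README assumes Hugging Face's `kernels` package (which provides `get_kernel`) and a CUDA/ROCm device, since it sets the default device to "cuda". For reference, here is the same example with explicit device placement instead of a process-wide default; every identifier comes from the diff above, only the `.to("cuda")` placement style differs:

```python
import torch
from kernels import get_kernel

# Fetches and loads the compiled kernel from the Hub repo named in the README.
activation = get_kernel("motif-technologies/activation")

# Same example as the README, with explicit placement rather than
# torch.set_default_device("cuda").
poly_norm = activation.layers.PolyNorm(eps=1e-6).to("cuda")
x = torch.randn(10, 10, device="cuda")
print(poly_norm(x))
```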
build/torch26-cxx11-rocm62-x86_64-linux/activation/{_activation_44e9845_dirty.abi3.so → _activation_afd2a56_dirty.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:786b0de9d0e49b4b35659ed6f257a4ed620186266cff7e909a69d4eca9ced89d
 size 2460736
build/torch26-cxx11-rocm62-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_44e9845_dirty
-ops = torch.ops._activation_44e9845_dirty
+from . import _activation_afd2a56_dirty
+ops = torch.ops._activation_afd2a56_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_44e9845_dirty::{op_name}"
+    return f"_activation_afd2a56_dirty::{op_name}"
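The `_activation_<hash>_dirty` suffix in `_ops.py` ties the op namespace to the exact build (here the parent commit afd2a56, built from a dirty tree), presumably so that differently-built copies of the extension can register their ops side by side without colliding in the global `torch.ops` registry. A self-contained sketch of just the naming scheme; the `"poly_norm"` op name is a hypothetical example, not something this diff shows:

```python
# Standalone sketch of the namespacing scheme in _ops.py: the build id embeds
# the short git hash (afd2a56) plus a "dirty" marker, so op names from
# different builds never collide under torch.ops.
BUILD_ID = "_activation_afd2a56_dirty"

def add_op_namespace_prefix(op_name: str) -> str:
    """Prefix op by namespace, mirroring the real _ops.py."""
    return f"{BUILD_ID}::{op_name}"

# "poly_norm" is a hypothetical op name, used here only for illustration.
assert add_op_namespace_prefix("poly_norm") == "_activation_afd2a56_dirty::poly_norm"
```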
build/torch26-cxx11-rocm62-x86_64-linux/activation/layers.py CHANGED
@@ -5,7 +5,7 @@ from .poly_norm import PolyNormFunction
 
 
 class PolyNorm(nn.Module):
-    def __init__(self, eps):
+    def __init__(self, eps=1e-6):
         super().__init__()
         self.weight = torch.nn.Parameter(torch.ones(3) / 3)
         self.bias = torch.nn.Parameter(torch.zeros(1))
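`layers.py` only declares the parameters (three polynomial weights initialized to 1/3, a scalar bias, and the now-defaulted `eps=1e-6`); the forward pass is delegated to `PolyNormFunction`, i.e. the compiled kernel. For intuition only, a plain-PyTorch sketch of what PolyNorm computes, inferred from the parameter shapes here and the paper linked in the README; this is an assumption, not the kernel's actual code:

```python
import torch

def poly_norm_reference(x, weight, bias, eps=1e-6):
    # RMS-style normalization over the last dimension.
    def norm(v):
        return v * torch.rsqrt(v.pow(2).mean(dim=-1, keepdim=True) + eps)

    # Weighted sum of normalized powers of x plus a scalar bias
    # (degree-3 PolyNorm, per the paper linked in the README).
    return weight[0] * norm(x**3) + weight[1] * norm(x**2) + weight[2] * norm(x) + bias

x = torch.randn(10, 10)
w = torch.ones(3) / 3  # matches the initialization in layers.py
b = torch.zeros(1)     # matches the initialization in layers.py
print(poly_norm_reference(x, w, b).shape)  # torch.Size([10, 10])
```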
build/torch27-cxx11-rocm63-x86_64-linux/activation/{_activation_44e9845_dirty.abi3.so → _activation_afd2a56_dirty.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:56ecd54f469047866ceeefd9bfea7cfddd2126bc9c21ff110ba0308e337cf61d
 size 2447432
build/torch27-cxx11-rocm63-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_44e9845_dirty
-ops = torch.ops._activation_44e9845_dirty
+from . import _activation_afd2a56_dirty
+ops = torch.ops._activation_afd2a56_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_44e9845_dirty::{op_name}"
+    return f"_activation_afd2a56_dirty::{op_name}"
build/torch27-cxx11-rocm63-x86_64-linux/activation/layers.py CHANGED
@@ -5,7 +5,7 @@ from .poly_norm import PolyNormFunction
 
 
 class PolyNorm(nn.Module):
-    def __init__(self, eps):
+    def __init__(self, eps=1e-6):
         super().__init__()
         self.weight = torch.nn.Parameter(torch.ones(3) / 3)
         self.bias = torch.nn.Parameter(torch.zeros(1))