iamwyldecat committed
Commit f72121c · Parent: afd2a56

chore(poly-norm): update README and build artifacts
README.md CHANGED
@@ -1,4 +1,26 @@
 ---
 tags:
 - kernel
----
+---
+
+# Activation
+
+Activation is a Python package that provides custom CUDA-based activation kernels, primarily targeting AMD GPUs.
+
+- Currently implemented:
+  - [PolyNorm](https://arxiv.org/html/2411.03884v1)
+
+## Usage
+
+```python
+import torch
+from kernels import get_kernel
+
+activation = get_kernel("motif-technologies/activation")
+
+torch.set_default_device("cuda")
+poly_norm = activation.layers.PolyNorm(eps=1e-6)
+x = torch.randn(10, 10)
+
+print(poly_norm(x))
+```
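
(Note: on ROCm builds of PyTorch, AMD GPUs are exposed through the `cuda` device type, so the `torch.set_default_device("cuda")` call in the usage example applies unchanged on the rocm62/rocm63 wheels updated in this commit.)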
build/torch26-cxx11-rocm62-x86_64-linux/activation/{_activation_44e9845_dirty.abi3.so → _activation_afd2a56_dirty.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:527e5aac540e24dc3791fd423fea23f687ea3cffdb627c6a6e35f4df1aa7dec4
+oid sha256:786b0de9d0e49b4b35659ed6f257a4ed620186266cff7e909a69d4eca9ced89d
 size 2460736
build/torch26-cxx11-rocm62-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_44e9845_dirty
-ops = torch.ops._activation_44e9845_dirty
+from . import _activation_afd2a56_dirty
+ops = torch.ops._activation_afd2a56_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_44e9845_dirty::{op_name}"
+    return f"_activation_afd2a56_dirty::{op_name}"
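
For orientation: the build hash is baked into both the extension module name and its torch op namespace, so every rebuild registers its ops under a fresh namespace and stale cached modules cannot collide with it. A minimal sketch of how the helper is used — the `poly_norm` op name is an assumption for illustration, not something this diff confirms:

```python
# Sketch: resolving a fully qualified op name for this build.
# "poly_norm" is a hypothetical op name, used only for illustration.
from activation._ops import ops, add_op_namespace_prefix

qualified = add_op_namespace_prefix("poly_norm")
print(qualified)  # _activation_afd2a56_dirty::poly_norm

# Ops registered by the compiled .so are then reachable on `ops`,
# e.g. getattr(ops, "poly_norm"), under the same assumption.
```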
build/torch26-cxx11-rocm62-x86_64-linux/activation/layers.py CHANGED
@@ -5,7 +5,7 @@ from .poly_norm import PolyNormFunction
 
 
 class PolyNorm(nn.Module):
-    def __init__(self, eps):
+    def __init__(self, eps=1e-6):
         super().__init__()
         self.weight = torch.nn.Parameter(torch.ones(3) / 3)
         self.bias = torch.nn.Parameter(torch.zeros(1))
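
For context on the layer itself: per the linked paper, PolyNorm normalizes the first three powers of its input and combines them with the learned three-element weight and scalar bias initialized above. A pure-PyTorch sketch of the forward pass, assuming an RMS-style norm over the last dimension (the packaged kernel actually dispatches through PolyNormFunction, and the norm variant and weight-to-power ordering here are assumptions):

```python
import torch

def poly_norm_reference(x: torch.Tensor, weight: torch.Tensor,
                        bias: torch.Tensor, eps: float = 1e-6) -> torch.Tensor:
    """Reference PolyNorm forward pass (sketch only, not the shipped kernel)."""
    def norm(t: torch.Tensor) -> torch.Tensor:
        # Scale by the reciprocal root-mean-square over the feature dimension.
        return t * torch.rsqrt(t.pow(2).mean(dim=-1, keepdim=True) + eps)

    # Weighted sum of the normalized first three powers of x, plus bias.
    # The weight-to-power ordering is an assumption.
    return (weight[0] * norm(x ** 3)
            + weight[1] * norm(x ** 2)
            + weight[2] * norm(x)
            + bias)
```

With `weight` initialized to `torch.ones(3) / 3` and `bias` to zero, the layer starts out as an unweighted average of the three normalized powers.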
build/torch27-cxx11-rocm63-x86_64-linux/activation/{_activation_44e9845_dirty.abi3.so → _activation_afd2a56_dirty.abi3.so} RENAMED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b3e00863b72834e1d121e377e41724b1479703051aed7d9d8a64019d6a92bf54
+oid sha256:56ecd54f469047866ceeefd9bfea7cfddd2126bc9c21ff110ba0308e337cf61d
 size 2447432
build/torch27-cxx11-rocm63-x86_64-linux/activation/_ops.py CHANGED
@@ -1,9 +1,9 @@
 import torch
-from . import _activation_44e9845_dirty
-ops = torch.ops._activation_44e9845_dirty
+from . import _activation_afd2a56_dirty
+ops = torch.ops._activation_afd2a56_dirty
 
 def add_op_namespace_prefix(op_name: str):
     """
     Prefix op by namespace.
     """
-    return f"_activation_44e9845_dirty::{op_name}"
+    return f"_activation_afd2a56_dirty::{op_name}"
build/torch27-cxx11-rocm63-x86_64-linux/activation/layers.py CHANGED
@@ -5,7 +5,7 @@ from .poly_norm import PolyNormFunction
 
 
 class PolyNorm(nn.Module):
-    def __init__(self, eps):
+    def __init__(self, eps=1e-6):
         super().__init__()
         self.weight = torch.nn.Parameter(torch.ones(3) / 3)
         self.bias = torch.nn.Parameter(torch.zeros(1))