Update model.py
model.py (CHANGED)
@@ -4,7 +4,7 @@ import torch.nn.functional as F
 
 # ─── Residual Pocket Block ───────────────────────────────────
 class BottleneckResBlock(nn.Module):
-    def __init__(self, dim, kernel=3, dropout=0.
+    def __init__(self, dim, kernel=3, dropout=0.0):
         super().__init__()
         self.norm = nn.LayerNorm(dim)
         self.conv = nn.Conv1d(dim, dim, kernel_size=kernel, padding=kernel // 2, groups=1)
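For context, here is a minimal runnable sketch of the patched block. Only `__init__` down to `self.conv` appears in the hunk, so the dropout layer and the `forward` pass below are assumptions in the usual residual-block style, not the repository's actual code.

```python
import torch
import torch.nn as nn

class BottleneckResBlock(nn.Module):
    """Sketch of the patched block; everything past self.conv is assumed."""
    def __init__(self, dim, kernel=3, dropout=0.0):
        super().__init__()
        self.norm = nn.LayerNorm(dim)
        self.conv = nn.Conv1d(dim, dim, kernel_size=kernel,
                              padding=kernel // 2, groups=1)
        # Assumed: the now-complete 0.0 default disables dropout unless set.
        self.drop = nn.Dropout(dropout)

    def forward(self, x):
        # Assumed layout: (batch, seq, dim); Conv1d expects (batch, dim, seq).
        h = self.norm(x)
        h = self.conv(h.transpose(1, 2)).transpose(1, 2)
        return x + self.drop(h)  # residual add, as the block name suggests

# Quick shape check
block = BottleneckResBlock(dim=64)
print(block(torch.randn(2, 16, 64)).shape)  # torch.Size([2, 16, 64])
```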
@@ -36,7 +36,7 @@ class TwoStreamShuntAdapter(nn.Module):
 
         use_norm = config.get("layer_norm", True)
         use_do = config.get("use_dropout", True)
-        do_p = config.get("dropout", 0.
+        do_p = config.get("dropout", 0.0)
         proj_depth = config.get("proj_layers", 2)
 
         def build_projection(input_dim, output_dim):
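The second hunk gives the config lookup the same treatment: with the completed default, an adapter config that omits "dropout" now resolves to 0.0. The body of `build_projection` is not part of the diff, so the sketch below of how these four settings might combine is a hypothetical Linear/GELU stack, not the actual implementation.

```python
import torch.nn as nn

config = {"layer_norm": True, "use_dropout": True, "proj_layers": 2}  # no "dropout" key

use_norm = config.get("layer_norm", True)
use_do = config.get("use_dropout", True)
do_p = config.get("dropout", 0.0)         # falls back to 0.0 after the fix
proj_depth = config.get("proj_layers", 2)

def build_projection(input_dim, output_dim):
    # Hypothetical body: the diff shows only the signature, so this
    # Linear/GELU stack is an illustrative guess at the pattern.
    layers, dim = [], input_dim
    for i in range(proj_depth):
        last = (i == proj_depth - 1)
        layers.append(nn.Linear(dim, output_dim))
        dim = output_dim
        if not last:
            layers.append(nn.GELU())
            if use_norm:
                layers.append(nn.LayerNorm(dim))
            if use_do:
                layers.append(nn.Dropout(do_p))
    return nn.Sequential(*layers)

proj = build_projection(384, 768)  # e.g. mapping one stream's width onto the other's
```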