Commit ec334e4

[20231219] updated model codes
1 parent 91b46d2 commit ec334e4

7 files changed, +6 -190 lines

models/AnomalyTransformer.py

File mode changed (100644 → 100755).

models/LSTMEncDec.py

File mode changed (100644 → 100755).

models/MLP.py

File mode changed (100644 → 100755). +6 -60 lines.
@@ -9,12 +9,11 @@
 
 
 class MLP(nn.Module):
-    def __init__(self, seq_len, num_channels, latent_space_size, gamma, normalization="None",
-                 use_sigmoid_output=False, use_dropout=False, use_batchnorm=False):
+    def __init__(self, seq_len, num_channels, latent_space_size, gamma, normalization="None"):
         super().__init__()
         self.L, self.C = seq_len, num_channels
-        self.encoder = Encoder(seq_len*num_channels, latent_space_size, use_dropout, use_batchnorm)
-        self.decoder = Decoder(seq_len*num_channels, latent_space_size, use_dropout, use_batchnorm)
+        self.encoder = Encoder(seq_len*num_channels, latent_space_size)
+        self.decoder = Decoder(seq_len*num_channels, latent_space_size)
         self.normalization = normalization
 
         if self.normalization == "Detrend":
@@ -23,30 +22,24 @@ def __init__(self, seq_len, num_channels, latent_space_size, gamma, normalizatio
         else:
             self.use_normalizer = False
 
-        self.use_sigmoid_output = use_sigmoid_output
-        if self.use_sigmoid_output:
-            self.sigmoid = torch.nn.Sigmoid()
-
-
 
     def forward(self, X):
         B, L, C = X.shape
         assert (L == self.L) and (C == self.C)
 
         if self.use_normalizer:
             X = self.normalizer(X, "norm")
+
         z = self.encoder(X.reshape(B, L*C))
         out = self.decoder(z).reshape(B, L, C)
 
-        if self.use_sigmoid_output:
-            out = self.sigmoid(out)
         if self.use_normalizer:
             out = self.normalizer(out, "denorm")
         return out
 
 
 class Encoder(nn.Module):
-    def __init__(self, input_size, latent_space_size, use_dropout=False, use_batchnorm=False):
+    def __init__(self, input_size, latent_space_size):
         super().__init__()
         self.linear1 = nn.Linear(input_size, input_size // 2)
         self.relu1 = nn.ReLU()
@@ -56,78 +49,31 @@ def __init__(self, input_size, latent_space_size, use_dropout=False, use_batchno
         self.relu3 = nn.ReLU()
 
 
-        self.use_dropout = use_dropout
-        if self.use_dropout:
-            self.dropout1 = nn.Dropout(p=0.2)
-            self.dropout2 = nn.Dropout(p=0.2)
-            self.dropout3 = nn.Dropout(p=0.2)
-
-        self.use_batchnorm = use_batchnorm
-        if self.use_batchnorm:
-            self.batchnorm1 = nn.BatchNorm1d(input_size//2)
-            self.batchnorm2 = nn.BatchNorm1d(input_size//4)
-            self.batchnorm3 = nn.BatchNorm1d(latent_space_size)
-
-
     def forward(self, x):
         x = self.linear1(x)
-        if self.use_batchnorm:
-            x = self.batchnorm1(x)
         x = self.relu1(x)
-        if self.use_dropout:
-            x = self.dropout1(x)
-
         x = self.linear2(x)
-        if self.use_batchnorm:
-            x = self.batchnorm2(x)
         x = self.relu2(x)
-        if self.use_dropout:
-            x = self.dropout2(x)
-
         x = self.linear3(x)
-        if self.use_batchnorm:
-            x = self.batchnorm3(x)
         x = self.relu3(x)
-        if self.use_dropout:
-            x = self.dropout3(x)
         return x
 
 
 class Decoder(nn.Module):
-    def __init__(self, input_size, latent_space_size, use_dropout=False, use_batchnorm=False):
+    def __init__(self, input_size, latent_space_size):
         super().__init__()
         self.linear1 = nn.Linear(latent_space_size, input_size // 4)
         self.relu1 = nn.ReLU()
         self.linear2 = nn.Linear(input_size // 4, input_size // 2)
         self.relu2 = nn.ReLU()
         self.linear3 = nn.Linear(input_size // 2, input_size)
 
-        self.use_dropout = use_dropout
-        if self.use_dropout:
-            self.dropout1 = nn.Dropout(p=0.2)
-            self.dropout2 = nn.Dropout(p=0.2)
-            self.dropout3 = nn.Dropout(p=0.2)
-
-        self.use_batchnorm = use_batchnorm
-        if self.use_batchnorm:
-            self.batchnorm1 = nn.BatchNorm1d(input_size//4)
-            self.batchnorm2 = nn.BatchNorm1d(input_size//2)
 
     def forward(self, x):
         x = self.linear1(x)
-        if self.use_batchnorm:
-            x = self.batchnorm1(x)
         x = self.relu1(x)
-        if self.use_dropout:
-            x = self.dropout1(x)
-
         x = self.linear2(x)
-        if self.use_batchnorm:
-            x = self.batchnorm2(x)
         x = self.relu2(x)
-        if self.use_dropout:
-            x = self.dropout2(x)
-
 
         out = self.linear3(x)
         return out
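For context, a minimal usage sketch of the MLP interface after this commit. The shapes and hyperparameter values below are hypothetical, and normalization is left at its "None" default, so the Detrend normalizer path (which presumably consumes gamma; its construction is not shown in this hunk) is not exercised:

import torch
from models.MLP import MLP

# Hypothetical window shape: batch of 8 windows, 64 timesteps, 5 channels.
B, L, C = 8, 64, 5

# After this commit the constructor no longer accepts use_sigmoid_output,
# use_dropout, or use_batchnorm. gamma is still required (assumed here to
# parameterize the "Detrend" normalizer, which is unused when
# normalization="None").
model = MLP(seq_len=L, num_channels=C, latent_space_size=128, gamma=0.99,
            normalization="None")

X = torch.randn(B, L, C)
out = model(X)  # flatten -> encode -> decode -> reshape; same shape as input
assert out.shape == (B, L, C)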

models/Normalizer.py

File mode changed (100644 → 100755).

models/OmniAnomaly.py

File deleted (-130 lines).

models/THOC.py

File mode changed (100644 → 100755).

models/USAD.py

File mode changed (100644 → 100755).
