KDD-OpenSource / DeepADoTS

Repository of the paper "A Systematic Evaluation of Deep Anomaly Detection Methods for Time Series".

Could you provide other examples similar to the MNIST autoencoder sample code?

zulihit opened this issue · comments

Thank you for your excellent work. Could you provide other examples similar to the MNIST autoencoder sample code, such as for DAGMM, Donut, or LSTMAD? I tried writing them myself but ran into some problems.

import pandas as pd
import tensorflow as tf
from sklearn.metrics import roc_auc_score

from src.algorithms import AutoEncoder
from src.datasets import Dataset

class MNIST(Dataset):
    """0 is the outlier class. The training set is free of outliers."""

    def __init__(self, seed):
        super().__init__(name="MNIST", file_name='')  # We do not need to load data from a file
        self.seed = seed

    def load(self):
        # 0 is the outlier, all other digits are normal
        OUTLIER_CLASS = 0
        mnist = tf.keras.datasets.mnist
        (x_train, y_train), (x_test, y_test) = mnist.load_data()
        # Label outliers with 1 and normal digits with 0
        y_train, y_test = (y_train == OUTLIER_CLASS), (y_test == OUTLIER_CLASS)
        x_train = x_train[~y_train]  # Remove outliers from the training set
        x_train, x_test = x_train / 255, x_test / 255
        x_train, x_test = x_train.reshape(-1, 784), x_test.reshape(-1, 784)
        self._data = tuple(pd.DataFrame(data=data) for data in [x_train, y_train, x_test, y_test])

x_train, y_train, x_test, y_test = MNIST(seed=0).data()

# Use fewer instances for demonstration purposes

x_train, y_train = x_train[:1000], y_train[:1000]
x_test, y_test = x_test[:100], y_test[:100]

model = AutoEncoder(sequence_length=1, num_epochs=40, hidden_size=10, lr=1e-4)
model.fit(x_train)

error = model.predict(x_test)
print(roc_auc_score(y_test, error)) # e.g. 0.8614
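
Since every detector in src.algorithms exposes the same fit(X) / predict(X) interface on pandas DataFrames, the example above should largely carry over by swapping the model line. Below is a minimal sketch for DAGMM on the same MNIST data; it assumes DAGMM accepts sequence_length and num_epochs keyword arguments, and the values are placeholders rather than tuned settings.

from sklearn.metrics import roc_auc_score

from src.algorithms import DAGMM

# MNIST rows are independent images, so a sequence length of 1 scores each row on its own
model = DAGMM(sequence_length=1, num_epochs=10)
model.fit(x_train)             # x_train: outlier-free DataFrame, as above
error = model.predict(x_test)  # higher anomaly score = more anomalous
print(roc_auc_score(y_test, error))

Donut and LSTMAD are sequence models, so independent MNIST images are not a natural fit for them; a univariate time series suits them better. The sketch below uses synthetic data and assumes the default constructors are usable and that, like AutoEncoder, both detectors accept a DataFrame in fit/predict. Donut handles only univariate input, and window-based detectors may leave NaN scores near the start of the test set.

import numpy as np
import pandas as pd
from sklearn.metrics import roc_auc_score

from src.algorithms import Donut, LSTMAD

# Synthetic univariate series: a noisy sine wave with spikes injected into the test half
rng = np.random.RandomState(0)
n = 2000
values = np.sin(0.05 * np.arange(n)) + 0.1 * rng.randn(n)
labels = np.zeros(n, dtype=int)
spikes = 1000 + rng.choice(1000, size=20, replace=False)  # anomalies only in the test half
values[spikes] += 3
labels[spikes] = 1

series = pd.DataFrame({'value': values})
ts_train, ts_test = series.iloc[:1000], series.iloc[1000:]
ts_labels = labels[1000:]

for detector in [Donut(), LSTMAD()]:
    detector.fit(ts_train)
    scores = np.nan_to_num(detector.predict(ts_test))  # guard against NaN scores at window edges
    print(type(detector).__name__, roc_auc_score(ts_labels, scores))

If these still fail, posting the exact traceback would make the problems easier to pin down.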