cybertronai / autograd-lib

exception inside "module_hook" leaves old hooks hanging around

yaroslavvb opened this issue

I get:

Warning, seeing hooks from previous registration, call 'autograd_lib.reset'

This may be a limitation of the 'contextmanager' decorator; the hook management may need to be rewritten without one (see the sketch after the repro below). To reproduce:

# !pip install autograd_lib

from autograd_lib import autograd_lib

import torch
from torch import nn

def simple_model(d, num_layers):
    """Creates a simple linear network with every layer initialized to identity."""
    layers = []
    layer = nn.Linear(d, d, bias=False)
    layer.weight.data.copy_(torch.eye(d))
    for i in range(num_layers):
        layers.append(layer)  # note: the same layer object is shared across depths
    return torch.nn.Sequential(*layers)

def least_squares(data, targets=None):
    """Least squares loss (like MSELoss, but an extra 1/2 factor."""
    if targets is None:
        targets = torch.zeros_like(data)
    err = data - targets.view(-1, data.shape[1])
    return torch.sum(err * err) / 2 / len(data)  # todo(y): replace with mean

depth = 1
width = 1
n = 1
model = simple_model(width, depth)
targets = torch.ones((n, width))
loss_fn = least_squares

B = 1
data = torch.randn(B, width)

activations = {}
def save_activations(layer, A, _):
    # module_hook callback: stash each layer's input activations
    activations[layer] = A

autograd_lib.register(model)

try:
    with autograd_lib.module_hook(save_activations):
        y = model(data)
        2/0  # deliberately raise inside the context
except ZeroDivisionError:
    pass

autograd_lib.register(model)  # registering again now triggers the warning
with autograd_lib.module_hook(save_activations):
    y = model(data)