Skip to content

Commit

Permalink
Merge pull request #22 from Fangyh09/Fangyh09-patch-1
Browse files Browse the repository at this point in the history
Fix lint errors
  • Loading branch information
Fangyh09 authored Jan 5, 2024
2 parents b708888 + d67097c commit db25532
Showing 1 changed file with 2 additions and 5 deletions.
7 changes: 2 additions & 5 deletions torch_receptive_field/receptive_field.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,16 @@
import matplotlib.pyplot as plt
from torch.autograd import Variable
from matplotlib.animation import FuncAnimation

from collections import OrderedDict
import numpy as np


def check_same(stride):
    """Collapse a uniform 2-/3-element stride into a single scalar.

    Scalars pass through untouched. A list/tuple must have length 2 or 3
    with every element equal (anisotropic strides are not supported by the
    receptive-field arithmetic); its first element is returned.
    """
    if isinstance(stride, (list, tuple)):
        # All entries must match — receptive-field math assumes isotropy.
        assert len(stride) in (2, 3) and all(s == stride[0] for s in stride)
        stride = stride[0]
    return stride


def receptive_field(model, input_size, batch_size=-1, device="cuda"):
'''
:parameter
Expand Down Expand Up @@ -135,9 +135,6 @@ def hook(module, input, output):
line_new = "{:>20} {:>10} {:>10} {:>10} {:>15} ".format("Layer (type)", "map size", "start", "jump", "receptive_field")
print(line_new)
print("==============================================================================")
total_params = 0
total_output = 0
trainable_params = 0
for layer in receptive_field:
# input_shape, output_shape, trainable, nb_params
assert "start" in receptive_field[layer], layer
Expand Down

0 comments on commit db25532

Please sign in to comment.