Skip to content

Commit 44ea99a

Browse files
committed
Fix batchnorm bias bug.
1 parent 780e008 commit 44ea99a

File tree

1 file changed

+1
-2
lines changed

1 file changed

+1
-2
lines changed

onnx2pytorch/operations/batchnorm.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@ class _LazyBatchNorm(_LazyNormBase, _BatchNorm):
 
         cls_to_become = _BatchNorm
 
-
 except ImportError:
     # for torch < 1.10.0
     from torch.nn.modules.batchnorm import _LazyBatchNorm
@@ -56,7 +55,7 @@ def forward(self, X, scale=None, B=None, input_mean=None, input_var=None):
         if scale is not None:
             getattr(self.bnu, "weight").data = scale
         if B is not None:
-            getattr(self.bnu, "bias").data = scale
+            getattr(self.bnu, "bias").data = B
         if input_mean is not None:
             getattr(self.bnu, "running_mean").data = input_mean
         if input_var is not None:

0 commit comments

Comments
 (0)