Fix bug in normalize to avoid divide by zero (#239)

Co-authored-by: rj <rj@teleopstrio-razer.lan>
Co-authored-by: Remi <re.cadene@gmail.com>
This commit is contained in:
Ruijie 2024-06-04 06:21:28 -04:00 committed by GitHub
parent bd3111f28b
commit b0d954c6e1
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
1 changed file with 1 addition and 1 deletion

View File

@@ -147,7 +147,7 @@ class Normalize(nn.Module):
     assert not torch.isinf(min).any(), _no_stats_error_str("min")
     assert not torch.isinf(max).any(), _no_stats_error_str("max")
     # normalize to [0,1]
-    batch[key] = (batch[key] - min) / (max - min)
+    batch[key] = (batch[key] - min) / (max - min + 1e-8)
     # normalize to [-1, 1]
     batch[key] = batch[key] * 2 - 1
 else: