Commit
Fix Optimizer.set_learning_rate for int values
Speierers committed Sep 20, 2022
1 parent 69f39ce commit 53143db
Showing 1 changed file with 2 additions and 2 deletions.

src/python/python/ad/optimizers.py
@@ -92,8 +92,8 @@ def set_learning_rate(self, lr) -> None:
         # We use `dr.opaque` so that the JIT compiler does not include
         # the learning rate as a scalar literal into generated code, which
         # would defeat kernel caching when updating learning rates.
-        if isinstance(lr, float):
-            self.lr_default = lr
+        if isinstance(lr, float) or isinstance(lr, int):
+            self.lr_default = float(lr)
             self.lr_default_v = dr.opaque(dr.detached_t(mi.Float), lr, shape=1)
         elif isinstance(lr, dict):
             for k, v in lr.items():
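The fix itself is small: Python's `isinstance(lr, float)` is False for an `int`, so `set_learning_rate(1)` previously fell through this branch and the default rate was not updated; the widened check now accepts integers and coerces them with `float(lr)`, which also keeps the type of `self.lr_default` stable. A minimal usage sketch after this fix, assuming a Mitsuba 3 build with an AD-enabled variant (the variant name and parameter key below are illustrative):

```python
import mitsuba as mi

mi.set_variant('llvm_ad_rgb')  # illustrative; any JIT/AD variant works

opt = mi.ad.Adam(lr=0.1)

# Before this commit, the int failed the isinstance(lr, float) check;
# it is now coerced via float(lr) and handled like a float.
opt.set_learning_rate(1)  # equivalent to opt.set_learning_rate(1.0)

# Per-parameter rates via a dict were already supported and are unchanged.
opt.set_learning_rate({'my_param': 0.05})
```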
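The comment in the diff explains why the value is routed through `dr.opaque` rather than stored as a plain scalar: a Python scalar becomes a literal constant in the traced kernel, so every learning-rate change would hash to a new kernel and force a recompile, whereas an opaque 1-element array is read from memory at run time and the cached kernel can be reused. A sketch of the distinction (not part of the commit; assumes the Dr.Jit LLVM backend):

```python
import drjit as dr
from drjit.llvm.ad import Float  # assumes an LLVM build of Dr.Jit

x = dr.arange(Float, 1024)

# A scalar literal is folded into the generated kernel: evaluating this
# with 0.1 and then with 0.2 produces two distinct kernels.
y_literal = x * Float(0.1)

# An opaque value stays a run-time memory load, so the same cached
# kernel serves any learning rate.
lr = dr.opaque(Float, 0.1, shape=1)
y_opaque = x * lr
```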
