Skip to content

Commit 26a1caf

Browse files
committed
Fix import/API usage bug
1 parent f327122 commit 26a1caf

File tree

2 files changed

+2
-2
lines changed

2 files changed

+2
-2
lines changed

python/paddle/distributed/fleet/meta_optimizers/lamb_optimizer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ def apply_gradients(self, params_grads):
110110
return self.lamb_opt.apply_gradients(params_grads=params_grads)
111111

112112
def apply_optimize(self, loss, startup_program, params_grads):
113-
return self.lamb_opt.apply_optimize(
113+
return self.lamb_opt._apply_optimize(
114114
loss, startup_program=startup_program, params_grads=params_grads
115115
)
116116

test/amp/test_amp_api.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ def check_results(
7070
)
7171
out = model(x)
7272
loss = paddle.mean(out)
73-
optimizer = paddle.fluid.optimizer.Adadelta(learning_rate=0.001)
73+
optimizer = paddle.optimizer.Adadelta(learning_rate=0.001)
7474
optimizer = paddle.static.amp.decorate(
7575
optimizer,
7676
init_loss_scaling=128.0,

0 commit comments

Comments
 (0)