solve the comments
FeixLiu committed Jun 2, 2021
commit b7e2337 (parent: f4a8eeb)
Showing 4 changed files with 18 additions and 70 deletions.
3 changes: 2 additions & 1 deletion paddle/fluid/operators/optimizers/momentum_op.h
@@ -135,8 +135,9 @@ class MomentumOp : public framework::OperatorWithKernel {
 
     ctx->SetOutputDim("ParamOut", param_dim);
     ctx->SetOutputDim("VelocityOut", param_dim);
-    if (ctx->HasOutput("MasterParamOut"))
+    if (ctx->HasOutput("MasterParamOut")) {
       ctx->SetOutputDim("MasterParamOut", param_dim);
+    }
   }
 
   framework::OpKernelType GetExpectedKernelType(
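The C++ change above only adds braces around the single-statement if in MomentumOp::InferShape; MasterParamOut still receives the parameter's shape only when that optional output exists. For context, a minimal sketch (not part of this commit) of how that output gets switched on from Python, assuming paddle.optimizer.Momentum exposes the same multi_precision flag that the AdamW and LARS optimizers in this diff use:

# Illustrative sketch only. MasterParam/MasterParamOut carry the FP32 master
# copy that the momentum op maintains when multi_precision is enabled.
import paddle

linear = paddle.nn.Linear(4, 4)
opt = paddle.optimizer.Momentum(
    learning_rate=0.001,
    momentum=0.9,
    parameters=linear.parameters(),
    multi_precision=True)  # without this flag the op has no MasterParamOut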
49 changes: 16 additions & 33 deletions (second changed file)
@@ -73,10 +73,7 @@ def layer_warp(block_func, input, ch_in, ch_out, count, stride):
     return pool
 
 
-def train(use_pure_fp16=True,
-          use_nesterov=False,
-          use_adam=False,
-          use_lars=False):
+def train(use_pure_fp16=True, use_nesterov=False, optimizer=""):
     classdim = 10
     data_shape = [3, 32, 32]
     BATCH_SIZE = 32
@@ -99,13 +96,13 @@ def train(use_pure_fp16=True,
     # Test program
     test_program = train_program.clone(for_test=True)
 
-    if use_adam:
+    if optimizer == "Adam":
         optimizer = paddle.optimizer.AdamW(
             learning_rate=0.001,
             epsilon=1e-8,
             weight_decay=0.0,
             multi_precision=True)
-    elif use_lars:
+    elif optimizer == "Lars":
         optimizer = paddle.fluid.optimizer.LarsMomentumOptimizer(
             learning_rate=0.001,
             momentum=0.9,
@@ -177,41 +174,27 @@ def test_resnet_pure_fp16(self):
         if not fluid.core.is_compiled_with_cuda():
             return
 
-        def do_test(use_nesterov=False, use_adam=False, use_lars=False):
-            assert not (use_adam and
-                        use_lars), "cannot use adam and lars at the same time"
-            if use_adam:
+        def do_test(use_nesterov=False, optimizer=""):
+            if optimizer == "Adam":
                 suffix = "use Adam"
-            elif use_lars:
+            elif optimizer == "Lars":
                 suffix = "use Lars"
             else:
                 suffix = "with Nesterov" if use_nesterov else "without Nesterov"
             with self.scope_prog_guard():
                 print("-----------------FP16 Train {}-----------------".format(
                     suffix))
-                if use_lars:
-                    train_loss_fp16, test_loss_fp16 = train(
-                        use_pure_fp16=True,
-                        use_nesterov=use_nesterov,
-                        use_lars=use_lars)
-                else:
-                    train_loss_fp16, test_loss_fp16 = train(
-                        use_pure_fp16=True,
-                        use_nesterov=use_nesterov,
-                        use_adam=use_adam)
+                train_loss_fp16, test_loss_fp16 = train(
+                    use_pure_fp16=True,
+                    use_nesterov=use_nesterov,
+                    optimizer=optimizer)
             with self.scope_prog_guard():
                 print("-----------------FP32 Train {}-----------------".format(
                     suffix))
-                if use_lars:
-                    train_loss_fp32, test_loss_fp32 = train(
-                        use_pure_fp16=False,
-                        use_nesterov=use_nesterov,
-                        use_lars=use_lars)
-                else:
-                    train_loss_fp32, test_loss_fp32 = train(
-                        use_pure_fp16=False,
-                        use_nesterov=use_nesterov,
-                        use_adam=use_adam)
+                train_loss_fp32, test_loss_fp32 = train(
+                    use_pure_fp16=False,
+                    use_nesterov=use_nesterov,
+                    optimizer=optimizer)
 
             self.assertTrue(
                 np.allclose(
@@ -232,8 +215,8 @@ def do_test(use_nesterov=False, use_adam=False, use_lars=False):
 
         do_test(use_nesterov=False)
         do_test(use_nesterov=True)
-        do_test(use_adam=True)
-        do_test(use_lars=True)
+        do_test(optimizer="Adam")
+        do_test(optimizer="Lars")
 
     @contextlib.contextmanager
     def scope_prog_guard(self):
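With this refactor the mutually exclusive use_adam/use_lars booleans collapse into one string-valued optimizer argument, and the FP16 and FP32 runs now share a single train() call. A self-contained sketch of the same selection pattern follows; the default Momentum branch is an assumption, since the diff only shows the Adam and Lars branches:

# Mirrors the optimizer selection added to train() above (assumed default
# branch included for completeness).
import paddle

paddle.enable_static()  # the surrounding test builds static-graph programs


def build_optimizer(optimizer="", use_nesterov=False):
    if optimizer == "Adam":
        return paddle.optimizer.AdamW(
            learning_rate=0.001,
            epsilon=1e-8,
            weight_decay=0.0,
            multi_precision=True)
    elif optimizer == "Lars":
        return paddle.fluid.optimizer.LarsMomentumOptimizer(
            learning_rate=0.001, momentum=0.9, multi_precision=True)
    # Assumed fallback: plain momentum, matching the Nesterov/no-Nesterov cases.
    return paddle.optimizer.Momentum(
        learning_rate=0.001,
        momentum=0.9,
        use_nesterov=use_nesterov,
        multi_precision=True)


opt = build_optimizer(optimizer="Lars")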
18 changes: 0 additions & 18 deletions python/paddle/fluid/tests/unittests/test_imperative_optimizer.py
@@ -492,24 +492,6 @@ def test_momentum(self):
         self._check_mlp()
 
 
-class TestImperativeLarsMomentumOptimizerWithMP(TestImperativeOptimizerBase):
-    def get_optimizer_dygraph(self, parameter_list):
-        optimizer = LarsMomentumOptimizer(
-            learning_rate=0.001,
-            momentum=0.9,
-            parameter_list=parameter_list,
-            multi_precision=True)
-        return optimizer
-
-    def get_optimizer(self):
-        optimizer = LarsMomentumOptimizer(
-            learning_rate=0.001, momentum=0.9, multi_precision=True)
-        return optimizer
-
-    def test_larsmomentum(self):
-        self._check_mlp()
-
-
 class TestImperativeLarsMomentumOptimizer(TestImperativeOptimizerBase):
     def get_optimizer_dygraph(self, parameter_list):
         optimizer = LarsMomentumOptimizer(
18 changes: 0 additions & 18 deletions (fourth changed file)
@@ -581,24 +581,6 @@ def test_momentum(self):
         self._check_mlp()
 
 
-class TestImperativeLarsMomentumOptimizerWithMP(TestImperativeOptimizerBase):
-    def get_optimizer_dygraph(self, parameter_list):
-        optimizer = LarsMomentumOptimizer(
-            learning_rate=0.001,
-            momentum=0.9,
-            parameter_list=parameter_list,
-            multi_precision=True)
-        return optimizer
-
-    def get_optimizer(self):
-        optimizer = LarsMomentumOptimizer(
-            learning_rate=0.001, momentum=0.9, multi_precision=True)
-        return optimizer
-
-    def test_larsmomentum(self):
-        self._check_mlp()
-
-
 class TestImperativeLarsMomentumOptimizer(TestImperativeOptimizerBase):
     def get_optimizer_dygraph(self, parameter_list):
         optimizer = LarsMomentumOptimizer(
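Both deletions above remove the same TestImperativeLarsMomentumOptimizerWithMP case from the two imperative-optimizer test files. For reference, a standalone dygraph sketch of the construction those tests exercised; the Linear layer is a hypothetical stand-in for the MLP that _check_mlp builds:

# Illustrative only; mirrors the optimizer set up by the deleted test class.
import paddle
from paddle.fluid.optimizer import LarsMomentumOptimizer

linear = paddle.nn.Linear(8, 8)  # hypothetical stand-in for the test MLP
optimizer = LarsMomentumOptimizer(
    learning_rate=0.001,
    momentum=0.9,
    parameter_list=linear.parameters(),
    multi_precision=True)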

1 comment on commit b7e2337

@paddle-bot-old

Congratulations! Your pull request passed all required CI. You can ask the reviewer(s) to approve and merge. 🎉
