Apply fix for paddle.fluid import
piotrekobi committed Sep 22, 2021
1 parent 314f214 commit fc3b122
Showing 1 changed file with 11 additions and 13 deletions.
24 changes: 11 additions & 13 deletions python/paddle/fluid/dygraph/amp/auto_cast.py
@@ -23,7 +23,6 @@
 import paddle
 import operator
 import types
-import paddle.fluid as fluid

 __all__ = ['amp_guard', 'amp_decorate']

@@ -220,16 +219,16 @@ def amp_guard(enable=True,
          .. code-block:: python
             import numpy as np
             import paddle.fluid as fluid
             import paddle
             data = np.random.uniform(-1, 1, [10, 3, 32, 32]).astype('float32')
-            with fluid.dygraph.guard():
-                conv2d = fluid.dygraph.Conv2D(3, 2, 3)
-                data = fluid.dygraph.to_variable(data)
-                with fluid.dygraph.amp_guard():
+            with paddle.fluid.dygraph.guard():
+                conv2d = paddle.fluid.dygraph.Conv2D(3, 2, 3)
+                data = paddle.fluid.dygraph.to_variable(data)
+                with paddle.fluid.dygraph.amp_guard():
                     conv = conv2d(data)
                     print(conv.dtype) # FP16
-                with fluid.dygraph.amp_guard(enable=False):
+                with paddle.fluid.dygraph.amp_guard(enable=False):
                     conv = conv2d(data)
                     print(conv.dtype) # FP32
@@ -301,7 +300,7 @@ def __init__(self, save_dtype):
     def __call__(self, state_dict):
         for key in state_dict:
             param = state_dict[key]
-            with fluid.dygraph.guard():
+            with paddle.fluid.dygraph.guard():
                 param_applied = paddle.cast(param, self._save_dtype)
                 param_applied.name = param.name
                 state_dict[key] = param_applied
@@ -335,16 +334,15 @@ def amp_decorate(models,
             # required: gpu
             # Demo1: single model and optimizer:
             import paddle
-            import paddle.fluid as fluid
             model = paddle.nn.Conv2D(3, 2, 3, bias_attr=False)
             optimzier = paddle.optimizer.SGD(parameters=model.parameters())
-            model, optimizer = fluid.dygraph.amp_decorate(models=model, optimizers=optimzier, level='O2')
+            model, optimizer = paddle.fluid.dygraph.amp_decorate(models=model, optimizers=optimzier, level='O2')
             data = paddle.rand([10, 3, 32, 32])
-            with fluid.dygraph.amp_guard(enable=True, custom_white_list=None, custom_black_list=None, level='O2'):
+            with paddle.fluid.dygraph.amp_guard(enable=True, custom_white_list=None, custom_black_list=None, level='O2'):
                 output = model(data)
                 print(output.dtype) # FP16
@@ -353,11 +351,11 @@
             model2 = paddle.nn.Conv2D(3, 2, 3, bias_attr=False)
             optimizer2 = paddle.optimizer.Adam(parameters=model2.parameters())
-            models, optimizers = fluid.dygraph.amp_decorate(models=[model, model2], optimizers=[optimzier, optimizer2], level='O2')
+            models, optimizers = paddle.fluid.dygraph.amp_decorate(models=[model, model2], optimizers=[optimzier, optimizer2], level='O2')
             data = paddle.rand([10, 3, 32, 32])
-            with fluid.dygraph.amp_guard(enable=True, custom_white_list=None, custom_black_list=None, level='O2'):
+            with paddle.fluid.dygraph.amp_guard(enable=True, custom_white_list=None, custom_black_list=None, level='O2'):
                 output = models[0](data)
                 output2 = models[1](data)
                 print(output.dtype) # FP16
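For reference, the updated amp_guard docstring example can be run as a standalone script once this fix is in place. This is a minimal sketch, assuming a Paddle 2.x install where the paddle.fluid.dygraph AMP helpers are still exposed and a GPU is available (without one, the guard falls back to FP32); it simply restates the example from the diff above with the fully qualified paths.

    import numpy as np
    import paddle

    data = np.random.uniform(-1, 1, [10, 3, 32, 32]).astype('float32')
    with paddle.fluid.dygraph.guard():
        # Conv2D, to_variable and amp_guard are reached through the fully
        # qualified paddle.fluid path; no `import paddle.fluid as fluid`
        # alias is needed anymore.
        conv2d = paddle.fluid.dygraph.Conv2D(3, 2, 3)
        data = paddle.fluid.dygraph.to_variable(data)
        with paddle.fluid.dygraph.amp_guard():
            conv = conv2d(data)
            print(conv.dtype)  # FP16 inside the guard on GPU
        with paddle.fluid.dygraph.amp_guard(enable=False):
            conv = conv2d(data)
            print(conv.dtype)  # FP32 when the guard is disabled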
