Commit

fix some flake8 errors
zhiboniu committed Jun 10, 2021
1 parent ec14baa commit bf931ab
Showing 4 changed files with 31 additions and 22 deletions.
2 changes: 1 addition & 1 deletion python/paddle/hapi/callbacks.py
@@ -630,7 +630,7 @@ def make_optimizer(parameters=None):
         boundaries=boundaries, values=values)
     learning_rate = paddle.optimizer.lr.LinearWarmup(
         learning_rate=learning_rate,
-        warmup_steps=wamup_epochs,
+        warmup_steps=wamup_steps,
         start_lr=base_lr / 5.,
         end_lr=base_lr,
         verbose=True)
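The fix above replaces `wamup_epochs`, a name the example never defines, with the `wamup_steps` variable it does define — presumably flagged by flake8's undefined-name check (F821), or caught during the same cleanup. A runnable sketch of the corrected snippet, assuming paddle is installed, with placeholder values invented here (the `wamup` spelling is kept exactly as it appears in the source):

import paddle

# Placeholder values assumed for illustration only.
base_lr = 0.1
boundaries = [30, 60]
values = [base_lr, base_lr * 0.1, base_lr * 0.01]
wamup_steps = 5

learning_rate = paddle.optimizer.lr.PiecewiseDecay(
    boundaries=boundaries, values=values)
learning_rate = paddle.optimizer.lr.LinearWarmup(
    learning_rate=learning_rate,
    warmup_steps=wamup_steps,  # was `wamup_epochs`, which is defined nowhere
    start_lr=base_lr / 5.,
    end_lr=base_lr,
    verbose=True)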
47 changes: 28 additions & 19 deletions python/paddle/hapi/model.py
@@ -30,20 +30,28 @@
 import paddle
 from paddle import fluid
 from paddle.fluid import core
-from paddle.fluid.framework import in_dygraph_mode, Variable, ParamBase, _current_expected_place
-from paddle.fluid.framework import in_dygraph_mode, Variable, _get_paddle_place
+from paddle.fluid.framework import in_dygraph_mode
+from paddle.fluid.framework import Variable
+from paddle.fluid.framework import ParamBase
+from paddle.fluid.framework import _current_expected_place
+from paddle.fluid.framework import _get_paddle_place
+from paddle.fluid.framework import _current_expected_place as _get_device
 from paddle.fluid.executor import global_scope
 from paddle.fluid.io import is_belong_to_optimizer
 from paddle.fluid.dygraph.base import to_variable
 from paddle.fluid.dygraph.parallel import ParallelEnv
-from paddle.fluid.dygraph.dygraph_to_static.program_translator import ProgramTranslator, FunctionSpec
-from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX, INFER_PARAMS_SUFFIX
+from paddle.fluid.dygraph.dygraph_to_static.program_translator import ProgramTranslator
+from paddle.fluid.dygraph.dygraph_to_static.program_translator import FunctionSpec
+from paddle.fluid.dygraph.io import INFER_MODEL_SUFFIX
+from paddle.fluid.dygraph.io import INFER_PARAMS_SUFFIX
 from paddle.fluid.layers.utils import flatten
 from paddle.fluid.layers import collective
 
-from paddle.io import DataLoader, Dataset, DistributedBatchSampler
-from paddle.fluid.executor import scope_guard, Executor
+from paddle.io import DataLoader
+from paddle.io import Dataset
+from paddle.io import DistributedBatchSampler
+from paddle.fluid.executor import scope_guard
+from paddle.fluid.executor import Executor
 from paddle.fluid.dygraph.layers import Layer
 from paddle.metric import Metric
 from paddle.static import InputSpec as Input
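Two things are happening in this hunk: the two deleted `paddle.fluid.framework` lines both imported `in_dygraph_mode` and `Variable`, which flake8 reports as a redefinition (F811), and both lines ran well past the default 79-character limit (E501). Rewriting to one import per line removes the duplicates, keeps every line short, and makes later additions or removals one-line diffs. A minimal, runnable sketch of the pattern, with stdlib names standing in for the paddle ones:

# Before: duplicated names spread over combined import lines
# (a long combined line is also E501 territory in the real file).
from os.path import join, split, basename
from os.path import join, split, dirname   # F811: join and split re-imported

# After: one import per line, no duplicates.
from os.path import join
from os.path import split
from os.path import basename
from os.path import dirname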
@@ -166,7 +174,6 @@ def init_communicator(program, rank, nranks, wait_port, current_endpoint,
         name=unique_name.generate('hccl_id'),
         persistable=True,
         type=core.VarDesc.VarType.RAW)
-    endpoint_to_index_map = {e: idx for idx, e in enumerate(endpoints)}
     block.append_op(
         type='c_gen_hccl_id',
         inputs={},
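The deleted line built `endpoint_to_index_map` and, as far as the diff shows, nothing ever read it — the kind of dead assignment flake8 presumably reported as F841 (local variable assigned to but never used). Removing it changes no behavior. A hypothetical reduction of the pattern:

def rank_of(current_endpoint, endpoints):
    # F841: assigned but never used afterwards.
    endpoint_to_index_map = {e: idx for idx, e in enumerate(endpoints)}
    return endpoints.index(current_endpoint)

def rank_of_fixed(current_endpoint, endpoints):
    # Fixed: the dead assignment is simply deleted.
    return endpoints.index(current_endpoint)

assert rank_of_fixed("b", ["a", "b"]) == 1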
@@ -1363,8 +1370,9 @@ def _check_pure_fp16_configs():
     # pure float16 training has some restricts now
     if self._adapter._amp_level == "O2":
         if in_dygraph_mode():
-            warnings.warn("Pure float16 training is not supported in dygraph mode now, "\
-                "and it will be supported in future version.")
+            warnings.warn(
+                "Pure float16 training is not supported in dygraph mode now, and it will be supported in future version."
+            )
         else:
             # grad clip is not supported in pure fp16 training now
             assert self._optimizer._grad_clip is None, \
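Here the trailing backslash sits inside the already-open `warnings.warn(...)` parentheses, which flake8 flags as E502 (the backslash is redundant between brackets); the two `raise` rewrites further down fix the same pattern. Note that the `assert ..., \` continuation in the context above survives, because that backslash is not inside any bracket. A minimal before/after sketch with an abridged message:

import warnings

# E502: redundant backslash -- the open parenthesis already lets the
# line continue.
warnings.warn("pure float16 training is not supported now, "\
              "and it will be supported in a future version.")

# Fixed: rely on implicit continuation inside the parentheses.
warnings.warn(
    "pure float16 training is not supported now, "
    "and it will be supported in a future version.")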
@@ -1398,8 +1406,7 @@ def _check_pure_fp16_configs():
 
     if 'use_pure_fp16' in amp_configs:
         raise ValueError(
-            "''use_pure_fp16' is an invalid parameter, "
-            "the level of mixed precision training only depends on 'O1' or 'O2'."
+            "'use_pure_fp16' is an invalid parameter, the level of mixed precision training only depends on 'O1' or 'O2'."
         )
 
     _check_pure_fp16_configs()
@@ -1427,9 +1434,8 @@ def _check_amp_configs(amp_config_key_set):
     }
     if amp_config_key_set - accepted_param_set:
         raise ValueError(
-            "Except for 'level', the keys of 'amp_configs' must be accepted by mixed precision APIs, "
-            "but {} could not be recognized.".format(
-                tuple(amp_config_key_set - accepted_param_set)))
+            "Except for 'level', the keys of 'amp_configs' must be accepted by mixed precision APIs, but {} could not be recognized.".
+            format(tuple(amp_config_key_set - accepted_param_set)))
 
     if 'use_fp16_guard' in amp_config_key_set:
         if in_dygraph_mode():
@@ -1501,8 +1507,9 @@ def prepare(self, optimizer=None, loss=None, metrics=None,
         self._optimizer = optimizer
         if loss is not None:
             if not isinstance(loss, paddle.nn.Layer) and not callable(loss):
-                raise TypeError("'loss' must be sub classes of " \
-                    "`paddle.nn.Layer` or any callable function.")
+                raise TypeError(
+                    "'loss' must be sub classes of `paddle.nn.Layer` or any callable function."
+                )
         self._loss = loss
 
         metrics = metrics or []
@@ -2122,9 +2129,11 @@ def _verify_spec(self, specs, shapes=None, dtypes=None, is_input=False):
             else:
                 out_specs = to_list(specs)
         elif isinstance(specs, dict):
-            assert is_input == False
-            out_specs = [specs[n] \
-                for n in extract_args(self.network.forward) if n != 'self']
+            assert is_input is False
+            out_specs = [
+                specs[n] for n in extract_args(self.network.forward)
+                if n != 'self'
+            ]
         else:
             out_specs = to_list(specs)
         # Note: checks each element has specificed `name`.
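`assert is_input == False` compares by equality, which flake8 flags as E712 (comparison to False); for a real boolean the result is the same, but `is False` — or plain `not is_input` — states the intent without going through `__eq__`. The list comprehension in the same hunk also loses a bracket-redundant backslash, the E502 pattern noted earlier. A small illustration:

is_input = False

assert is_input == False  # E712: equality comparison to False
assert is_input is False  # accepted
assert not is_input       # also accepted, and the most idiomatic spelling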
2 changes: 1 addition & 1 deletion python/paddle/vision/transforms/functional_cv2.py
@@ -51,7 +51,7 @@ def to_tensor(pic, data_format='CHW'):
     """
 
-    if not data_format in ['CHW', 'HWC']:
+    if data_format not in ['CHW', 'HWC']:
         raise ValueError('data_format should be CHW or HWC. Got {}'.format(
             data_format))
 
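`not data_format in [...]` parses as `not (data_format in [...])`, so the behavior is unchanged, but flake8 flags the spelling as E713 and prefers the single membership operator `not in`; the identical one-line fix lands in the PIL backend below. A small illustration:

data_format = 'HWC'  # a value that passes the check

# E713: works, but reads as two separate operators.
if not data_format in ['CHW', 'HWC']:
    raise AssertionError

# Preferred: one membership test.
if data_format not in ['CHW', 'HWC']:
    raise ValueError('data_format should be CHW or HWC. Got {}'.format(
        data_format))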
2 changes: 1 addition & 1 deletion python/paddle/vision/transforms/functional_pil.py
@@ -59,7 +59,7 @@ def to_tensor(pic, data_format='CHW'):
     """
 
-    if not data_format in ['CHW', 'HWC']:
+    if data_format not in ['CHW', 'HWC']:
         raise ValueError('data_format should be CHW or HWC. Got {}'.format(
             data_format))
 

1 comment on commit bf931ab

paddle-bot-old (bot) commented on bf931ab on Jun 10, 2021

🕵️ CI failures summary

🔍PR: #33307 Commit ID: bf931ab contains failed CI.
