[Enhancement] Unify the use of print_log and logger #997

Merged · 1 commit · Mar 17, 2023
18 changes: 4 additions & 14 deletions mmengine/model/base_module.py
@@ -95,15 +95,12 @@ def init_weights(self):
         for sub_module in self.modules():
             sub_module._params_init_info = self._params_init_info
 
-        logger = MMLogger.get_current_instance()
-        logger_name = logger.instance_name
-
         module_name = self.__class__.__name__
         if not self._is_init:
             if self.init_cfg:
                 print_log(
                     f'initialize {module_name} with init_cfg {self.init_cfg}',
-                    logger=logger_name,
+                    logger='current',
                     level=logging.DEBUG)
 
                 init_cfgs = self.init_cfg
@@ -145,7 +142,6 @@ def init_weights(self):
                     level=logging.WARNING)
 
         if is_top_level_module:
-            # self._dump_init_info(logger_name)
             self._dump_init_info()
 
             for sub_module in self.modules():
@@ -154,14 +150,9 @@
     @master_only
     def _dump_init_info(self):
         """Dump the initialization information to a file named
-        `initialization.log.json` in workdir.
-
-        Args:
-            logger_name (str): The name of logger.
-        """
+        `initialization.log.json` in workdir."""
 
         logger = MMLogger.get_current_instance()
-        logger_name = logger.instance_name
         with_file_handler = False
         # dump the information to the logger file if there is a `FileHandler`
         for handler in logger.handlers:
@@ -176,10 +167,9 @@ def _dump_init_info(self):
                 with_file_handler = True
         if not with_file_handler:
             for name, param in self.named_parameters():
-                print_log(
+                logger.info(
                     f'\n{name} - {param.shape}: '
-                    f"\n{self._params_init_info[param]['init_info']} \n ",
-                    logger=logger_name)
+                    f"\n{self._params_init_info[param]['init_info']} \n ")
 
     def __repr__(self):
         s = super().__repr__()
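
The change above captures the pattern this PR converges on: instead of fetching the current MMLogger and threading its instance_name through call sites, code passes logger='current' to print_log and lets it resolve the active logger. A minimal before/after sketch, assuming only the mmengine.logging API visible in this diff (the logger name 'demo' and the message are illustrative):

import logging

from mmengine.logging import MMLogger, print_log

# In a real run the Runner creates the logger; creating one here makes
# it the "current" instance that logger='current' resolves to.
MMLogger.get_instance('demo', log_level='DEBUG')

# Old style: fetch the instance and pass its name around.
logger = MMLogger.get_current_instance()
print_log('initialize MyModule with init_cfg ...',
          logger=logger.instance_name,
          level=logging.DEBUG)

# New style: one call, no name threading.
print_log('initialize MyModule with init_cfg ...',
          logger='current',
          level=logging.DEBUG)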
11 changes: 5 additions & 6 deletions mmengine/model/utils.py
@@ -84,14 +84,13 @@ def traverse(grad_fn):
                         traverse(grad_fn)
 
     traverse(loss.grad_fn)
-    from mmengine.logging import MMLogger
-    logger = MMLogger.get_current_instance()
     for n, p in model.named_parameters():
         if p not in parameters_in_graph and p.requires_grad:
-            logger.log(
-                level=logging.ERROR,
-                msg=f'{n} with shape {p.size()} is not '
-                f'in the computational graph \n')
+            print_log(
+                f'{n} with shape {p.size()} is not '
+                f'in the computational graph \n',
+                logger='current',
+                level=logging.ERROR)
 
 
 def merge_dict(*args):
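
Same unification here, with the bonus that the function-local MMLogger import disappears. For reference, print_log's logger argument is flexible; if I read mmengine.logging correctly, all of the following work (the message is illustrative):

import logging

from mmengine.logging import print_log

msg = 'conv1.weight with shape (8, 3, 3, 3) is not in the computational graph'

print_log(msg)                    # logger=None: falls back to plain print()
print_log(msg, logger='silent')   # discarded entirely
print_log(msg, logger='current',  # routed to the current MMLogger,
          level=logging.ERROR)    # as the change above now does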
29 changes: 10 additions & 19 deletions mmengine/runner/runner.py
@@ -1971,13 +1971,10 @@ def resume(self,
         current_seed = self._randomness_cfg.get('seed')
         if resumed_seed is not None and resumed_seed != current_seed:
             if current_seed is not None:
-                print_log(
-                    f'The value of random seed in the '
-                    f'checkpoint "{resumed_seed}" is '
-                    f'different from the value in '
-                    f'`randomness` config "{current_seed}"',
-                    logger='current',
-                    level=logging.WARNING)
+                self.logger.warning(f'The value of random seed in the '
+                                    f'checkpoint "{resumed_seed}" is '
+                                    f'different from the value in '
+                                    f'`randomness` config "{current_seed}"')
             self._randomness_cfg.update(seed=resumed_seed)
             self.set_randomness(**self._randomness_cfg)
@@ -1988,13 +1985,11 @@ def resume(self,
         # np.ndarray, which cannot be directly judged as equal or not,
         # therefore we just compared their dumped results.
         if pickle.dumps(resumed_dataset_meta) != pickle.dumps(dataset_meta):
-            print_log(
+            self.logger.warning(
                 'The dataset metainfo from the resumed checkpoint is '
                 'different from the current training dataset, please '
                 'check the correctness of the checkpoint or the training '
-                'dataset.',
-                logger='current',
-                level=logging.WARNING)
+                'dataset.')
 
         self.message_hub.load_state_dict(checkpoint['message_hub'])
@@ -2006,11 +2001,9 @@
 
         # resume param scheduler
         if resume_param_scheduler and self.param_schedulers is None:
-            print_log(
+            self.logger.warning(
                 '`resume_param_scheduler` is True but `self.param_schedulers` '
-                'is None, so skip resuming parameter schedulers',
-                logger='current',
-                level=logging.WARNING)
+                'is None, so skip resuming parameter schedulers')
             resume_param_scheduler = False
         if 'param_schedulers' in checkpoint and resume_param_scheduler:
             self.param_schedulers = self.build_param_scheduler(  # type: ignore
@@ -2167,11 +2160,9 @@ def save_checkpoint(
 
         # save param scheduler state dict
         if save_param_scheduler and self.param_schedulers is None:
-            print_log(
+            self.logger.warning(
                 '`save_param_scheduler` is True but `self.param_schedulers` '
-                'is None, so skip saving parameter schedulers',
-                logger='current',
-                level=logging.WARNING)
+                'is None, so skip saving parameter schedulers')
             save_param_scheduler = False
         if save_param_scheduler:
             if isinstance(self.param_schedulers, dict):
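
Inside Runner the indirection is dropped the other way: the runner builds and owns its MMLogger, so self.logger.warning(...) reaches the same handlers that print_log(..., logger='current') would resolve to, with one less hop. A toy sketch of that equivalence (MiniRunner is illustrative, not mmengine's Runner):

import logging

from mmengine.logging import MMLogger, print_log


class MiniRunner:
    """Illustrative stand-in for mmengine.runner.Runner."""

    def __init__(self):
        # Building the logger registers it as the current MMLogger instance.
        self.logger = MMLogger.get_instance('mini_runner')

    def warn_seed_mismatch(self, resumed_seed, current_seed):
        # Direct call on the owned logger...
        self.logger.warning(f'checkpoint seed "{resumed_seed}" differs '
                            f'from configured seed "{current_seed}"')
        # ...lands in the same handlers as the indirect form:
        print_log('same destination either way',
                  logger='current',
                  level=logging.WARNING)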
16 changes: 9 additions & 7 deletions mmengine/visualization/vis_backend.py
@@ -1,6 +1,7 @@
 # Copyright (c) OpenMMLab. All rights reserved.
 import copy
 import functools
+import logging
 import os
 import os.path as osp
 import warnings
@@ -13,7 +14,7 @@
 
 from mmengine.config import Config
 from mmengine.fileio import dump
-from mmengine.logging import MMLogger
+from mmengine.logging import print_log
 from mmengine.registry import VISBACKENDS
 from mmengine.utils.dl_utils import TORCH_VERSION
@@ -42,12 +43,13 @@ def wrapper(obj: object, *args, **kwargs):
         # `_env_initialized` is False, call `_init_env` and set
         # `_env_initialized` to True
         if not getattr(obj, '_env_initialized', False):
-            logger = MMLogger.get_current_instance()
-            logger.debug('Attribute `_env_initialized` is not defined in '
-                         f'{type(obj)} or `{type(obj)}._env_initialized is '
-                         'False, `_init_env` will be called and '
-                         f'{type(obj)}._env_initialized will be set to '
-                         'True')
+            print_log(
+                'Attribute `_env_initialized` is not defined in '
+                f'{type(obj)} or `{type(obj)}._env_initialized is '
+                'False, `_init_env` will be called and '
+                f'{type(obj)}._env_initialized will be set to True',
+                logger='current',
+                level=logging.DEBUG)
             obj._init_env()  # type: ignore
             obj._env_initialized = True  # type: ignore
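
The wrapper patched above is a lazy-initialization guard: backend setup (_init_env) is deferred until first use, and the debug trace now flows through print_log like everywhere else. A minimal sketch of such a guard, assuming only functools and the print_log API (the decorator body below is a simplification, not mmengine's exact code):

import functools
import logging

from mmengine.logging import print_log


def force_init_env(old_func):
    """Run `obj._init_env()` once before `old_func` executes."""

    @functools.wraps(old_func)
    def wrapper(obj, *args, **kwargs):
        if not getattr(obj, '_env_initialized', False):
            # Log through the current MMLogger, matching the patched code.
            print_log(
                f'`_init_env` will be called for {type(obj)}',
                logger='current',
                level=logging.DEBUG)
            obj._init_env()
            obj._env_initialized = True
        return old_func(obj, *args, **kwargs)

    return wrapper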