From 79627a78d752ca7459bf84b86c3e4d375c7e812b Mon Sep 17 00:00:00 2001
From: wj-Mcat <1435130236@qq.com>
Date: Wed, 11 Jan 2023 14:21:38 +0800
Subject: [PATCH 1/4] update config

---
 paddlenlp/transformers/configuration_utils.py | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/paddlenlp/transformers/configuration_utils.py b/paddlenlp/transformers/configuration_utils.py
index fde4640c33ca..fb2f238d6e2e 100644
--- a/paddlenlp/transformers/configuration_utils.py
+++ b/paddlenlp/transformers/configuration_utils.py
@@ -194,9 +194,9 @@ def convert_to_legacy_config(attribute_map: Dict[str, str], config: Dict[str, An
             init_arg = convert_to_legacy_config(attribute_map, init_arg)
             args.append(init_arg)
         config["init_args"] = args
-
-    for standard_field, paddle_field in attribute_map.items():
-        config[paddle_field] = config.pop(standard_field, None) or config.pop(paddle_field, None)
+    else:
+        for standard_field, paddle_field in attribute_map.items():
+            config[paddle_field] = config.pop(standard_field, None) or config.pop(paddle_field, None)
     return config


@@ -865,6 +865,8 @@ def from_dict(cls, config_dict: Dict[str, Any], **kwargs) -> "PretrainedConfig":
         # convert local config to legacy config
         config_dict = convert_to_legacy_config(cls.attribute_map, config_dict)

+        config_dict = flatten_model_config(config_dict)
+
         config = cls(**config_dict)

         if hasattr(config, "pruned_heads"):

From 3005a39f6c69e3df4f6929af1647e8902651105b Mon Sep 17 00:00:00 2001
From: wj-Mcat <1435130236@qq.com>
Date: Wed, 11 Jan 2023 14:39:56 +0800
Subject: [PATCH 2/4] update legacy config

---
 paddlenlp/transformers/configuration_utils.py | 20 +++++++------------
 1 file changed, 7 insertions(+), 13 deletions(-)

diff --git a/paddlenlp/transformers/configuration_utils.py b/paddlenlp/transformers/configuration_utils.py
index fb2f238d6e2e..be3d0034e98b 100644
--- a/paddlenlp/transformers/configuration_utils.py
+++ b/paddlenlp/transformers/configuration_utils.py
@@ -729,16 +729,6 @@ def from_pretrained(

         config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)

-        # do standard config map: there are some old-school pretrained-config not refactored.
-        config_dict = convert_to_legacy_config(cls.attribute_map, config_dict)
-
-        config_dict = flatten_model_config(config_dict)
-        if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type:
-            logger.warning(
-                f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
-                f"{cls.model_type}. This is not supported for all configurations of models and can yield errors."
-            )
-
         return cls.from_dict(config_dict, **kwargs)

     @classmethod
@@ -859,14 +849,18 @@ def from_dict(cls, config_dict: Dict[str, Any], **kwargs) -> "PretrainedConfig":
             [`PretrainedConfig`]: The configuration object instantiated from those parameters.
         """
         return_unused_kwargs = kwargs.pop("return_unused_kwargs", False)
-        # Those arguments may be passed along for our internal telemetry.
-        # We remove them so they don't appear in `return_unused_kwargs`.

-        # convert local config to legacy config
+        # do standard config map: there are some old-school pretrained-config not refactored.
         config_dict = convert_to_legacy_config(cls.attribute_map, config_dict)

         config_dict = flatten_model_config(config_dict)

+        if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type:
+            logger.warning(
+                f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
+                f"{cls.model_type}. This is not supported for all configurations of models and can yield errors."
+            )
+
         config = cls(**config_dict)

         if hasattr(config, "pruned_heads"):

From 656919397b96251039eee03193df41c2167a94ed Mon Sep 17 00:00:00 2001
From: wj-Mcat <1435130236@qq.com>
Date: Wed, 11 Jan 2023 14:50:22 +0800
Subject: [PATCH 3/4] add comment for change

---
 paddlenlp/transformers/configuration_utils.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/paddlenlp/transformers/configuration_utils.py b/paddlenlp/transformers/configuration_utils.py
index be3d0034e98b..4f7f31d8068e 100644
--- a/paddlenlp/transformers/configuration_utils.py
+++ b/paddlenlp/transformers/configuration_utils.py
@@ -194,9 +194,13 @@ def convert_to_legacy_config(attribute_map: Dict[str, str], config: Dict[str, An
             init_arg = convert_to_legacy_config(attribute_map, init_arg)
             args.append(init_arg)
         config["init_args"] = args
-    else:
-        for standard_field, paddle_field in attribute_map.items():
-            config[paddle_field] = config.pop(standard_field, None) or config.pop(paddle_field, None)
+
+    # TODO(wj-Mcat): to improve compatibility for: old local config and new PretrainedConfig, eg:
+    # { "init_args": [], "init_class": "", "num_classes": 12 }
+    for standard_field, paddle_field in attribute_map.items():
+        value = config.pop(standard_field, None) or config.pop(paddle_field, None)
+        if value is not None:
+            config[paddle_field] = value
     return config


From 450436483b233158c1fb10450bcf408477ce8283 Mon Sep 17 00:00:00 2001
From: wj-Mcat <1435130236@qq.com>
Date: Wed, 11 Jan 2023 15:18:29 +0800
Subject: [PATCH 4/4] trigger cla
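Note on patches 1/4 and 3/4: together they leave the attribute-map loop in convert_to_legacy_config running unconditionally (patch 1/4 had gated it behind an else), while only writing a legacy key when a value is actually found, so configs without a mapped field no longer gain spurious None entries. Below is a minimal standalone sketch of the resulting behavior; the attribute_map used here is hypothetical, since the real maps are defined per PretrainedConfig subclass.

    from typing import Any, Dict

    def convert_to_legacy_config(attribute_map: Dict[str, str], config: Dict[str, Any]) -> Dict[str, Any]:
        # Recurse into nested init_args entries (old-style composite configs).
        if "init_args" in config:
            config["init_args"] = [convert_to_legacy_config(attribute_map, arg) for arg in config["init_args"]]
        # Rename standard fields to their legacy paddle names, preferring the
        # standard key; note that `or` also skips falsy values such as 0 or "".
        for standard_field, paddle_field in attribute_map.items():
            value = config.pop(standard_field, None) or config.pop(paddle_field, None)
            if value is not None:
                config[paddle_field] = value
        return config

    attribute_map = {"num_labels": "num_classes"}  # hypothetical mapping

    print(convert_to_legacy_config(attribute_map, {"num_labels": 12}))
    # {'num_classes': 12}
    print(convert_to_legacy_config(attribute_map, {"init_args": [{"num_labels": 3}], "num_labels": 12}))
    # {'init_args': [{'num_classes': 3}], 'num_classes': 12} -- top level mapped too, per the TODO in patch 3/4
    print(convert_to_legacy_config(attribute_map, {"hidden_size": 768}))
    # {'hidden_size': 768} -- previously this dict would also gain 'num_classes': None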
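Note on patch 2/4: the legacy-field mapping, the flatten_model_config call, and the model_type mismatch warning move out of from_pretrained and into from_dict, which from_pretrained now reaches via cls.from_dict, so dicts are normalized identically through either entry point. A usage sketch under that assumption follows; BertConfig and the weight name are illustrative, not part of this patch.

    from paddlenlp.transformers import BertConfig

    # Building a config straight from a dict now runs convert_to_legacy_config and
    # flatten_model_config before cls(**config_dict); a mismatched "model_type"
    # would log the warning here too, not only inside from_pretrained.
    config = BertConfig.from_dict({"model_type": "bert", "hidden_size": 768})

    # from_pretrained simply delegates to the same from_dict path.
    config = BertConfig.from_pretrained("bert-base-uncased")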