unit double (PaddlePaddle#32908)
Thunderbrook committed May 18, 2021
1 parent 7941571 commit e0aa1d6
Showing 2 changed files with 18 additions and 4 deletions.
python/paddle/fluid/incubate/fleet/parameter_server/pslib/node.py (16 additions & 3 deletions)
@@ -123,7 +123,7 @@ def add_sparse_table(self, table_id, strategy):
         support_accessor_class = [
             'DownpourFeatureValueAccessor', 'DownpourCtrAccessor',
             'DownpourSparseValueAccessor', 'DownpourCtrDoubleAccessor',
-            'DownpourUnitAccessor'
+            'DownpourUnitAccessor', 'DownpourDoubleUnitAccessor'
         ]
         if strategy.get('sparse_accessor_class') is not None:
             accessor_class = strategy.get('sparse_accessor_class')
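
This hunk whitelists the new accessor: before the change, passing 'DownpourDoubleUnitAccessor' through the strategy would fail the support check. A minimal sketch of the user-side selection, assuming a plain dict for the strategy; only the 'sparse_accessor_class' key is taken from the code above, everything else is illustrative:

```python
# Hypothetical config_fleet-style strategy snippet.
strategy = {"sparse_accessor_class": "DownpourDoubleUnitAccessor"}

support_accessor_class = [
    'DownpourFeatureValueAccessor', 'DownpourCtrAccessor',
    'DownpourSparseValueAccessor', 'DownpourCtrDoubleAccessor',
    'DownpourUnitAccessor', 'DownpourDoubleUnitAccessor'
]
# With this commit the new class passes the same support check.
assert strategy.get('sparse_accessor_class') in support_accessor_class
```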
@@ -254,7 +254,7 @@ def add_sparse_table(self, table_id, strategy):
             table2.param = 2
             table2.converter = converter
             table2.deconverter = deconverter
-        elif accessor_class == 'DownpourUnitAccessor':
+        elif accessor_class == 'DownpourUnitAccessor' or accessor_class == 'DownpourDoubleUnitAccessor':
             self.add_sparse_table_common_config(table, strategy)
             self.add_sparse_optimizer(table.accessor.embed_sgd_param,
                                       strategy, "embed_")
@@ -380,7 +380,7 @@ def add_data_norm_table(self, table_id, learning_rate, param_var, grad_var,
         table.accessor.fea_dim = fea_dim

     def add_sparse_optimizer(self, sgd, strategy, prefix):
-        optimizer_name = strategy.get(prefix + "sparse_optimizer", "adam")
+        optimizer_name = strategy.get(prefix + "sparse_optimizer", "adagrad")
         sgd.name = optimizer_name
         if optimizer_name == "naive":
             sgd.naive.learning_rate = \
@@ -394,6 +394,19 @@ def add_sparse_optimizer(self, sgd, strategy, prefix):
                 strategy.get(prefix + 'sparse_learning_rate', 0.05)
             sgd.adagrad.initial_range = \
                 strategy.get(prefix + 'sparse_initial_range', 1e-4)
+            if prefix == "embed_":
+                sgd.adagrad.initial_range = 0
             sgd.adagrad.initial_g2sum = strategy.get(
                 prefix + 'sparse_initial_g2sum', 3)
             bounds = strategy.get(prefix + 'sparse_weight_bounds', [-10, 10])
             sgd.adagrad.weight_bounds.extend(bounds)
+        elif optimizer_name == "std_adagrad":
+            sgd.adagrad.learning_rate = \
+                strategy.get(prefix + 'sparse_learning_rate', 0.05)
+            sgd.adagrad.initial_range = \
+                strategy.get(prefix + 'sparse_initial_range', 1e-4)
+            if prefix == "embed_":
+                sgd.adagrad.initial_range = 0
+            sgd.adagrad.initial_g2sum = strategy.get(
+                prefix + 'sparse_initial_g2sum', 3)
+            bounds = strategy.get(prefix + 'sparse_weight_bounds', [-10, 10])
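
Two behavior changes meet in this hunk: the fallback optimizer name is now "adagrad" rather than "adam", and a new "std_adagrad" branch reuses the adagrad defaults, with the "embed_" prefix forcing initial_range to 0 in both branches. A standalone sketch of that resolution logic, assuming plain dicts in place of the protobuf fields the real method fills in; the "embedx_" prefix is an assumption about the other call site:

```python
def resolve_sparse_optimizer(strategy, prefix):
    # Fallback changed by this commit: "adagrad" instead of "adam".
    name = strategy.get(prefix + "sparse_optimizer", "adagrad")
    cfg = {"name": name}
    if name in ("adagrad", "std_adagrad"):
        cfg["learning_rate"] = strategy.get(
            prefix + "sparse_learning_rate", 0.05)
        cfg["initial_range"] = strategy.get(
            prefix + "sparse_initial_range", 1e-4)
        if prefix == "embed_":
            cfg["initial_range"] = 0  # embed slot starts from zero
        cfg["initial_g2sum"] = strategy.get(
            prefix + "sparse_initial_g2sum", 3)
        cfg["weight_bounds"] = strategy.get(
            prefix + "sparse_weight_bounds", [-10, 10])
    return cfg

print(resolve_sparse_optimizer({}, "embed_"))   # initial_range forced to 0
print(resolve_sparse_optimizer({}, "embedx_"))  # keeps the 1e-4 default
```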
Second changed file (2 additions & 1 deletion):
@@ -319,6 +319,7 @@ def _minimize(self,
             # user do not have to set it in config_fleet
             if accessor == "DownpourFeatureValueAccessor" \
                     or accessor == "DownpourCtrAccessor" \
+                    or accessor == "DownpourDoubleUnitAccessor" \
                     or accessor == "DownpourUnitAccessor":
                 if st.get("sparse_embedx_dim") is not None \
                         and st["sparse_embedx_dim"] != emb_to_size[key] - 3:
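
The new accessor joins the classes whose embedx dimension is validated against the embedding variable's width minus 3; these accessors appear to keep 3 leading float slots ahead of the embedx vector (commonly show, click, and the 1-dim embed weight in pslib's CTR layout). An illustrative recheck with stand-in values; emb_to_size, st, and key mimic the surrounding _minimize code:

```python
emb_to_size = {"embedding_0.w_0": 11}   # total embedding width (stand-in)
st = {"sparse_embedx_dim": 8}           # user-declared embedx width
key = "embedding_0.w_0"

accessor = "DownpourDoubleUnitAccessor"
if accessor in ("DownpourFeatureValueAccessor", "DownpourCtrAccessor",
                "DownpourDoubleUnitAccessor", "DownpourUnitAccessor"):
    if st.get("sparse_embedx_dim") is not None \
            and st["sparse_embedx_dim"] != emb_to_size[key] - 3:
        raise ValueError("sparse_embedx_dim must equal emb width - 3")
print("dimension check passed")  # 8 == 11 - 3
```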
@@ -534,7 +535,7 @@ def _minimize(self,
         if server._server.downpour_server_param.downpour_table_param[
                 0].accessor.accessor_class in [
                     "DownpourCtrAccessor", "DownpourCtrDoubleAccessor",
-                    "DownpourUnitAccessor"
+                    "DownpourUnitAccessor", "DownpourDoubleUnitAccessor"
                 ]:
             opt_info["dump_slot"] = True
         elif server._server.downpour_server_param.downpour_table_param[
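
Finally, the new accessor also turns on slot dumping. A minimal sketch of the toggle; DUMP_SLOT_ACCESSORS and opt_info are stand-ins for the server proto fields the real check reads:

```python
DUMP_SLOT_ACCESSORS = [
    "DownpourCtrAccessor", "DownpourCtrDoubleAccessor",
    "DownpourUnitAccessor", "DownpourDoubleUnitAccessor",  # new in e0aa1d6
]

opt_info = {}
accessor_class = "DownpourDoubleUnitAccessor"
opt_info["dump_slot"] = accessor_class in DUMP_SLOT_ACCESSORS
print(opt_info)  # {'dump_slot': True}
```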
