add relu6 #1193

Merged · 1 commit · Oct 18, 2023
5 changes: 5 additions & 0 deletions src/TensorFlowNET.Core/APIs/tf.nn.cs
@@ -101,6 +101,8 @@ public Tensor embedding_lookup(Tensor @params,
name: name);

public IActivation relu() => new relu();


public IActivation swish() => new swish();
public IActivation tanh() => new tanh();

@@ -111,6 +113,9 @@ public Tensor tanh(Tensor x, string name = null)
public Tensor relu(Tensor features, string name = null)
=> gen_nn_ops.relu(features, name);

public Tensor relu6(Tensor features, string name = null)
=> gen_nn_ops.relu6(features, name);

public Tensor[] fused_batch_norm(Tensor x,
Tensor scale,
Tensor offset,
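The new tf.nn.relu6 wraps gen_nn_ops.relu6, computing min(max(x, 0), 6) element-wise. A minimal usage sketch (values are illustrative; assumes the standard TensorFlow.NET binding):

    using static Tensorflow.Binding;

    var x = tf.constant(new float[] { -3.0f, 2.0f, 8.0f });
    var y = tf.nn.relu6(x);  // element-wise min(max(x, 0), 6) -> [0, 2, 6]
    print(y);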
1 change: 1 addition & 0 deletions src/TensorFlowNET.Core/Keras/Activations/Activations.cs
@@ -32,6 +32,7 @@ public interface IActivationsApi
Activation Linear { get; }

Activation Relu { get; }
Activation Relu6 { get; }

Activation Sigmoid { get; }

3 changes: 3 additions & 0 deletions src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs
@@ -180,6 +180,9 @@ public ILayer LayerNormalization(Axis? axis,
public ILayer Normalization(Shape? input_shape = null, int? axis = -1, float? mean = null, float? variance = null, bool invert = false);
public ILayer LeakyReLU(float alpha = 0.3f);

public ILayer ReLU6();


public IRnnCell LSTMCell(int uints,
string activation = "tanh",
string recurrent_activation = "sigmoid",
7 changes: 7 additions & 0 deletions src/TensorFlowNET.Keras/Activations.cs
@@ -20,6 +20,11 @@ public class Activations: IActivationsApi
Name = "relu",
ActivationFunction = (features, name) => tf.Context.ExecuteOp("Relu", name, new ExecuteOpArgs(features))
};
private static Activation _relu6 = new Activation()
{
Name = "relu6",
ActivationFunction = (features, name) => tf.Context.ExecuteOp("Relu6", name, new ExecuteOpArgs(features))
};
private static Activation _sigmoid = new Activation()
{
Name = "sigmoid",
@@ -55,6 +60,7 @@ static Activations()
_nameActivationMap = new Dictionary<string, Activation>();

RegisterActivation(_relu);
RegisterActivation(_relu6);
RegisterActivation(_linear);
RegisterActivation(_sigmoid);
RegisterActivation(_softmax);
@@ -65,6 +71,7 @@ public Activation Linear => _linear;
public Activation Linear => _linear;

public Activation Relu => _relu;
public Activation Relu6 => _relu6;

public Activation Sigmoid => _sigmoid;

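Registering _relu6 in the name map makes the activation resolvable from the string "relu6" as well as through the new Relu6 property. A sketch of both styles (assuming the Dense overloads that accept an activation name or an Activation object, as used elsewhere in TF.NET):

    using static Tensorflow.KerasApi;

    var byName   = keras.layers.Dense(64, activation: "relu6");
    var byObject = keras.layers.Dense(64, activation: keras.activations.Relu6);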
25 changes: 25 additions & 0 deletions src/TensorFlowNET.Keras/Layers/Activation/ReLu6.cs
@@ -0,0 +1,25 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using Tensorflow.Common.Types;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers
{
/// <summary>
/// Rectified Linear Unit activation capped at a maximum value of 6: relu6(x) = min(max(x, 0), 6).
/// </summary>
public class ReLu6 : Layer
{
public ReLu6() : base(new LayerArgs { })
{
}

protected override Tensors Call(Tensors inputs, Tensors state = null, bool? training = null, IOptionalArgs? optional_args = null)
{
return tf.nn.relu6(inputs);
}
}
}
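Because Call simply forwards to tf.nn.relu6, the layer is stateless and takes no constructor arguments. A usage sketch (assuming Layer.Apply as the standard invocation path in TF.NET, with illustrative values):

    using Tensorflow.Keras.Layers;
    using static Tensorflow.Binding;

    var relu6 = new ReLu6();
    var output = relu6.Apply(tf.constant(new float[] { -1.0f, 3.0f, 9.0f }));
    // output -> [0, 3, 6]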
9 changes: 9 additions & 0 deletions src/TensorFlowNET.Keras/Layers/LayersApi.cs
@@ -735,6 +735,15 @@ public ILayer LeakyReLU(float alpha = 0.3f)
});


/// <summary>
/// Rectified Linear Unit activation capped at a maximum value of 6.
/// </summary>
/// <returns></returns>
public ILayer ReLU6()
=> new ReLu6();


public IRnnCell SimpleRNNCell(
int units,
string activation = "tanh",
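Taken together, the PR exposes relu6 at three levels: the raw op tf.nn.relu6, the named Keras activation "relu6", and the standalone keras.layers.ReLU6() layer. A closing sketch combining them in a model (assuming TF.NET's usual Sequential signature; layer sizes are illustrative):

    using System.Collections.Generic;
    using Tensorflow.Keras;
    using static Tensorflow.KerasApi;

    var model = keras.Sequential(new List<ILayer>
    {
        keras.layers.Dense(128),
        keras.layers.ReLU6(),
        keras.layers.Dense(10, activation: "softmax")
    });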