From bcfb64ff7c9c1485422f0d7c004863c93521f537 Mon Sep 17 00:00:00 2001
From: JamesLear92
Date: Fri, 22 Nov 2019 14:24:29 +0000
Subject: [PATCH 1/4] Added 'Mish' function to ActivationType enum

---
 NeuralNetwork.NET/APIs/Enums/ActivationType.cs | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/NeuralNetwork.NET/APIs/Enums/ActivationType.cs b/NeuralNetwork.NET/APIs/Enums/ActivationType.cs
index 7981751..e7abe65 100644
--- a/NeuralNetwork.NET/APIs/Enums/ActivationType.cs
+++ b/NeuralNetwork.NET/APIs/Enums/ActivationType.cs
@@ -1,4 +1,4 @@
-namespace NeuralNetworkNET.APIs.Enums
+namespace NeuralNetworkNET.APIs.Enums
 {
     /// <summary>
     /// Indicates an activation function to use in a neural network
@@ -59,6 +59,14 @@ public enum ActivationType : byte
         /// <summary>
         /// A linear activation function that just returns the input value
         /// </summary>
-        Identity
+        Identity,
+
+        /// <summary>
+        /// The Mish function, proposed by Diganta Misra (https://arxiv.org/abs/1908.08681)
+        /// Definition: x * tanh(ln(1 + e^x))
+        /// Implementation: x * Tanh(Softplus(x))
+        /// </summary>
+        Mish
+
     }
-}
\ No newline at end of file
+}

From 862ad39958f8ff007098a1dd82feee49f1cd8025 Mon Sep 17 00:00:00 2001
From: JamesLear92
Date: Fri, 22 Nov 2019 14:25:40 +0000
Subject: [PATCH 2/4] Added Mish functions to ActivationFunctionProvider

---
 .../Networks/Activations/ActivationFunctionProvider.cs | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/NeuralNetwork.NET/Networks/Activations/ActivationFunctionProvider.cs b/NeuralNetwork.NET/Networks/Activations/ActivationFunctionProvider.cs
index 41726b0..78ef6c8 100644
--- a/NeuralNetwork.NET/Networks/Activations/ActivationFunctionProvider.cs
+++ b/NeuralNetwork.NET/Networks/Activations/ActivationFunctionProvider.cs
@@ -1,4 +1,4 @@
-using System;
+using System;
 using JetBrains.Annotations;
 using NeuralNetworkNET.APIs.Enums;
 using NeuralNetworkNET.Networks.Activations.Delegates;
@@ -28,7 +28,8 @@ public static (ActivationFunction, ActivationFunction) GetActivations(Activation
                 case ActivationType.Softmax: return (ActivationFunctions.Softmax, null);
                 case ActivationType.Softplus: return (ActivationFunctions.Softplus, ActivationFunctions.Sigmoid);
                 case ActivationType.ELU: return (ActivationFunctions.ELU, ActivationFunctions.ELUPrime);
-                case ActivationType.Identity: return (ActivationFunctions.Identity, ActivationFunctions.Identityprime);
+                case ActivationType.Identity: return (ActivationFunctions.Identity, ActivationFunctions.IdentityPrime);
+                case ActivationType.Mish: return (ActivationFunctions.Mish, ActivationFunctions.MishPrime);
                 default:
                     throw new ArgumentOutOfRangeException(nameof(ActivationType), "Unsupported activation function");
             }
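
For reference, a minimal standalone C# sketch (not part of the patch series) of the function the first two patches declare and wire up. Softplus and Tanh are re-declared here in their textbook forms, which is an assumption about the library's own ActivationFunctions helpers; it shows that the documented definition x * tanh(ln(1 + e^x)) and the composition x * Tanh(Softplus(x)) used by the patches agree:

    using System;

    // Standalone sketch: compares the documented Mish definition with the
    // Tanh/Softplus composition the patches use.
    static class MishSketch
    {
        // Textbook forms, assumed to match the library's ActivationFunctions helpers
        static float Softplus(float x) => (float)Math.Log(1 + Math.Exp(x));
        static float Tanh(float x) => (float)Math.Tanh(x);
        static float Mish(float x) => x * Tanh(Softplus(x));

        static void Main()
        {
            foreach (float x in new[] { -2f, -0.5f, 0f, 0.5f, 2f })
            {
                float viaDefinition = x * (float)Math.Tanh(Math.Log(1 + Math.Exp(x)));
                Console.WriteLine($"x = {x,4}: composed = {Mish(x):F6}, definition = {viaDefinition:F6}");
            }
        }
    }
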
From 0c10175a7fff625a3ae82c62d99f5844ecb5ad0c Mon Sep 17 00:00:00 2001
From: JamesLear92
Date: Fri, 22 Nov 2019 14:26:16 +0000
Subject: [PATCH 3/4] Added Mish functions to ActivationFunctions

---
 .../Activations/ActivationFunctions.cs | 29 +++++++++++++++++++++++++++--
 1 file changed, 27 insertions(+), 2 deletions(-)

diff --git a/NeuralNetwork.NET/Networks/Activations/ActivationFunctions.cs b/NeuralNetwork.NET/Networks/Activations/ActivationFunctions.cs
index 9b21adf..ac3b76e 100644
--- a/NeuralNetwork.NET/Networks/Activations/ActivationFunctions.cs
+++ b/NeuralNetwork.NET/Networks/Activations/ActivationFunctions.cs
@@ -1,4 +1,4 @@
-using System;
+using System;
 using System.Runtime.CompilerServices;
 using JetBrains.Annotations;
 
@@ -217,6 +217,31 @@ public static float Softplus(float x)
         [PublicAPI]
         [Pure]
         [MethodImpl(MethodImplOptions.AggressiveInlining)]
-        public static float Identityprime(float x) => 1;
+        public static float IdentityPrime(float x) => 1;
+
+        /// <summary>
+        /// Applies the Mish function
+        /// </summary>
+        /// <param name="x">The input to process</param>
+        [PublicAPI]
+        [Pure]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        public static float Mish(float x) => x * Tanh(Softplus(x));
+
+        /// <summary>
+        /// Applies the derivative of the Mish function
+        /// </summary>
+        /// <param name="x">The input to process</param>
+        [PublicAPI]
+        [Pure]
+        [MethodImpl(MethodImplOptions.AggressiveInlining)]
+        public static float MishPrime(float x)
+        {
+            float
+                omega = (float)Math.Exp(3 * x) + 4 * (float)Math.Exp(2 * x) + (6 + 4 * x) * (float)Math.Exp(x) + 4 * (1 + x),
+                delta = 1 + (float)Math.Pow((Math.Exp(x) + 1), 2),
+                derivative = (float)Math.Exp(x) * omega / (float)Math.Pow(delta, 2);
+            return derivative;
+        }
     }
 }

From a938ea4c8db4fc9b36aad0944f96c9da4c6fc708 Mon Sep 17 00:00:00 2001
From: JamesLear92
Date: Sun, 24 Nov 2019 17:05:07 +0000
Subject: [PATCH 4/4] Updated MishPrime function to avoid Math.Pow

Math.Pow is not supported, so the method has been rewritten to avoid it.
---
 .../Networks/Activations/ActivationFunctions.cs | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/NeuralNetwork.NET/Networks/Activations/ActivationFunctions.cs b/NeuralNetwork.NET/Networks/Activations/ActivationFunctions.cs
index ac3b76e..5b32a7d 100644
--- a/NeuralNetwork.NET/Networks/Activations/ActivationFunctions.cs
+++ b/NeuralNetwork.NET/Networks/Activations/ActivationFunctions.cs
@@ -238,10 +238,9 @@ public static float Softplus(float x)
         public static float MishPrime(float x)
         {
             float
-                omega = (float)Math.Exp(3 * x) + 4 * (float)Math.Exp(2 * x) + (6 + 4 * x) * (float)Math.Exp(x) + 4 * (1 + x),
-                delta = 1 + (float)Math.Pow((Math.Exp(x) + 1), 2),
-                derivative = (float)Math.Exp(x) * omega / (float)Math.Pow(delta, 2);
-            return derivative;
+                s = 2 * (float)Math.Exp(x) + (float)Math.Exp(2 * x) + 2,
+                w = 4 * (x + 1) + (4 * ((float)Math.Exp(2 * x))) + (float)Math.Exp(3 * x) + (float)Math.Exp(x) * (4 * x + 6);
+            return (float)Math.Exp(x) * w / (s * s);
         }
     }
 }
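
As a quick sanity check (again not part of the patch series), the rewritten derivative can be compared against a numerical derivative of Mish. The MishPrime body below is copied from patch 4; the harness around it is illustrative only:

    using System;

    // Compares the closed-form derivative e^x * w / s^2 from patch 4 with a
    // central finite difference of Mish(x) = x * tanh(ln(1 + e^x)).
    static class MishPrimeCheck
    {
        static double Mish(double x) => x * Math.Tanh(Math.Log(1 + Math.Exp(x)));

        // Body as rewritten in patch 4: s * s replaces Math.Pow(delta, 2)
        static float MishPrime(float x)
        {
            float
                s = 2 * (float)Math.Exp(x) + (float)Math.Exp(2 * x) + 2,
                w = 4 * (x + 1) + (4 * ((float)Math.Exp(2 * x))) + (float)Math.Exp(3 * x) + (float)Math.Exp(x) * (4 * x + 6);
            return (float)Math.Exp(x) * w / (s * s);
        }

        static void Main()
        {
            const double h = 1e-6; // step for the central difference
            foreach (float x in new[] { -3f, -1f, 0f, 1f, 3f })
            {
                double numeric = (Mish(x + h) - Mish(x - h)) / (2 * h);
                Console.WriteLine($"x = {x,3}: analytic = {MishPrime(x):F5}, numeric = {numeric:F5}");
            }
        }
    }

The rewrite is algebraically identical to patch 3: s equals the old delta, since 1 + (e^x + 1)^2 = e^(2x) + 2e^x + 2, and w equals the old omega, so the only change is that the squarings are spelled out as s * s instead of calling the unsupported Math.Pow. At x = 0, for example, both forms give e^0 * 15 / 5^2 = 0.6.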