Chisei v1.0
Lightweight AI/ML Framework
Loading...
Searching...
No Matches
activation_functions.hpp
Go to the documentation of this file.
1/*
2 *
3 * Copyright 2025 Nathanne Isip
4 *
5 * Redistribution and use in source and binary forms,
6 * with or without modification, are permitted provided
7 * that the following conditions are met:
8 *
9 * 1. Redistributions of source code must retain the
10 * above copyright notice, this list of conditions
11 * and the following disclaimer.
12 *
13 * 2. Redistributions in binary form must reproduce
14 * the above copyright notice, this list of conditions
15 * and the following disclaimer in the documentation
16 * and/or other materials provided with the distribution.
17 *
18 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
19 * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
20 * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
21 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
23 * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
24 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
25 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
27 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
28 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
29 * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
30 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
31 * DAMAGE.
32 *
33 */
34
41#ifndef CHISEI_ACTIVATION_FUNCTION_HPP
42#define CHISEI_ACTIVATION_FUNCTION_HPP
43
44#include <cmath>
45
namespace chisei {

    /**
     * @brief Collection of static activation functions and their derivatives.
     *
     * All members are stateless pure functions; the class is `final` and is
     * never instantiated -- it only serves as a namespace-like grouping.
     *
     * @note The derivative helpers follow the common backpropagation
     * convention of taking the *activation output* (not the pre-activation
     * input) for sigmoid and tanh, which makes them cheap algebraic forms.
     *
     * NOTE(review): `std::exp`/`std::tanh` are not constexpr before C++26,
     * so these functions cannot actually be evaluated in constant
     * expressions on strictly conforming implementations. The `constexpr`
     * keyword is kept for source/ABI compatibility (it also implies
     * `inline`, so the redundant `inline` keyword has been dropped).
     */
    class ActivationFunctions final {
    public:

        /**
         * @brief Computes the Sigmoid activation: 1 / (1 + e^(-x)).
         * @param x Pre-activation input value.
         * @return Sigmoid of @p x, in the open interval (0, 1).
         */
        static constexpr double sigmoid_activation(double x) noexcept {
            return 1.0 / (1.0 + std::exp(-x));
        }

        /**
         * @brief Computes the derivative of the Sigmoid function.
         * @param x The *output* of sigmoid_activation, i.e. x = sigmoid(z).
         * @return s * (1 - s) where s is the sigmoid output @p x.
         *
         * @warning Passing the pre-activation input here yields wrong
         * gradients; callers must pass the already-activated value.
         */
        static constexpr double sigmoid_derivative(double x) noexcept {
            return x * (1.0 - x);
        }

        /**
         * @brief Computes the ReLU (Rectified Linear Unit) activation.
         * @param x Pre-activation input value.
         * @return @p x if positive, otherwise 0.0.
         */
        static constexpr double relu_activation(double x) noexcept {
            return x > 0.0 ? x : 0.0;
        }

        /**
         * @brief Computes the derivative of the ReLU function.
         * @param x Pre-activation input value.
         * @return 1.0 for positive @p x, otherwise 0.0.
         *
         * @note At x == 0 the derivative is undefined; this implementation
         * uses the conventional subgradient choice of 0.0.
         */
        static constexpr double relu_derivative(double x) noexcept {
            return x > 0.0 ? 1.0 : 0.0;
        }

        /**
         * @brief Computes the Tanh (hyperbolic tangent) activation.
         * @param x Pre-activation input value.
         * @return tanh(x), in the open interval (-1, 1).
         */
        static constexpr double tanh_activation(double x) noexcept {
            return std::tanh(x);
        }

        /**
         * @brief Computes the derivative of the Tanh function.
         * @param x The *output* of tanh_activation, i.e. x = tanh(z).
         * @return 1 - t^2 where t is the tanh output @p x.
         *
         * @warning As with sigmoid_derivative, this expects the activated
         * value, not the raw pre-activation input.
         */
        static constexpr double tanh_derivative(double x) noexcept {
            return 1.0 - x * x;
        }
    };
}
159
160#endif
Provides a collection of static methods for activation functions and their derivatives.
static constexpr double sigmoid_derivative(double x) noexcept
Computes the derivative of the Sigmoid function.
static constexpr double tanh_activation(double x) noexcept
Computes the Tanh (Hyperbolic Tangent) activation function.
static constexpr double sigmoid_activation(double x) noexcept
Computes the Sigmoid activation function.
static constexpr double relu_derivative(double x) noexcept
Computes the derivative of the ReLU function.
static constexpr double relu_activation(double x) noexcept
Computes the ReLU (Rectified Linear Unit) activation function.
static constexpr double tanh_derivative(double x) noexcept
Computes the derivative of the Tanh function.