Deep Learning Algorithm Implementations 1.0.0
C++ implementations of fundamental deep learning algorithms
functions.cpp
Activation functions for neural networks.
#include "functions.hpp" // assumed name for the header declaring these classes and MatrixD

#include <algorithm>
#include <cmath>

namespace dl::activation {
    // ReLU Implementation
    double ReLU::forward(double x) {
        // Formula: f(x) = max(0, x)
        return std::max(0.0, x);
    }

    double ReLU::backward(double x) {
        // Formula: f'(x) = 1 if x > 0, else 0 (the derivative is undefined
        // at x = 0; returning 0 there is the conventional choice)
        return x > 0.0 ? 1.0 : 0.0;
    }

    // Sigmoid Implementation
    double Sigmoid::forward(double x) {
        // Formula: f(x) = 1 / (1 + exp(-x)); saturates cleanly to 0 and 1
        // at the extremes in double precision (no NaN)
        return 1.0 / (1.0 + std::exp(-x));
    }

    double Sigmoid::backward(double x) {
        // Formula: f'(x) = f(x) * (1 - f(x))
        double fx = forward(x);
        return fx * (1.0 - fx);
    }

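    // Note: the sigmoid derivative peaks at f'(0) = 0.25 and decays toward
    // zero for large |x|, the classic source of vanishing gradients in deep
    // sigmoid networks.
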
    // Tanh Implementation
    double Tanh::forward(double x) {
        // Formula: f(x) = tanh(x)
        return std::tanh(x);
    }

    double Tanh::backward(double x) {
        // Formula: f'(x) = 1 - tanh^2(x), reusing the forward value
        double fx = forward(x);
        return 1.0 - fx * fx;
    }

    // Softmax Implementation
    // Formula: f(x_i) = exp(x_i) / sum_j exp(x_j), applied row-wise.
    // Sketch only: subtracts the row maximum before exponentiating to
    // avoid overflow, and assumes MatrixD exposes operator()(row, col).
    MatrixD Softmax::forward(const MatrixD &x) {
        MatrixD out(x.rows(), x.cols());
        for (int i = 0; i < x.rows(); ++i) {
            double m = x(i, 0);
            for (int j = 1; j < x.cols(); ++j) m = std::max(m, x(i, j));
            double sum = 0.0;
            for (int j = 0; j < x.cols(); ++j) { out(i, j) = std::exp(x(i, j) - m); sum += out(i, j); }
            for (int j = 0; j < x.cols(); ++j) out(i, j) /= sum;
        }
        return out;
    }

    // Formula: Jacobian J_ij = s_i * (delta_ij - s_j), where s = forward(x).
    // Sketch only: assumes x holds a single sample as a 1 x n row vector,
    // so the returned Jacobian is n x n rather than the shape of x.
    MatrixD Softmax::backward(const MatrixD &x) {
        MatrixD s = forward(x);
        const int n = static_cast<int>(x.cols());
        MatrixD jac(n, n);
        for (int i = 0; i < n; ++i)
            for (int j = 0; j < n; ++j)
                jac(i, j) = s(0, i) * ((i == j ? 1.0 : 0.0) - s(0, j));
        return jac;
    }

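    // Worked example (illustrative): for x = [1, 2, 3],
    // s = forward(x) ≈ [0.090, 0.245, 0.665], so J_00 = s_0 * (1 - s_0)
    // ≈ 0.082 and J_01 = -s_0 * s_1 ≈ -0.022; the Jacobian is symmetric
    // with a positive diagonal and negative off-diagonal entries.
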
    // LeakyReLU Implementation
    LeakyReLU::LeakyReLU(double alpha) : alpha_(alpha) {
        // Stores the leak coefficient (the declaration defaults alpha to 0.01)
    }

    double LeakyReLU::forward(double x) {
        // Formula: f(x) = x if x > 0, else alpha * x
        return x > 0.0 ? x : alpha_ * x;
    }

    double LeakyReLU::backward(double x) {
        // Formula: f'(x) = 1 if x > 0, else alpha
        return x > 0.0 ? 1.0 : alpha_;
    }
} // namespace dl::activation
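A minimal usage sketch, assuming the declarations above are visible to the caller (for example via the project header), that the classes are default-constructible where no constructor is shown, and that forward/backward are callable on an instance; the central-difference quotient cross-checks backward() against forward():

#include <cstdio>

int main() {
    dl::activation::Sigmoid sigmoid;
    dl::activation::LeakyReLU leaky(0.01); // leak coefficient alpha

    // Central difference (f(x+h) - f(x-h)) / 2h should match backward(x)
    double x = 0.5, h = 1e-6;
    double analytic = sigmoid.backward(x);
    double numeric = (sigmoid.forward(x + h) - sigmoid.forward(x - h)) / (2.0 * h);
    std::printf("sigmoid'(0.5): analytic=%.6f numeric=%.6f\n", analytic, numeric);

    std::printf("leaky_relu(-2.0) = %.4f\n", leaky.forward(-2.0)); // prints -0.0200
    return 0;
}

Calling through an instance compiles whether the members are declared static or not, so the sketch does not depend on that detail of the class declarations.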