// Module<T>: virtual training-mode switch (overridden below by Sequential)
virtual void train(bool training = true) {

// Linear<T>: constructor, parameter access, and private state
Linear(size_t in_features, size_t out_features, bool bias = true);
std::vector<Variable<T>*> parameters() override;
size_t out_features_;
void initialize_parameters();

// Parameter-free layers (ReLU, Sigmoid, Tanh, Dropout): parameters() returns an empty vector
std::vector<Variable<T>*> parameters() override { return {}; }
std::vector<Variable<T>*> parameters() override { return {}; }
std::vector<Variable<T>*> parameters() override { return {}; }
std::vector<Variable<T>*> parameters() override { return {}; }

// Sequential<T>: aggregated parameters, training-mode propagation, and the module container
std::vector<Variable<T>*> parameters() override;
void train(bool training = true) override;
std::vector<std::shared_ptr<Module<T>>> modules_;
PyTorch-like automatic differentiation engine.
Dropout layer for regularization.
Dropout(T p=0.5)
Constructor.
std::vector< Variable< T > * > parameters() override
Get all parameters of this module.
Variable< T > forward(const Variable< T > &input) override
Forward pass through the module.
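Dropout only has an effect in training mode; calling train() or eval() on the module toggles the behaviour. A minimal usage sketch, assuming the header name nn.hpp is a placeholder and that p is the drop probability (modelled on PyTorch):

#include "nn.hpp"  // hypothetical header; the real include path may differ

// Run the same input through a Dropout layer in both modes.
// In training mode units are randomly zeroed; in eval mode the layer
// is expected to pass the input through unchanged.
template <typename T>
Variable<T> dropout_demo(const Variable<T>& input) {
    Dropout<T> drop(T(0.3));      // p = 0.3, assumed to be the drop probability
    drop.train();                 // training mode
    Variable<T> noisy = drop.forward(input);
    (void)noisy;                  // training-mode output, unused here
    drop.eval();                  // evaluation mode: dropout disabled
    return drop.forward(input);
}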
Linear (fully connected) layer: y = xW^T + b.
const Variable< T > & bias() const
Get the bias parameter.
const Variable< T > & weight() const
Get the weight parameter.
Variable< T > forward(const Variable< T > &input) override
Forward pass: y = xW^T + b.
std::vector< Variable< T > * > parameters() override
Get parameters (weight and bias).
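A short sketch of constructing a Linear layer and running one forward pass. The input Variable is assumed to be produced elsewhere with 4 features per row; the header name is a placeholder:

#include <vector>
#include "nn.hpp"  // hypothetical header

// y = xW^T + b for a 4 -> 2 affine map.
template <typename T>
Variable<T> affine_demo(const Variable<T>& x) {
    Linear<T> fc(/*in_features=*/4, /*out_features=*/2, /*bias=*/true);
    Variable<T> y = fc.forward(x);

    // The trainable state is reachable both through the named accessors
    // and through the generic parameters() interface used by optimizers.
    const Variable<T>& W = fc.weight();
    const Variable<T>& b = fc.bias();
    std::vector<Variable<T>*> params = fc.parameters();  // weight and bias
    (void)W; (void)b; (void)params;
    return y;
}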
Base class for all neural network modules (PyTorch-like nn.Module).
virtual void eval()
Set evaluation mode.
virtual void train(bool training=true)
Set training mode.
bool is_training() const
Check if module is in training mode.
virtual ~Module()=default
virtual void zero_grad()
Zero gradients of all parameters.
virtual Variable< T > forward(const Variable< T > &input)=0
Forward pass through the module.
virtual std::vector< Variable< T > * > parameters()=0
Get all parameters of this module.
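Because forward() and parameters() are the only pure-virtual members, a custom layer needs to override just those two; mode switching (train()/eval(), is_training()) and zero_grad() are inherited. A sketch of a composite module, assuming the activation classes are templated on T like the rest of the API:

#include <cstddef>
#include <vector>
#include "nn.hpp"  // hypothetical header

// Minimal custom module: a Linear layer followed by a ReLU.
template <typename T>
class LinearReLU : public Module<T> {
public:
    LinearReLU(std::size_t in, std::size_t out) : fc_(in, out) {}

    Variable<T> forward(const Variable<T>& input) override {
        return act_.forward(fc_.forward(input));
    }

    std::vector<Variable<T>*> parameters() override {
        return fc_.parameters();  // the ReLU contributes no parameters
    }

private:
    Linear<T> fc_;
    ReLU<T> act_;
};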
ReLU activation function.
Variable< T > forward(const Variable< T > &input) override
Forward pass through the module.
std::vector< Variable< T > * > parameters() override
Get all parameters of this module.
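Like the other activation layers, ReLU carries no trainable state, which is why its parameters() override in the listing above simply returns an empty vector. A small sketch:

#include <cassert>
#include "nn.hpp"  // hypothetical header

// ReLU is stateless: forward() rectifies elementwise, and there is
// nothing for zero_grad() to clear.
template <typename T>
Variable<T> rectify(const Variable<T>& x) {
    ReLU<T> relu;
    assert(relu.parameters().empty());  // no trainable parameters
    return relu.forward(x);
}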
Sequential container for chaining modules.
Variable< T > forward(const Variable< T > &input) override
Forward pass through all modules in sequence.
void add_module(std::shared_ptr< Module< T > > module)
Add a module to the sequence.
std::vector< Variable< T > * > parameters() override
Get parameters from all modules.
void train(bool training=true) override
Set training mode for all modules.
void zero_grad() override
Zero gradients for all modules.
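The container owns its layers through shared_ptr (the modules_ vector above), so a network is assembled with add_module() and then driven through the same Module<T> interface. A sketch of a small MLP, with layer sizes chosen arbitrarily and the header name a placeholder:

#include <memory>
#include "nn.hpp"  // hypothetical header

// 784 -> 128 -> 10 MLP with dropout, run in training mode.
template <typename T>
Variable<T> mlp_forward(const Variable<T>& x) {
    Sequential<T> net;
    net.add_module(std::make_shared<Linear<T>>(784, 128));
    net.add_module(std::make_shared<ReLU<T>>());
    net.add_module(std::make_shared<Dropout<T>>(T(0.5)));
    net.add_module(std::make_shared<Linear<T>>(128, 10));

    net.train();      // propagates training mode to every contained module
    net.zero_grad();  // clears gradients of all parameters in the chain
    // net.parameters() would return the weights and biases of both Linear layers.
    return net.forward(x);
}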
Sigmoid activation function.
Variable< T > forward(const Variable< T > &input) override
Forward pass through the module.
std::vector< Variable< T > * > parameters() override
Get all parameters of this module.
Tanh activation function.
Variable< T > forward(const Variable< T > &input) override
Forward pass through the module.
std::vector< Variable< T > * > parameters() override
Get all parameters of this module.
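Sigmoid and Tanh are the same kind of stateless, elementwise module; the practical difference is the output range, (0, 1) for Sigmoid versus (-1, 1) for Tanh. A sketch under the same templating assumption as above:

#include <utility>
#include "nn.hpp"  // hypothetical header

// Apply both squashing activations to the same input.
template <typename T>
std::pair<Variable<T>, Variable<T>> squash(const Variable<T>& x) {
    Sigmoid<T> sig;  // each element mapped into (0, 1)
    Tanh<T> tanh_;   // each element mapped into (-1, 1)
    return { sig.forward(x), tanh_.forward(x) };
}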
Variable class that supports automatic differentiation.
Matrix utility class for deep learning operations.
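Variable is the autodiff handle that flows through every forward() call, with Matrix as the underlying storage. How a Variable is built from raw data and how gradients are triggered is not shown on this page, so the training-step sketch below assumes a PyTorch-style Variable<T>::backward() and treats the network output itself as the quantity to differentiate; both are illustrative assumptions, not documented API:

#include "nn.hpp"  // hypothetical header

// Hypothetical training step built only on the Module<T> interface,
// plus an assumed PyTorch-style backward() on Variable<T>.
template <typename T>
void train_step(Module<T>& model, const Variable<T>& input) {
    model.train();
    model.zero_grad();                        // documented: clear parameter gradients
    Variable<T> loss = model.forward(input);  // stand-in for a real loss Variable
    loss.backward();                          // ASSUMPTION: not part of the documented members
    // An optimizer would then update each Variable<T>* in model.parameters().
}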