DLPrimitives
loss.hpp
1 #pragma once
2 #include <dlprim/tensor.hpp>
3 #include <dlprim/context.hpp>
4 namespace dlprim {
5 namespace core {
6 
10  void softmax_forward(Tensor &x,Tensor &y,bool log_softmax,ExecutionContext const &e);
14  void softmax_backward(Tensor &dx,Tensor &y,Tensor &dy,bool log_softmax,float factor,ExecutionContext const &e);
15 
19  void nll_loss_forward(Tensor &x,Tensor &label,Tensor &y,bool reduce,float scale,ExecutionContext const &e);
23  void nll_loss_backward(Tensor &dx,Tensor &label,Tensor &dy,bool reduce,float scale,float factor,ExecutionContext const &e);
24 
25 } // core
26 } // dlprim
void nll_loss_forward(Tensor &x, Tensor &label, Tensor &y, bool reduce, float scale, ExecutionContext const &e)
Compute forward negative log-likelihood loss; x should contain log-probabilities.
void softmax_forward(Tensor &x, Tensor &y, bool log_softmax, ExecutionContext const &e)
Compute softmax of x into y. If log_softmax is true, compute the log of the softmax output.
void nll_loss_backward(Tensor &dx, Tensor &label, Tensor &dy, bool reduce, float scale, float factor, ExecutionContext const &e)
Compute backward pass of negative log-likelihood loss: the input gradient dx from the output gradient dy and labels.
void softmax_backward(Tensor &dx, Tensor &y, Tensor &dy, bool log_softmax, float factor, ExecutionContext const &e)
Softmax backpropagation.
Main namespace.
Definition: context.hpp:9