Commit bac54e14 authored by Franck Dary

Added support for ELU activation
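
For context: ELU is the Exponential Linear Unit, ELU(x) = x for x > 0 and ELU(x) = alpha * (exp(x) - 1) for x <= 0; dynet's elu defaults to alpha = 1. The diff below threads the new value through the activation enum, the string conversions, and the activate() dispatch.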

parent d66aeb6b
@@ -19,6 +19,7 @@ class MLP
 SIGMOID,
 TANH,
 RELU,
+ELU,
 LINEAR,
 SPARSEMAX,
 CUBE,
@@ -15,6 +15,9 @@ std::string MLP::activation2str(Activation a)
 case RELU :
 return "RELU";
 break;
+case ELU :
+return "ELU";
+break;
 case CUBE :
 return "CUBE";
 break;
@@ -43,6 +46,8 @@ MLP::Activation MLP::str2activation(std::string s)
 return LINEAR;
 else if(s == "RELU")
 return RELU;
+else if(s == "ELU")
+return ELU;
 else if(s == "CUBE")
 return CUBE;
 else if(s == "SIGMOID")
@@ -230,6 +235,9 @@ inline dynet::Expression MLP::activate(dynet::Expression h, Activation f)
 case RELU :
 return rectify(h);
 break;
+case ELU :
+return elu(h);
+break;
 case SIGMOID :
 return logistic(h);
 break;
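
As a usage sketch only (the MLP wrapper around this switch is not part of the diff, so the setup below is assumed), the new branch reduces to a call to dynet::elu, which can be exercised directly:

#include <dynet/init.h>
#include <dynet/dynet.h>
#include <dynet/expr.h>
#include <iostream>
#include <vector>

int main(int argc, char ** argv)
{
  dynet::initialize(argc, argv);
  dynet::ComputationGraph cg;

  // Three sample values: a negative, a zero and a positive input.
  std::vector<float> values = {-2.f, 0.f, 3.f};
  dynet::Expression x = dynet::input(cg, dynet::Dim({3}), values);

  // ELU with the default alpha = 1: x for x > 0, exp(x) - 1 otherwise.
  dynet::Expression y = dynet::elu(x);

  std::vector<float> out = dynet::as_vector(cg.forward(y));
  for (float v : out)
    std::cout << v << " ";
  std::cout << std::endl;
  return 0;
}

Expected output is roughly -0.86 0 3: negative inputs are squashed toward -alpha while positive inputs pass through unchanged, which is what distinguishes ELU from the existing RELU case.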