/**
    Contains an implementation of the ReLU activation function.

    Authors: Henry Gouk
*/
module dopt.nnet.layers.relu;

import dopt.core;
import dopt.nnet;
import dopt.nnet.layers.util;
import dopt.online;

/**
    Creates a layer that applies the rectified linear unit (ReLU) activation function,
    max(0, x), elementwise to the output of the given input layer.

    Params:
        input = The layer whose output should be passed through the ReLU function.

    Returns:
        The new layer.
*/
Layer relu(Layer input)
{
    //Applies the elementwise ReLU operation from dopt.core to a symbolic expression
    Operation reluImpl(Operation x)
    {
        import dopt.core.ops.nnet : relu;

        return relu(x);
    }

    //ReLU introduces no trainable parameters, hence the null final argument
    return new Layer([input], reluImpl(input.output), reluImpl(input.trainOutput), null);
}
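
/*
    A rough usage sketch, assuming the float32 helper from dopt.core and the dataSource
    and dense layer constructors provided elsewhere in dopt.nnet; the surrounding names
    and shapes are illustrative, only the relu() call refers to this module:

        auto features = float32([100, 28 * 28]);    //hypothetical batch of flattened inputs
        auto layers = dataSource(features)
                     .dense(512)
                     .relu()                        //the activation layer defined above
                     .dense(10);
*/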