/**
    Contains an implementation of dense (i.e., fully connected) layers.

    Authors: Henry Gouk
*/
module dopt.nnet.layers.dense;

import dopt;
import dopt.nnet.layers.util;
/**
    Encapsulates additional options for dense layers.
*/
class DenseOptions
{
    this()
    {
        _weightInit = heGaussianInit();
        _biasInit = constantInit(0.0f);
        _useBias = true;
        _weightDecay = 0;
    }

    //Generates a fluent getter/setter property for each of the options listed below
    mixin(dynamicProperties(
        "ParamInitializer", "weightInit",
        "ParamInitializer", "biasInit",
        "Projection", "weightProj",
        "Projection", "biasProj",
        "float", "weightDecay",
        "bool", "useBias"
    ));
}

///
unittest
{
    //Create a DenseOptions object with the default parameters
    auto opts = new DenseOptions()
               .weightInit(heGaussianInit())
               .biasInit(constantInit(0.0f))
               .weightProj(null)
               .biasProj(null)
               .weightDecay(0.0f)
               .useBias(true);

    //Options can also be read back again later
    assert(opts.weightDecay == 0.0f);
    assert(opts.useBias == true);
}

/**
    Creates a fully connected (also known as dense) layer.

    Params:
        input = The previous layer in the network.
        numOutputs = The number of units in this layer.
        opts = Additional options with sensible default values.

    Returns:
        The new layer.
*/
Layer dense(Layer input, size_t numOutputs, DenseOptions opts = new DenseOptions())
{
    auto x = input.output;
    auto xTr = input.trainOutput;

    //Flatten each example in the batch into a vector, giving a [batchSize, numInputs] matrix
    x = x.reshape([x.shape[0], x.volume / x.shape[0]]);
    xTr = xTr.reshape([xTr.shape[0], xTr.volume / xTr.shape[0]]);

    auto weights = float32([numOutputs, x.shape[1]]);
    opts._weightInit(weights);

    //Weight decay is realised as an L2 penalty, weightDecay * sum(weights ^^ 2), added to the network loss
    auto weightLoss = (opts.weightDecay == 0.0f) ? null : (opts.weightDecay * sum(weights * weights));

    Parameter[] params = [
        Parameter(weights, weightLoss, opts.weightProj)
    ];

    //Compute y = x * W^T, producing a [batchSize, numOutputs] matrix of activations
    auto y = matmul(x, weights.transpose([1, 0]));
    auto yTr = matmul(xTr, weights.transpose([1, 0]));

    if(opts.useBias)
    {
        auto bias = float32([numOutputs]);
        opts._biasInit(bias);

        //Repeat the bias vector across the batch dimension before adding it to the activations
        y = y + bias.repeat(y.shape[0]);
        yTr = yTr + bias.repeat(yTr.shape[0]);

        params ~= Parameter(bias, null, opts.biasProj);
    }

    return new Layer([input], y, yTr, params);
}
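
///
unittest
{
    //A minimal usage sketch. It assumes the dataSource helper from dopt.nnet can be
    //used to wrap a variable as the input Layer; any other Layer would work equally well.
    auto features = float32([128, 784]);

    //Stack a dense layer with 10 units on top of the input
    auto layer = dense(dataSource(features), 10);

    //The layer produces one row of numOutputs activations per example in the batch
    assert(layer.output.shape == [128, 10]);
}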