AdaGrad

/// AdaGrad optimizer: SGD variant that adapts the per-parameter learning
/// rate using accumulated squared gradients (Duchi et al., 2011).
/// Extends SGDOptimizer, which is declared elsewhere in the library.
class AdaGrad : SGDOptimizer {
// Parameter matrix being optimized — presumably the layer weights; confirm against SGDOptimizer.
float[][] W;
// Current gradient of the loss with respect to W.
float[][] grad;
// Small constant added for numerical stability when dividing by the
// accumulated-gradient norm — TODO confirm exact use in the update rule.
float eps;
// Running accumulator — presumably the sum of squared gradients that
// AdaGrad divides by; verify against the update implementation.
float[][] acc_grad;
// Callback invoked to apply an update to a layer given a gradient slice;
// semantics depend on NeuralLayer, which is not visible here.
void delegate(NeuralLayer, float[]) _update;
}

Examples

auto nn = NeuralNet()
    .stack(SparseData(1000))
    .stack(Linear(1));

// Adagrad learning with 5 epochs, learning rate 0.1, mini-batch size of 100:
nn.learn(data, "square", AdaGrad(5, 0.1, 100));

Meta