/**
 * Low-level math primitives.
 *
 * Copyright: 2017 Netflix, Inc.
 * License: $(LINK2 http://www.apache.org/licenses/LICENSE-2.0, Apache License Version 2.0)
 */
module vectorflow.math;

private{
    import vectorflow.neurallayer : SparseF;
    import std.math : log1p;
}
version(LDC)
{
    private{
        import ldc.attributes;
        import ldc.intrinsics;
    }

    // Under LDC, bind the math functions to the LLVM intrinsics so they
    // can be inlined and vectorized in the @fastmath block below.
    alias exp = llvm_exp;
    alias fabs = llvm_fabs;
    alias fmax = llvm_maxnum;
    alias log = llvm_log;
    alias round = llvm_round;
    alias sqrt = llvm_sqrt;
}
else
{
    public import std.math : exp, fmax, log, round, sqrt;
    private import std.math : abs;
    alias fabs = abs;
}

mixin template Functions()
{
    /// Dot product of two dense vectors. Assumes x and y have the same length.
    static float dotProd(float[] x, float[] y) pure @nogc
    {
        float res = 0;
        foreach(i; 0 .. x.length)
            res += x[i] * y[i];
        return res;
    }

    /// Rectified linear unit, applied elementwise: y[i] = max(0, x[i]).
    static void relu(float[] x, float[] y) pure @nogc
    {
        foreach(i; 0 .. x.length)
            y[i] = fmax(0.0f, x[i]);
    }

    /// Dense saxpy: y += a * x.
    static void axpy(float a, float[] x, float[] y) pure @nogc
    {
        foreach(i; 0 .. x.length)
            y[i] += a * x[i];
    }

    /// Sparse saxpy: y += a * x, where x carries only the non-zero coordinates.
    static void axpy(float a, SparseF[] x, float[] y) pure @nogc
    {
        foreach(ref f; x)
            y[f.id] += a * f.val;
    }

    /// Elementwise hyperbolic tangent, via tanh(x) = (e^(2x) - 1) / (e^(2x) + 1).
    /// For x > 20, tanh(x) == 1 to float precision, so the exponential is
    /// skipped; for large negative x, exp(2x) underflows to 0 and the
    /// formula correctly yields -1.
    static void tanh(float[] x, float[] y) pure @nogc
    {
        foreach(i; 0 .. x.length)
        {
            if(x[i] > 20)
                y[i] = 1;
            else
            {
                y[i] = exp(2 * x[i]);
                y[i] = (y[i] - 1) / (y[i] + 1);
            }
        }
    }

    /// Numerically stable softplus: log(1 + exp(x)).
    static double log1expp(double x) pure @nogc
    {
        // For large x, log1p(exp(x)) == x to double precision, and
        // returning early avoids overflowing exp.
        if(x > 60)
            return x;
        return log1p(exp(x));
    }

    /// Logistic sigmoid: 1 / (1 + exp(-x)).
    static double logistic(double x) pure @nogc
    {
        return 1.0 / (1 + exp(-x));
    }
}

// Instantiate the primitives at module scope. Under LDC they are
// force-inlined and compiled with fast-math optimizations.
version(LDC)
{
    pragma(inline, true)
    {
        @fastmath
        {
            mixin Functions!();
        }
    }
}
else
{
    mixin Functions!();
}
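
// Illustrative usage sketch, not part of the original module: a unittest
// exercising the dense and sparse vector primitives. It assumes SparseF
// exposes assignable `id` and `val` fields, as implied by the sparse axpy
// overload above.
unittest
{
    import std.math : isClose;

    float[] x = [1.0f, 2.0f, 3.0f];
    float[] y = [4.0f, 5.0f, 6.0f];

    // 1*4 + 2*5 + 3*6 = 32
    assert(isClose(dotProd(x, y), 32.0f));

    // relu clamps negative entries to 0.
    auto r = new float[](3);
    relu([-1.0f, 0.0f, 2.0f], r);
    assert(r == [0.0f, 0.0f, 2.0f]);

    // Dense axpy: y += 2 * x.
    axpy(2.0f, x, y);
    assert(y == [6.0f, 9.0f, 12.0f]);

    // Sparse axpy touches only the listed coordinate: y[1] += 2 * 0.5.
    SparseF f;
    f.id = 1;
    f.val = 0.5f;
    axpy(2.0f, [f], y);
    assert(isClose(y[1], 10.0f));
}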
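
// A second illustrative sketch for the scalar helpers, asserting standard
// identities of the softplus, sigmoid and tanh implemented above.
unittest
{
    import std.math : isClose, LN2;

    // softplus(0) = log(2); for large x, softplus(x) ~= x;
    // for very negative x, softplus(x) ~= exp(x) ~= 0.
    assert(isClose(log1expp(0.0), LN2));
    assert(isClose(log1expp(100.0), 100.0));
    assert(log1expp(-100.0) < 1e-40);

    // logistic(0) = 0.5, and logistic(-x) = 1 - logistic(x).
    assert(isClose(logistic(0.0), 0.5));
    assert(isClose(logistic(3.0) + logistic(-3.0), 1.0));

    // tanh saturates to +/-1 away from the origin.
    auto t = new float[](3);
    tanh([-30.0f, 0.0f, 30.0f], t);
    assert(isClose(t[0], -1.0f));
    assert(t[1] == 0.0f);
    assert(isClose(t[2], 1.0f));
}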