ANN  0.1.1.5
A library containing multiple neural network models written in C
tools.c
Go to the documentation of this file.
1 #include <stdlib.h>
2 #include <math.h>
3 #include <time.h>
4 #include "ANN/tools.h"
5 
// Weight/bias initialization functions

// Weight and bias initialization for hidden and output layers.
// Returns a pseudo-random value uniformly distributed in [-1.0, 1.0].
// NOTE(review): the signature line was missing from this listing and has been
// restored (the cross-reference confirms the name f_init_rand_norm, tools.c:7).
// Relies on rand(); call srand() elsewhere to vary sequences between runs.
double f_init_rand_norm()
{
    return (((double)rand()) / RAND_MAX * 2.0 - 1.0);
}
11 
// Weight and bias initialization for the input layer.
// Input-layer weights/biases are fixed at unity so inputs pass through unscaled.
double f_init_input()
{
    const double unity = 1.0;
    return unity;
}
16 
17 
18 // Activation functions
19 double f_act_sigmoid(double n)
20 {
21  return 1/(1 + exp(-n));
22 }
23 
// Derivative of the sigmoid activation (for the backpropagation algorithm):
// sigma'(n) = sigma(n) * (1 - sigma(n)).
// Fixed: the sigmoid was previously evaluated twice per call; compute it once.
double f_act_sigmoid_de(double n)
{
    const double s = f_act_sigmoid(n);
    return s * (1 - s);
}
28 
// Activation function for the input layer (feedforward algorithm).
// The identity: input values are passed through unchanged.
double f_act_input(double n)
{
    return n;
}
33 
// Derivative of the input-layer activation (backpropagation algorithm).
// d/dn (n) = 1 everywhere, independent of n.
double f_act_input_de(double n __attribute__((unused)))
{
    return 1.0;
}
38 
// ReLU activation function (for the feedforward algorithm):
// passes non-negative inputs through, clamps negatives to zero.
double f_act_relu(double n)
{
    if (n >= 0)
        return n;
    return 0;
}
43 
// Derivative of ReLU (for the backpropagation algorithm):
// 1 for non-negative inputs, 0 otherwise (slope at n == 0 taken as 1).
double f_act_relu_de(double n)
{
    if (n >= 0)
        return 1;
    return 0;
}
48 
49 double f_act_softplus(double n)
50 {
51  return log10(1 + exp(n));
52 }
53 
// Derivative of SoftPlus (for the backpropagation algorithm):
// d/dn ln(1 + e^n) = 1 / (1 + e^-n), i.e. the sigmoid, written out inline.
double f_act_softplus_de(double n)
{
    return 1 / (1 + exp(-n));
}
58 
59 double f_act_elu(double n)
60 {
61  return n >= 0 ? n : F_ACT_ELU_ALPHA * (exp(n) - 1);
62 }
63 
64 double f_act_elu_de(double n)
65 {
66  return n >= 0 ? 1 : f_act_elu(n) + F_ACT_ELU_ALPHA;
67 }
68 
// Swish activation function (for the feedforward algorithm):
// swish(n) = n * sigmoid(n).
double f_act_swish(double n)
{
    const double s = f_act_sigmoid(n);
    return n * s;
}
73 
// Derivative of Swish (for the backpropagation algorithm), via the
// product rule: d/dn [n * sigmoid(n)] = sigmoid(n) + n * sigmoid'(n).
double f_act_swish_de(double n)
{
    const double s = f_act_sigmoid(n);
    const double ds = f_act_sigmoid_de(n);
    return s + (n * ds);
}
78 
79 
80 // Cost functions
81 
82 double f_cost_quadratic_loss(double o, double t)
83 {
84  return (t - o) * (t - o) * F_COST_QUADRATIC_CONSTANT;
85 }
86 
87 double f_cost_quadratic_loss_de(double o, double t)
88 {
89  return 2 * (o - t) * F_COST_QUADRATIC_CONSTANT;
90 }
double f_cost_quadratic_loss_de(double o, double t)
Derivative of the quadratic cost function (for the backpropagation algorithm)
Definition: tools.c:87
double f_act_elu(double n)
Elu activation function (for feedforward algorithm)
Definition: tools.c:59
double f_act_swish_de(double n)
Derivative Swish activation function (for backpropagation algorithm)
Definition: tools.c:74
double f_cost_quadratic_loss(double o, double t)
Quadratic cost function.
Definition: tools.c:82
double f_init_rand_norm()
Weight and bias initialization function for hidden and output layers.
Definition: tools.c:7
double f_act_input(double n)
Activation function for input layer (for feedforward algorithm)
Definition: tools.c:29
double f_act_softplus(double n)
SoftPlus activation function (for feedforward algorithm)
Definition: tools.c:49
#define F_ACT_ELU_ALPHA
Definition: tools.h:107
double f_act_relu(double n)
ReLu activation function (for feedforward algorithm)
Definition: tools.c:39
#define F_COST_QUADRATIC_CONSTANT
Definition: tools.h:149
double f_act_softplus_de(double n)
Derivative SoftPlus activation function (for backpropagation algorithm)
Definition: tools.c:54
double f_init_input()
Weight and bias initialization function for the input layer.
Definition: tools.c:12
double f_act_swish(double n)
Swish activation function (for feedforward algorithm)
Definition: tools.c:69
Some useful functions.
double f_act_sigmoid_de(double n)
Derivative sigmoid activation function (for backpropagation algorithm)
Definition: tools.c:24
double f_act_sigmoid(double n)
Sigmoid activation function (for feedforward algorithm)
Definition: tools.c:19
double f_act_elu_de(double n)
Derivative Elu activation function (for backpropagation algorithm)
Definition: tools.c:64
double f_act_input_de(double n __attribute__((unused)))
Definition: tools.c:34
double f_act_relu_de(double n)
Derivative ReLu activation function (for backpropagation algorithm)
Definition: tools.c:44