Diffstat (limited to 'neural-net/nn.c')
-rw-r--r--    neural-net/nn.c    88
1 file changed, 88 insertions, 0 deletions
diff --git a/neural-net/nn.c b/neural-net/nn.c
new file mode 100644
index 0000000..9ad3346
--- /dev/null
+++ b/neural-net/nn.c
@@ -0,0 +1,88 @@
+#include "nn.h"
+
+static struct vec get_row(struct mat mat, int i) {
+ struct vec row = {mat.cols};
+ for (int k = 0; k < row.n; k++) row.data[k] = mat.data[i][k];
+ return row;
+}
+static void add_vec(struct vec *out, struct vec in) {
+ for (int k = 0; k < in.n; k++) out->data[k] += in.data[k];
+}
+static void add_row(struct mat *mat, int i, struct vec row) {
+ for (int k = 0; k < row.n; k++) mat->data[i][k] += row.data[k];
+}
+void add_mat(struct mat *out, struct mat in) {
+ for (int i = 0; i < out->rows; i++)
+ add_row(out, i, get_row(in, i));
+}
+
+// FORWARD PROPAGATION OPERATIONS
+// vv: dot product, mv: matrix-vector product, v_relu: element-wise max(0, x).
+float vv(struct vec A, struct vec B) {
+ float result = 0.;
+ for (size_t i = 0; i < A.n; i++)
+ result += A.data[i] * B.data[i];
+ return result;
+}
+
+struct vec mv(struct mat mat, struct vec vec) {
+ struct vec out = {mat.rows, {0}};
+ for (size_t i = 0; i < mat.rows; i++)
+ out.data[i] = vv(get_row(mat, i), vec);
+ return out;
+}
+
+struct vec v_relu(struct vec vec) {
+ for (size_t i = 0; i < vec.n; i++)
+ vec.data[i] = (vec.data[i] > 0.) ? vec.data[i] : 0.;
+ return vec;
+}
+
+// BACKWARD PROPAGATION OPERATIONS
+// For y = W x with upstream deltas d = dL/dy, these compute dL/dx = W^T d
+// (mv_bp_v) and dL/dW = d x^T (mv_bp_m), accumulated row by row.
+// d(constant . variable)/d(variable_i) = constant_i, so `variable` itself is unused.
+static struct vec vv_bp(struct vec constant, struct vec variable, float out_delta) {
+ struct vec out = {constant.n, {0}};
+ for (size_t i = 0; i < out.n; i++)
+ out.data[i] = constant.data[i] * out_delta;
+ return out;
+}
+
+struct vec mv_bp_v(struct mat constant, struct vec variable, struct vec out_deltas) {
+ struct vec in_deltas = {variable.n, {0}};
+ for (size_t i = 0; i < out_deltas.n; i++)
+ add_vec(&in_deltas,
+ vv_bp(get_row(constant, i), variable, out_deltas.data[i]));
+ return in_deltas;
+}
+
+struct mat mv_bp_m(struct mat variable, struct vec constant, struct vec out_deltas) {
+ struct mat weight_deltas = {variable.rows, variable.cols, {0}};
+ for (size_t i = 0; i < out_deltas.n; i++)
+ add_row(&weight_deltas, i,
+ vv_bp(constant, get_row(variable, i), out_deltas.data[i]));
+ return weight_deltas;
+}
+
+struct vec v_relu_bp(struct vec vec_in, struct vec deltas) {
+ for (size_t i = 0; i < deltas.n; i++)
+ deltas.data[i] = (vec_in.data[i] > 0.) ? deltas.data[i] : 0.;
+ return deltas;
+}
+
+// Pseudo-random float, approximately uniform in [-1, 1].
+static float randf(void) {
+ int centered = rand() - (RAND_MAX / 2);
+ return ((float)centered) / (RAND_MAX / 2);
+}
+
+struct mat m_random(size_t rows, size_t cols) {
+ struct mat mat = {rows, cols, {0}};
+ for (size_t i = 0; i < rows; i++)
+ for (size_t j = 0; j < cols; j++)
+ mat.data[i][j] = randf();
+ return mat;
+}
+
+void print_vec(struct vec vec) {
+ printf("[");
+ for (size_t i = 0; i < vec.n; i++)
+ printf("%f, ", vec.data[i]);
+ printf("]\n");
+}
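
Below is a minimal usage sketch of how these operations compose into one dense
layer with a ReLU and a single gradient step. It is not part of the commit
above: it assumes nn.h defines struct vec as a count n plus a fixed-capacity
float data[] array and struct mat as rows/cols plus a fixed-capacity data[][]
array (which is how nn.c uses them), that the small dimensions below fit those
capacities, and that <stdio.h>/<stdlib.h> come in through nn.h.

#include "nn.h"

// One dense layer with a ReLU, plus a single gradient step.
int main(void) {
    struct mat W = m_random(4, 3);            // 4 outputs, 3 inputs
    struct vec x = {3, {1.0f, -2.0f, 0.5f}};  // example input

    // Forward pass: pre-activation z = W x, then h = relu(z).
    struct vec z = mv(W, x);
    struct vec h = v_relu(z);
    print_vec(h);

    // Backward pass, pretending dL/dh is all ones.
    struct vec d_h = {4, {1.0f, 1.0f, 1.0f, 1.0f}};
    struct vec d_z = v_relu_bp(z, d_h);   // zero the deltas where z <= 0
    struct mat d_W = mv_bp_m(W, x, d_z);  // dL/dW = d_z * x^T
    struct vec d_x = mv_bp_v(W, x, d_z);  // dL/dx = W^T * d_z
    (void)d_x;                            // would feed an earlier layer

    // Gradient step W -= lr * d_W: scale the gradient in place, then add.
    float lr = 0.01f;
    for (size_t i = 0; i < d_W.rows; i++)
        for (size_t j = 0; j < d_W.cols; j++)
            d_W.data[i][j] *= -lr;
    add_mat(&W, d_W);
    return 0;
}

In a longer training loop the in-place scaling of d_W would typically live in
its own helper next to add_mat, and rand() would be seeded once with srand()
before the first call to m_random.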