From b11b351d7e2689656964c7849e489f21b7e0f8bb Mon Sep 17 00:00:00 2001
From: Mattia Giambirtone
Date: Mon, 20 Mar 2023 12:40:01 +0100
Subject: [PATCH] Minor updates

---
 src/nn/network.nim | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/nn/network.nim b/src/nn/network.nim
index 1c98b79..47c2a38 100644
--- a/src/nn/network.nim
+++ b/src/nn/network.nim
@@ -156,6 +156,7 @@ proc mse(a, b: Matrix[float]): float =
 func dxMSE(x, y: Matrix[float]): Matrix[float] = 2.0 * (x - y)
 
 # A bunch of vectorized activation functions
+
 func sigmoid(input: Matrix[float]): Matrix[float] =
     result = input.apply(proc (x: float): float = 1 / (1 + exp(-x)) , axis = -1)
 
@@ -166,12 +167,12 @@ func softmax(input: Matrix[float]): Matrix[float] =
     var input = input - input.max()
     result = input.apply(math.exp, axis = -1) / input.apply(math.exp, axis = -1).sum()
 
-
 func softmaxDerivative(input: Matrix[float]): Matrix[float] =
     var input = input.reshape(input.shape.cols, 1)
     result = input.diagflat() - input.dot(input.transpose())
 
+# TODO: Add derivatives for this stuff
 func step(input: Matrix[float]): Matrix[float] {.used.} = input.apply(proc (x: float): float = (if x < 0.0: 0.0 else: x), axis = -1)
 func silu(input: Matrix[float]): Matrix[float] {.used.} = input.apply(proc (x: float): float = 1 / (1 + exp(-x)), axis= -1)
 func relu(input: Matrix[float]): Matrix[float] {.used.} = input.apply(proc (x: float): float = max(0.0, x), axis = -1)
 
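Not part of the patch itself: a minimal sketch of what the TODO added above might look like, assuming the same Matrix[float] and apply(proc, axis) API used by the existing activations. The dxRelu/dxSilu names are hypothetical, and dxSilu assumes the usual definition silu(x) = x * sigmoid(x).

# Hedged sketch for the TODO: hypothetical derivative helpers, written in the
# same one-liner style as the activation functions in the diff above.
func dxRelu(input: Matrix[float]): Matrix[float] {.used.} = input.apply(proc (x: float): float = (if x < 0.0: 0.0 else: 1.0), axis = -1)
# Assumes silu(x) = x * sigmoid(x), so silu'(x) = sigmoid(x) * (1 + x * (1 - sigmoid(x)))
func dxSilu(input: Matrix[float]): Matrix[float] {.used.} = input.apply(proc (x: float): float = (1 / (1 + exp(-x))) * (1 + x * (1 - 1 / (1 + exp(-x)))), axis = -1)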