Added more activations & their derivatives

Mattia Giambirtone 2023-03-21 19:26:06 +01:00
parent e3265fac68
commit 7189a21b05
Signed by: nocturn9x
GPG Key ID: 8270F9F467971E59
1 changed file with 12 additions and 7 deletions


@@ -277,10 +277,13 @@ proc mse(a, b: Matrix[float]): float =
 # Derivative of MSE
 func dxMSE(x, y: Matrix[float]): Matrix[float] = 2.0 * (x - y)
+func sigmoid(x: float): float = 1 / (1 + exp(-x))
 # A bunch of vectorized activation functions
 func sigmoid(input: Matrix[float]): Matrix[float] =
-  result = input.apply(proc (x: float): float = 1 / (1 + exp(-x)) , axis = -1)
+  result = input.apply(sigmoid, axis = -1)
 func sigmoidDerivative(input: Matrix[float]): Matrix[float] = sigmoid(input) * (1.0 - sigmoid(input))
@@ -299,25 +302,27 @@ func softmaxDerivative(input: Matrix[float]): Matrix[float] =
   # I _love_ stealing functions from numpy!
   result = input.diagflat() - input.dot(input.transpose())
-func relu(input: Matrix[float]): Matrix[float] {.used.} = input.apply(proc (x: float): float = max(0.0, x), axis = -1)
+func relu(input: Matrix[float]): Matrix[float] = input.apply(proc (x: float): float = max(0.0, x), axis = -1)
+func dxRelu(input: Matrix[float]): Matrix[float] = input.where(input > 0.0, 0.0)
-# TODO: Add derivatives for this stuff
 func step(input: Matrix[float]): Matrix[float] {.used.} = input.apply(proc (x: float): float = (if x < 0.0: 0.0 else: x), axis = -1)
-func silu(input: Matrix[float]): Matrix[float] {.used.} = input.apply(proc (x: float): float = 1 / (1 + exp(-x)), axis= -1)
+func silu(input: Matrix[float]): Matrix[float] = input.apply(proc (x: float): float = x * sigmoid(x), axis= -1)
+func dSilu(input: Matrix[float]): Matrix[float] = input.apply(proc (x: float): float = sigmoid(x) * (1 + x * (1 - sigmoid(x))), axis = -1)
-func htan(input: Matrix[float]): Matrix[float] {.used.} =
+func htan(input: Matrix[float]): Matrix[float] =
   let f = proc (x: float): float =
     let temp = exp(2 * x)
     result = (temp - 1) / (temp + 1)
   input.apply(f, axis = -1)
+func htanDx(input: Matrix[float]): Matrix[float] = input.apply(proc (x: float): float = 1 - (pow(tanh(x), 2)), axis = -1)
 {.push.}
 {.hints: off.} # So nim doesn't complain about the naming
 var Sigmoid* = newActivation(sigmoid, sigmoidDerivative)
 var Softmax* = newActivation(softmax, softmaxDerivative)
+var ReLU* = newActivation(relu, dxRelu)
+var SiLU* = newActivation(silu, dSilu)
+var HTan* = newActivation(htan, htanDx)
 var MSE* = newLoss(mse, dxMSE)
 {.pop.}
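
For reference, the activation/derivative pairs registered above correspond to the standard element-wise formulas below, with σ denoting the logistic sigmoid. This is a summary reconstructed from the code in the diff, not separate project documentation:

\[
\begin{aligned}
\sigma(x) &= \frac{1}{1 + e^{-x}}, & \sigma'(x) &= \sigma(x)\bigl(1 - \sigma(x)\bigr) \\
\operatorname{ReLU}(x) &= \max(0, x), & \operatorname{ReLU}'(x) &= \begin{cases} 1 & \text{if } x > 0 \\ 0 & \text{otherwise} \end{cases} \\
\operatorname{SiLU}(x) &= x\,\sigma(x), & \operatorname{SiLU}'(x) &= \sigma(x)\bigl(1 + x(1 - \sigma(x))\bigr) \\
\tanh(x) &= \frac{e^{2x} - 1}{e^{2x} + 1}, & \tanh'(x) &= 1 - \tanh^2(x)
\end{aligned}
\]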