Minor updates
This commit is contained in:
parent
2c4f94130c
commit
b11b351d7e
|
@ -156,6 +156,7 @@ proc mse(a, b: Matrix[float]): float =
|
|||
func dxMSE(x, y: Matrix[float]): Matrix[float] =
  ## Gradient of the (un-averaged) squared error with respect to `x`:
  ## d/dx (x - y)^2 = 2 * (x - y), evaluated element-wise.
  result = 2.0 * (x - y)
|
||||
|
||||
# Vectorized activation functions (applied element-wise over matrices)
|
||||
|
||||
func sigmoid(input: Matrix[float]): Matrix[float] =
  ## Element-wise logistic sigmoid: 1 / (1 + e^(-x)).
  input.apply(func (x: float): float = 1 / (1 + exp(-x)), axis = -1)
|
||||
|
||||
|
@ -166,12 +167,12 @@ func softmax(input: Matrix[float]): Matrix[float] =
|
|||
var input = input - input.max()
|
||||
result = input.apply(math.exp, axis = -1) / input.apply(math.exp, axis = -1).sum()
|
||||
|
||||
|
||||
func softmaxDerivative(input: Matrix[float]): Matrix[float] =
  ## Jacobian of softmax for a single sample.
  ##
  ## The input is viewed as a column vector `s` (shape cols x 1); the
  ## Jacobian is diag(s) - s * s^T.
  let column = input.reshape(input.shape.cols, 1)
  result = column.diagflat() - column.dot(column.transpose())
|
||||
|
||||
|
||||
# TODO: add derivative implementations for the activation functions below
|
||||
func step(input: Matrix[float]): Matrix[float] {.used.} =
  ## Heaviside step activation: 0.0 for negative inputs, 1.0 otherwise,
  ## applied element-wise.
  ##
  ## NOTE(review): the previous body returned `x` (not 1.0) for x >= 0,
  ## which made `step` an exact duplicate of `relu` below rather than a
  ## step function; fixed to the actual Heaviside step.
  input.apply(proc (x: float): float = (if x < 0.0: 0.0 else: 1.0), axis = -1)
|
||||
func silu(input: Matrix[float]): Matrix[float] {.used.} =
  ## SiLU (a.k.a. swish) activation: x * sigmoid(x), applied element-wise.
  ##
  ## NOTE(review): the previous body computed plain sigmoid
  ## (1 / (1 + exp(-x))), omitting the leading `x` factor that
  ## distinguishes SiLU from sigmoid.
  input.apply(proc (x: float): float = x / (1 + exp(-x)), axis = -1)
|
||||
func relu(input: Matrix[float]): Matrix[float] {.used.} =
  ## Rectified linear unit: max(0, x), applied element-wise.
  result = input.apply(func (v: float): float = max(0.0, v), axis = -1)
|
||||
|
|
Loading…
Reference in New Issue