# NNExperiments/src/main.nim

import nn/network
import nn/util/matrix
import std/math

# Mean squared error
proc mse(a, b: Matrix[float]): float =
  result = (b - a).apply(proc (x: float): float = pow(x, 2), axis = -1).sum() / len(a).float
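
# For example (illustrative values): with a = [1.0, 2.0] and b = [0.0, 2.0],
# mse(a, b) = ((0 - 1)^2 + (2 - 2)^2) / 2 = 0.5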

# Derivative of MSE with respect to the prediction
func dxMSE*(x, y: float): float = 2 * (x - y)
# Placeholder derivative that always returns zero (used as the
# activation derivative in the network below)
func dx*(x, y: float): float = 0.0

# A bunch of vectorized activation functions
func sigmoid*(input: Matrix[float]): Matrix[float] =
  result = input.apply(proc (x: float): float = 1 / (1 + exp(-x)), axis = -1)

func softmax*(input: Matrix[float]): Matrix[float] =
  # Shift by the maximum for numerical stability, then normalize the exponentials
  let shifted = input - input.max()
  let exps = shifted.apply(math.exp, axis = -1)
  result = exps / exps.sum()
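# For example, a uniform input maps to a uniform distribution:
# softmax([1.0, 1.0]) -> [0.5, 0.5]
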
# Heaviside step: 0 for negative inputs, 1 otherwise
func step*(input: Matrix[float]): Matrix[float] = input.apply(proc (x: float): float = (if x < 0.0: 0.0 else: 1.0), axis = -1)
# SiLU (sigmoid-weighted linear unit): x * sigmoid(x)
func silu*(input: Matrix[float]): Matrix[float] = input.apply(proc (x: float): float = x / (1 + exp(-x)), axis = -1)
# ReLU: max(0, x)
func relu*(input: Matrix[float]): Matrix[float] = input.apply(proc (x: float): float = max(0.0, x), axis = -1)

# Hyperbolic tangent: (e^(2x) - 1) / (e^(2x) + 1)
func htan*(input: Matrix[float]): Matrix[float] =
  let f = proc (x: float): float =
    let temp = exp(2 * x)
    result = (temp - 1) / (temp + 1)
  result = input.apply(f, axis = -1)
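
# Illustrative spot checks for the activations above (hypothetical values):
#   sigmoid(0.0) -> 0.5
#   htan(0.0)    -> 0.0
#   relu(-1.0)   -> 0.0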

# A small MLP (layer sizes 2 -> 3 -> 2 -> 3) with a softmax output layer
var mlp = newNeuralNetwork(@[newDenseLayer(2, 3, newActivation(sigmoid, dx)),
                             newDenseLayer(3, 2, newActivation(sigmoid, dx)),
                             newDenseLayer(2, 3, newActivation(softmax, dx))],
                           lossFunc=newLoss(mse, dxMSE),
                           learnRate=0.05,
                           weightRange=(start: -1.0, stop: 1.0),
                           biasRange=(start: -10.0, stop: 10.0),
                           momentum=0.55)

# Run a single forward pass on a sample input
echo mlp.feedforward(newMatrix[float](@[1.0, 2.0]))
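
# Minimal sanity check (a sketch, assuming feedforward returns a Matrix[float]
# and Matrix.sum works as used in mse above): the softmax output layer
# should yield values summing to ~1.0
echo mlp.feedforward(newMatrix[float](@[1.0, 2.0])).sum()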