Two-Layer Network Implementation
Here is a minimal 2-layer MLP for binary classification on a toy dataset using the standard library only.
xxxxxxxxxx
121
train_mlp()
import random
import math
def sigmoid(x):
    """Logistic sigmoid activation (used on the output layer as a probability).

    Numerically stable: the naive 1/(1+exp(-x)) overflows for large
    negative x (math.exp raises OverflowError near exp(710)), so we
    branch on the sign and only ever exponentiate a non-positive value.
    """
    if x >= 0:
        return 1.0 / (1.0 + math.exp(-x))
    # x < 0: exp(x) is in (0, 1), cannot overflow
    z = math.exp(x)
    return z / (1.0 + z)
def dsigmoid(y):
    """Sigmoid derivative expressed through the activation itself.

    Given y = sigmoid(x), the derivative d(sigmoid)/dx equals y * (1 - y);
    taking the already-computed output avoids re-evaluating the exponential
    during backpropagation.
    """
    return y * (1.0 - y)
def relu(x):
    """Rectified linear unit: pass positive inputs through, clamp the rest to 0."""
    if x > 0:
        return x
    return 0.0
def relu_grad(x):
    """Subgradient of ReLU with respect to its pre-activation input x.

    Returns 1.0 where the unit is active (x > 0) and 0.0 otherwise; the
    convention at x == 0 is 0.0.
    """
    return float(x > 0)
def dot(a, b):
    """Inner product of two equal-length vectors (0 for empty input)."""
    total = 0
    for ai, bi in zip(a, b):
        total += ai * bi
    return total
def matvec(W, v):
    """Matrix-vector product.

    W is a list of rows; the result is the vector whose i-th entry is the
    inner product of row i with v.
    """
    return [sum(w * x for w, x in zip(row, v)) for row in W]
def add(v, b):
    """Elementwise sum of two equal-length vectors (e.g. activations + bias)."""
    out = []
    for left, right in zip(v, b):
        out.append(left + right)
    return out
def outer(u, v):
    """Outer product u * v^T, returned as a nested list of rows.

    Row i of the result is u[i] scaled across every component of v;
    used to form weight-gradient matrices from activation/error vectors.
    """
    rows = []
    for ui in u:
        rows.append([ui * vj for vj in v])
    return rows
OUTPUT
:001 > Cmd/Ctrl-Enter to run, Cmd/Ctrl-/ to comment