A Tiny Neuron
Here is a tiny neuron implementation: a single neuron with ReLU activation, trained with plain gradient descent to fit y ≈ 2*x + 1 on synthetic data, using only the standard library. One caveat: ReLU clips negative outputs, so the neuron can only match the target where w*x + b is positive, and the gradient is zero elsewhere.
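For each sample the neuron computes z = w*x + b and a = relu(z), and the per-sample loss is L = 0.5*(a - y)^2. The chain rule gives the gradients the training loop accumulates: dL/dw = (a - y) * relu'(z) * x and dL/db = (a - y) * relu'(z), where relu'(z) is 1 for z > 0 and 0 otherwise.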
import random

def relu(x):
    return x if x > 0 else 0.0

def relu_grad(x):
    return 1.0 if x > 0 else 0.0

def train_single_neuron(epochs=2000, lr=0.01, seed=42):
    random.seed(seed)
    # Generate simple 1D data: y = 2x + 1 + noise
    xs = [random.uniform(-2.0, 2.0) for _ in range(200)]
    ys = [2.0 * x + 1.0 + random.gauss(0, 0.1) for x in xs]
    # Parameters of a 1D neuron: w and b
    w = random.uniform(-1.0, 1.0)
    b = 0.0
    n = len(xs)
    for epoch in range(epochs):
        dw = 0.0
        db = 0.0
        loss = 0.0
        for x, y in zip(xs, ys):
            z = w * x + b
            a = relu(z)
            # Mean squared error (per sample)
            diff = a - y
            loss += 0.5 * diff * diff
            # Chain rule: dL/dw = (a - y) * relu'(z) * x, dL/db = (a - y) * relu'(z)
            g = diff * relu_grad(z)
            dw += g * x
            db += g
        # Plain gradient descent step on the averaged gradients
        w -= lr * dw / n
        b -= lr * db / n
        if epoch % 200 == 0:
            print(f"epoch {epoch:4d}  mean loss {loss / n:.4f}  w={w:.3f}  b={b:.3f}")
    return w, b

w, b = train_single_neuron()
print(f"learned: y ≈ {w:.2f}*x + {b:.2f}")
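As a quick sanity check on the analytic gradients, a finite-difference comparison can be run against the same per-sample loss. The helper below is not part of the original script, just a minimal sketch that reuses relu and relu_grad from above; the test values of w, b, x, and y are arbitrary.

# Illustrative finite-difference check of dL/dw for one sample
# (not part of the original script).
def loss_at(w, b, x, y):
    a = relu(w * x + b)
    return 0.5 * (a - y) ** 2

def check_grad(w=0.5, b=0.2, x=1.5, y=4.0, eps=1e-6):
    z = w * x + b
    analytic = (relu(z) - y) * relu_grad(z) * x
    # Central difference approximates dL/dw numerically
    numeric = (loss_at(w + eps, b, x, y) - loss_at(w - eps, b, x, y)) / (2 * eps)
    print(f"analytic {analytic:.6f}  numeric {numeric:.6f}")

check_grad()

The two numbers should agree to several decimal places away from the ReLU kink at z = 0, where the derivative is not defined and the check is expected to disagree.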