import math
import random


class Value:
    def __init__(self, data, _children=(), _op=''):
        self.data = data
        self.grad = 0.0
        self._backward = lambda: None
        self._prev = set(_children)
        self._op = _op

    def __repr__(self):
        return f"Value(data={self.data}, grad={self.grad})"

    def __add__(self, other):
        other = other if isinstance(other, Value) else Value(other)
        out = Value(self.data + other.data, (self, other), '+')

        def _backward():
            self.grad += 1.0 * out.grad
            other.grad += 1.0 * out.grad
        out._backward = _backward
        return out

    def __radd__(self, other):
        return self + other

    def __mul__(self, other):
        other = other if isinstance(other, Value) else Value(other)
        out = Value(self.data * other.data, (self, other), '*')

        def _backward():
            self.grad += other.data * out.grad
            other.grad += self.data * out.grad
        out._backward = _backward
        return out

    def __rmul__(self, other):  # karpathy knows too much
        return self * other

    def __truediv__(self, other):
        return self * other**-1

    def __sub__(self, other):
        return self + (other * -1)  # LOL

    def __pow__(self, other):
        assert isinstance(other, (int, float)), "only supporting int/float powers"
        out = Value(self.data ** other, (self,), f'**{other}')

        def _backward():
            self.grad += other * self.data**(other - 1) * out.grad
        out._backward = _backward
        return out

    def tanh(self):
        x = self.data
        t = (math.exp(2*x) - 1) / (math.exp(2*x) + 1)
        out = Value(t, (self,), 'tanh')

        def _backward():
            self.grad += (1 - t**2) * out.grad
        out._backward = _backward
        return out

    def exp(self):
        x = self.data
        out = Value(math.exp(x), (self,), 'exp')

        def _backward():
            self.grad += out.data * out.grad  # d/dx e^x = e^x
        out._backward = _backward
        return out

    def backward(self):
        """Topologically sorts the graph, then calls each node's _backward in reverse order."""
        topo = []
        visited = set()

        def build_topo(v):
            if v not in visited:
                visited.add(v)
                for child in v._prev:
                    build_topo(child)
                topo.append(v)
        build_topo(self)

        self.grad = 1.0  # seed the output gradient: d(self)/d(self) = 1
        for node in reversed(topo):
            node._backward()


class Neuron:
    def __init__(self, nin):  # nin = number of inputs
        self.w = [Value(random.uniform(-1, 1)) for _ in range(nin)]
        self.b = Value(random.uniform(-1, 1))

    def parameters(self):
        return self.w + [self.b]

    def __call__(self, x):
        # this is wild; you can call an instance of Neuron as n(x) :O
        # x are the inputs, we are taking a dot product
        # (sum can take an optional starting value: self.b):
        act = sum((wi*xi for wi, xi in zip(self.w, x)), self.b)
        out = act.tanh()
        return out


class Layer:
    def __init__(self, nin, nout):
        self.neurons = [Neuron(nin) for _ in range(nout)]

    def __call__(self, x):
        outs = [n(x) for n in self.neurons]
        return outs[0] if len(outs) == 1 else outs

    def parameters(self):
        return [p for neuron in self.neurons for p in neuron.parameters()]


class MLP:
    def __init__(self, nin, nouts):
        sz = [nin] + nouts
        self.layers = [Layer(sz[i], sz[i+1]) for i in range(len(nouts))]

    def __call__(self, x):
        for layer in self.layers:
            x = layer(x)
        return x

    def parameters(self):
        return [p for layer in self.layers for p in layer.parameters()]
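
# A minimal training sketch (not part of the original source): the tiny
# dataset, learning rate (0.05), and step count (20) below are made up for
# illustration, assuming only the Value/MLP classes defined above. It shows
# the full loop: forward pass, zeroing grads, backward pass, and a plain
# gradient-descent update on each parameter.
if __name__ == "__main__":
    xs = [
        [2.0, 3.0, -1.0],
        [3.0, -1.0, 0.5],
        [0.5, 1.0, 1.0],
        [1.0, 1.0, -1.0],
    ]
    ys = [1.0, -1.0, -1.0, 1.0]  # desired targets

    n = MLP(3, [4, 4, 1])  # 3 inputs -> two hidden layers of 4 -> 1 output

    for step in range(20):
        # forward pass: squared-error loss over the whole dataset
        ypred = [n(x) for x in xs]
        loss = sum((yout - ygt)**2 for ygt, yout in zip(ys, ypred))

        # zero grads before backprop (grads accumulate via += in _backward)
        for p in n.parameters():
            p.grad = 0.0
        loss.backward()

        # gradient descent: nudge data against the gradient
        for p in n.parameters():
            p.data += -0.05 * p.grad

        print(step, loss.data)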