Gradient Descent

Micrograd

Code

  import math

  class Value:
    def __init__(self, data, _children=(), _op=''):
      self.data = data
      self.grad = 0.0
      self._backward = lambda: None
      self._prev = set(_children)
      self._op = _op

    def __repr__(self):
      return f"Value(data={self.data}, grad={self.grad})"

    def __add__(self, other):
      other = other if isinstance(other, Value) else Value(other)
      out = Value(self.data + other.data, (self, other), '+')

      def _backward():
        self.grad += 1.0 * out.grad
        other.grad += 1.0 * out.grad
      out._backward = _backward
      return out

    def __radd__(self, other):
      return self + other

    def __mul__(self, other):
      other = other if isinstance(other, Value) else Value(other)
      out = Value(self.data * other.data, (self, other), '*')

      def _backward():
        self.grad += other.data * out.grad
        other.grad += self.data * out.grad
      out._backward = _backward
      return out

    def __rmul__(self, other): # karpathy knows too much
      return self * other

    def __truediv__(self, other):
      return self * other**-1

    def __sub__(self, other):
      return self + (other * -1) #LOL

    def __pow__(self, other):
      assert isinstance(other, (int, float)), "only supporting int/float powers for now"
      out = Value(self.data ** other, (self, ), f'**{other}')

      def _backward():
        # power rule: d/dx x**n = n * x**(n-1)
        self.grad += other * self.data**(other-1) * out.grad
      out._backward = _backward
      return out

    def tanh(self):
      x = self.data
      t = (math.exp(2*x)-1)/(math.exp(2*x)+1)
      out = Value(t, (self, ), 'tanh')
      def _backward():
        # d/dx tanh(x) = 1 - tanh(x)**2
        self.grad += (1 - t**2) * out.grad
      out._backward = _backward
      return out

    def exp(self):
      x = self.data
      out = Value(math.exp(x), (self, ), 'exp')
      def _backward():
        # d/dx exp(x) = exp(x), which is exactly out.data
        self.grad += out.data * out.grad
      out._backward = _backward
      return out

    def backward(self):
      """implements topological sort and calls the _backward method in a reversed order"""
      topo = []
      visited = set()
      def build_topo(v):
        if v not in visited:
          visited.add(v)
          for child in v._prev:
            build_topo(child)
          topo.append(v)
      build_topo(self)

      self.grad = 1.0
      for node in reversed(topo):
        node._backward()
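
A quick sanity check of the engine (my own sketch, not from the video): build a tiny expression, run backward, and compare the grads against hand-derived derivatives.

  # gradient check: c = a*b + tanh(b)
  # expect dc/da = b = -3.0 and dc/db = a + (1 - tanh(b)**2) ~= 2.0099
  a = Value(2.0)
  b = Value(-3.0)
  c = a * b + b.tanh()
  c.backward()
  print(a.grad, b.grad)
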
  import random
  class Neuron:
    def __init__(self, nin):
      # nin = number of inputs
      self.w = [Value(random.uniform(-1,1)) for _ in range(nin)]
      self.b = Value(random.uniform(-1,1))

    def parameters(self):
      return self.w + [self.b]

    def __call__(self, x):
      # this is wild; you can call an instance of Neuron as n(x) :O
      # x are the inputs, we are taking a dot product:
      act = sum((wi*xi for wi, xi in zip(self.w, x)), self.b) # sum can take an optional starting value: self.b
      out = act.tanh()
      return out
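
Because __call__ is defined, a Neuron instance really is callable; a minimal check (the inputs here are made up):

  nrn = Neuron(2)
  print(nrn([1.0, -2.0]))  # a single Value, squashed into (-1, 1) by tanh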

  class Layer:
    def __init__(self, nin, nout):
      self.neurons = [Neuron(nin) for _ in range(nout)]

    def __call__(self, x):
      outs = [n(x) for n in self.neurons]
      return outs[0] if len(outs) == 1 else outs

    def parameters(self):
      return [p for neuron in self.neurons for p in neuron.parameters()]

  class MLP:
    def __init__(self, nin, nouts):
      sz = [nin] + nouts
      self.layers = [Layer(sz[i], sz[i+1]) for i in range(len(nouts))]

    def __call__(self, x):
      for layer in self.layers:
        x = layer(x)
      return x

    def parameters(self):
      return [p for layer in self.layers for p in layer.parameters()]
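
A quick way to convince yourself the wiring is right is to count parameters: MLP(3, [4,4,1]) should hold (3*4+4) + (4*4+4) + (4*1+1) = 41 of them. A minimal check:

  assert len(MLP(3, [4,4,1]).parameters()) == 41  # 16 + 20 + 5
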
  n = MLP(3, [4,4,1])
  xs = [
      [2.0, 3.0, -1.0],
      [3.0, -1.0, 0.5],
      [0.5, 1.0, 1.0],
      [1.0, 1.0, -1.0]
      ]
  ys = [1.0, -1.0, -1.0, 1.0]
  for k in range(20):
    ypred = [n(x) for x in xs]
    loss = sum((yout - ygt)**2 for ygt, yout in zip(ys, ypred))

    for p in n.parameters(): # zero the grads first; otherwise they accumulate across steps and act like an unintended momentum
      p.grad = 0.0
    loss.backward()
    
    for p in n.parameters():
      p.data += -0.01 * p.grad
    print(k, loss.data)
  0 6.827977524064868
  1 6.564699072251051
  2 6.231394384341386
  3 5.851082578630887
  4 5.468387820425157
  5 5.114416471232759
  6 4.795337579218638
  7 4.5056247601912105
  8 4.237419335600638
  9 3.9834296130886995
  10 3.7377002240069004
  11 3.4958631215074445
  12 3.2552411075315213
  13 3.0149114257272585
  14 2.775727346928721
  15 2.5401826675264108
  16 2.3119463645904297
  17 2.0950143399155294
  18 1.8927285043107416
  19 1.7071191357642472
  ypred
  [Value(data=0.5774378654478666, grad=-0.8451242691042669),
   Value(data=-0.708228299005086, grad=0.583543401989828),
   Value(data=-0.2457719356156506, grad=1.5084561287686988),
   Value(data=0.06481569752237379, grad=-1.8703686049552524)]
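
Sanity check on those grads: since loss = sum((yout - ygt)**2), each prediction's grad should be 2*(yout - ygt); e.g. 2*(0.5774 - 1.0) ~= -0.8451, matching the first Value above. In code (a small check of my own):

  # d(loss)/d(yout) = 2*(yout - ygt) for squared error
  for ygt, yout in zip(ys, ypred):
    assert abs(yout.grad - 2*(yout.data - ygt)) < 1e-9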

References

Andrej Karpathy, "The spelled-out intro to neural networks and backpropagation: building micrograd": https://www.youtube.com/watch?v=VMj-3S1tku0&list=PLAqhIrjkxbuWI23v9cThsA9GvCAUhRvKZ
