Hands-On Analogy: Build a Mini Autodiff Engine
Using raw Python (no libraries), we’ll mirror TensorFlow’s core idea: a Tensor that remembers how it was made, so we can backpropagate through it. What follows is a minimal scalar autodiff engine. It is not TensorFlow, but it rhymes with the GradientTape workflow: track ops → compute loss → call backward() → update weights.
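For reference, here is roughly what that workflow looks like with the real GradientTape (a sketch assuming TensorFlow 2.x; the variable names, values, and learning rate are illustrative, not from the original):

import tensorflow as tf

w = tf.Variable(0.0)
b = tf.Variable(0.0)
x, y_true = tf.constant(2.0), tf.constant(5.0)

with tf.GradientTape() as tape:         # 1) track ops
    y_pred = w * x + b
    loss = (y_pred - y_true) ** 2       # 2) compute loss
dw, db = tape.gradient(loss, [w, b])    # 3) gradients (the "backward" step)
w.assign_sub(0.1 * dw)                  # 4) update weights
b.assign_sub(0.1 * db)

Our mini engine reproduces the same four steps with nothing but plain Python objects.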
class Tensor:
    def __init__(self, data, parents=(), op="leaf"):
        self.data = data               # the value: a plain Python float
        self.grad = 0.0                # dL/d(this node); scalar engine for clarity
        self.parents = parents         # upstream nodes that produced this one
        self.op = op                   # op name, for debugging
        self._backward = lambda: None  # leaves have nothing to propagate

    def __add__(self, other):
        other = other if isinstance(other, Tensor) else Tensor(other)
        out = Tensor(self.data + other.data, (self, other), "add")
        def _backward():
            self.grad += out.grad            # d(a+b)/da = 1
            other.grad += out.grad           # d(a+b)/db = 1
        out._backward = _backward
        return out

    def __mul__(self, other):
        other = other if isinstance(other, Tensor) else Tensor(other)
        out = Tensor(self.data * other.data, (self, other), "mul")
        def _backward():
            self.grad += other.data * out.grad   # d(a*b)/da = b
            other.grad += self.data * out.grad   # d(a*b)/db = a
        out._backward = _backward
        return out

    def relu(self):
        out = Tensor(self.data if self.data > 0 else 0.0, (self,), "relu")
        def _backward():
            self.grad += (1.0 if self.data > 0 else 0.0) * out.grad
        out._backward = _backward
        return out

    def backward(self):
        # Topologically sort the graph, then run each node's _backward
        # from the output back toward the leaves.
        topo, visited = [], set()
        def build(node):
            if node not in visited:
                visited.add(node)
                for parent in node.parents:
                    build(parent)
                topo.append(node)
        build(self)
        self.grad = 1.0                  # seed: dL/dL = 1
        for node in reversed(topo):
            node._backward()
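A quick sanity check against a derivative we can do by hand (the expression and input value are just illustrative):

# f(x) = relu(x*x + x) at x = 3.0  →  f = 12.0, df/dx = 2x + 1 = 7.0
x = Tensor(3.0)
f = (x * x + x).relu()
f.backward()
print(f.data, x.grad)   # expected: 12.0 7.0

Note that x feeds into the graph more than once; the += in each _backward is what makes those contributions accumulate correctly.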
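Finally, the same four-step rhythm as a tiny training loop: fit y ≈ w·x + b to a handful of points with gradient descent. This is a sketch; the data, learning rate, and step count are illustrative assumptions:

# Toy data roughly following y = 2x + 1
xs = [0.0, 1.0, 2.0, 3.0]
ys = [1.0, 3.0, 5.0, 7.0]

w = Tensor(0.0)
b = Tensor(0.0)
lr = 0.05

for step in range(200):
    # 1) Track ops: build the mean-squared-error graph for this step.
    loss = Tensor(0.0)
    for x, y in zip(xs, ys):
        pred = w * x + b
        err = pred + (-y)            # pred - y (the engine has no __sub__)
        loss = loss + err * err
    loss = loss * (1.0 / len(xs))    # 2) compute loss

    # 3) Backprop: clear old parameter gradients, then call backward().
    w.grad, b.grad = 0.0, 0.0
    loss.backward()

    # 4) Update weights with plain gradient descent.
    w.data -= lr * w.grad
    b.data -= lr * b.grad

    if step % 50 == 0:
        print(f"step={step} loss={loss.data:.4f} w={w.data:.3f} b={b.data:.3f}")

After a couple hundred steps, w and b should drift toward the slope and intercept of the toy data.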