From 1c42acca2491769a261de87be4904ba12ae5637e Mon Sep 17 00:00:00 2001
From: Raghuram Subramani
Date: Wed, 27 Aug 2025 09:39:20 -0400
Subject: .

---
 projects/autograd.html | 104 ++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 104 insertions(+)
 create mode 100644 projects/autograd.html

diff --git a/projects/autograd.html b/projects/autograd.html
new file mode 100644
index 0000000..4fe4ae0
--- /dev/null
+++ b/projects/autograd.html
@@ -0,0 +1,104 @@
+Autograd | COMPROMYSE
+
+Autograd
+
+An implementation of autograd / backpropagation.
+
+Python
+
+SOURCE
+
+All you need to run a simple neural network using autograd is the following code:
+
+The code defines a dataset X and its expected output (or ground truth) y, and builds a small multilayer perceptron n = MLP(3, [ 4, 4, 1 ]). For 400 iterations it computes predictions, measures the mean-squared-error loss, zeroes the previous gradients (.zero_grad()), performs backward propagation (.backward()), and then applies the calculated gradients through .optimise() with a learning rate of 0.01.
+
+from src.nn import MLP
+from src.loss import mse
+
+X = [
+    [ 0.0, 1.0, 2.0 ],
+    [ 2.0, 1.0, 0.0 ],
+    [ 2.0, 2.0, 2.0 ],
+    [ 3.0, 3.0, 3.0 ]
+]
+
+y = [ 1.0, -1.0, 1.0, -1.0 ]
+n = MLP(3, [ 4, 4, 1 ])
+
+for i in range(400):
+    pred = [ n(x) for x in X ]
+    loss = mse(y, pred)
+    loss.zero_grad()
+    loss.backward()
+    n.optimise(0.01)
+
+print(pred)
+
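The MLP and mse used above come from the project's src/ package, which is not part of this commit, so their internals are not shown here. For orientation only, the core of a scalar autograd engine in this style is typically a small value/node class that records each operation and replays the chain rule in reverse; the sketch below illustrates that idea with hypothetical names (Value, _backward), not the project's actual API.

class Value:
    # One node in the computation graph: stores a scalar, its gradient,
    # and a closure that pushes the gradient back to its inputs.
    def __init__(self, data, _children=()):
        self.data = data
        self.grad = 0.0
        self._backward = lambda: None
        self._prev = set(_children)

    def __add__(self, other):
        other = other if isinstance(other, Value) else Value(other)
        out = Value(self.data + other.data, (self, other))
        def _backward():
            # d(a + b)/da = 1 and d(a + b)/db = 1
            self.grad += out.grad
            other.grad += out.grad
        out._backward = _backward
        return out

    def __mul__(self, other):
        other = other if isinstance(other, Value) else Value(other)
        out = Value(self.data * other.data, (self, other))
        def _backward():
            # chain rule: d(a * b)/da = b and d(a * b)/db = a
            self.grad += other.data * out.grad
            other.grad += self.data * out.grad
        out._backward = _backward
        return out

    def backward(self):
        # Topologically sort the graph, then apply each node's local
        # derivative in reverse order, starting from d(self)/d(self) = 1.
        topo, visited = [], set()
        def build(v):
            if v not in visited:
                visited.add(v)
                for child in v._prev:
                    build(child)
                topo.append(v)
        build(self)
        self.grad = 1.0
        for node in reversed(topo):
            node._backward()

# Example: z = x * y + x, so dz/dx = y + 1 = 4 and dz/dy = x = 2
x, y = Value(2.0), Value(3.0)
z = x * y + x
z.backward()
print(x.grad, y.grad)  # 4.0 3.0

A layer and an MLP can then be built as plain Python objects whose weights are Value instances, which is presumably what src.nn provides on top of a core like this.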
-- cgit v1.2.3