| author | Raghuram Subramani <raghus2247@gmail.com> | 2024-06-09 08:50:49 +0530 |
|---|---|---|
| committer | Raghuram Subramani <raghus2247@gmail.com> | 2024-06-09 08:50:49 +0530 |
| commit | 194f7d40561485f5ac3a3556721cfbc542be3b07 (patch) | |
| tree | 49edbcc9f1daf378396a8dd6a31c0078df3976f6 /example.py | |
| parent | 40240b0b383abc2d3e81e2bcfe5e4b6d6fdfec2a (diff) | |
Update
Diffstat (limited to '')
| mode | file | lines changed |
|---|---|---|
| -rwxr-xr-x | gradient_descent.py (renamed from example.py) | 29 |

1 file changed, 0 insertions, 29 deletions
```diff
diff --git a/example.py b/gradient_descent.py
index 66df486..3f7f124 100755
--- a/example.py
+++ b/gradient_descent.py
@@ -3,35 +3,6 @@
 from src.scalar import Scalar
 from src.graph import Graph
 
-# Manual Backpropagation
-
-# a = Scalar(2, label='a')
-# b = Scalar(-3, label='b')
-# c = Scalar(10, label='c')
-# f = Scalar(-2, label='f')
-#
-# d = a * b; d.label = 'd'
-# e = d + c; e.label = 'e'
-# L = e * f; L.label = 'L'
-#
-# print(f'L before gradient descent: {L.data}')
-#
-# L.backward()
-#
-# g = Graph(L)
-#
-# for x in [a, b, c, f]:
-#     x.data += 0.01 * x.grad
-#
-# d = a * b
-# e = d + c
-# L = e * f
-#
-# print(f'L after gradient descent: {L.data}')
-# g.show()
-
-# Neuron
-
 x1 = Scalar(2, label='x1')
 x2 = Scalar(0, label='x2')
 
```
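For context, the block this commit deletes was a commented-out walkthrough of manual backpropagation: build L = (a * b + c) * f from Scalar values, call L.backward() to fill in gradients, nudge every leaf along its gradient, and rebuild L. The sketch below reproduces that step so it runs on its own; the Scalar class here is an assumption, a minimal micrograd-style stand-in for the repo's src.scalar.Scalar (whose real implementation isn't shown on this page), and the Graph / g.show() visualization is omitted.

```python
# Minimal stand-in for the repo's src.scalar.Scalar (assumed to be a
# micrograd-style autograd value); only + and * are needed for this example.
class Scalar:
    def __init__(self, data, label=''):
        self.data = data
        self.grad = 0.0
        self.label = label
        self._prev = ()
        self._backward = lambda: None

    def __add__(self, other):
        out = Scalar(self.data + other.data)
        out._prev = (self, other)
        def _backward():
            self.grad += out.grad    # d(out)/d(self) = 1
            other.grad += out.grad   # d(out)/d(other) = 1
        out._backward = _backward
        return out

    def __mul__(self, other):
        out = Scalar(self.data * other.data)
        out._prev = (self, other)
        def _backward():
            self.grad += other.data * out.grad   # d(out)/d(self) = other
            other.grad += self.data * out.grad   # d(out)/d(other) = self
        out._backward = _backward
        return out

    def backward(self):
        # Topologically sort the graph, then apply the chain rule in reverse.
        order, seen = [], set()
        def build(v):
            if v not in seen:
                seen.add(v)
                for p in v._prev:
                    build(p)
                order.append(v)
        build(self)
        self.grad = 1.0
        for v in reversed(order):
            v._backward()

a = Scalar(2, label='a')
b = Scalar(-3, label='b')
c = Scalar(10, label='c')
f = Scalar(-2, label='f')

L = (a * b + c) * f
print(f'L before gradient descent: {L.data}')   # (2*-3 + 10) * -2 = -8

L.backward()

# Nudge each leaf along +grad, exactly as in the removed snippet.  Note this
# is an ascent-style step: moving along the gradient *increases* L.
for x in [a, b, c, f]:
    x.data += 0.01 * x.grad

L = (a * b + c) * f
print(f'L after gradient descent: {L.data}')
```

With the diff's values, L starts at (2 · -3 + 10) · -2 = -8, and because the removed code adds 0.01 * grad to each leaf, the step moves L uphill to about -7.29 despite the "gradient descent" labels in the prints.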