aboutsummaryrefslogtreecommitdiff
path: root/example.py
diff options
context:
space:
mode:
Diffstat (limited to 'example.py')
-rwxr-xr-xexample.py92
1 file changed, 57 insertions, 35 deletions
diff --git a/example.py b/example.py
index 4c00845..502db98 100755
--- a/example.py
+++ b/example.py
@@ -1,50 +1,72 @@
#!/usr/bin/env python
from src.scalar import Scalar
+from src.graph import Graph
-h = 0.0001
+# Manual Backpropagation
-# def one():
-# a = Scalar(2, label='a')
-# b = Scalar(-3, label='b')
-# c = Scalar(10, label='c')
-# f = Scalar(-2, label='f')
+# a = Scalar(2, label='a')
+# b = Scalar(-3, label='b')
+# c = Scalar(10, label='c')
+# f = Scalar(-2, label='f')
+#
+# d = a * b; d.label = 'd'
+# e = d + c; e.label = 'e'
+# L = e * f; L.label = 'L'
#
-# d = a * b; d.label = 'd'
-# e = d + c; e.label = 'e'
-# L = e * f; L.label = 'L'
+# print(f'L before gradient descent: {L.data}')
#
-# return L.data
+# L.grad = 1.0
+# e.grad = -2.0
+# f.grad = 4.0
+# d.grad = -2.0
+# c.grad = -2.0
+# a.grad = 6.0
+# b.grad = -4.0
#
-# def two():
-# a = Scalar(2, label='a')
-# b = Scalar(-3, label='b')
-# c = Scalar(10, label='c')
-# f = Scalar(-2, label='f')
+# g = Graph(L)
#
-# d = a * b; d.label = 'd'
-# d.data += h
-# e = d + c; e.label = 'e'
-# L = e * f; L.label = 'L'
+# for x in [a, b, c, f]:
+# x.data += 0.01 * x.grad
#
-# return L.data
+# d = a * b
+# e = d + c
+# L = e * f
#
-# print((two() - one()) / h)
+# print(f'L after gradient descent: {L.data}')
+# g.show()
-a = Scalar(2, label='a')
-b = Scalar(-3, label='b')
-c = Scalar(10, label='c')
-f = Scalar(-2, label='f')
+# Neuron
-d = a * b; d.label = 'd'
-e = d + c; e.label = 'e'
-L = e * f; L.label = 'L'
+x1 = Scalar(2, label='x1')
+x2 = Scalar(0, label='x2')
-L.grad = 1.0
-e.grad = -2.0
-f.grad = 4.0
-d.grad = -2.0
-c.grad = -2.0
+w1 = Scalar(-3, label='w1')
+w2 = Scalar(1, label='w2')
-from src.graph import Graph
-Graph(L).show()
+b = Scalar(6.7, label='b')
+
+x1w1 = x1 * w1; x1w1.label = 'x1w1'
+x2w2 = x2 * w2; x2w2.label = 'x2w2'
+
+x1w1x2w2 = x1w1 + x2w2; x1w1x2w2.label = 'x1w1 + x2w2'
+
+L = x1w1x2w2 + b; L.label = 'L'
+o = L.tanh(); o.label = 'o'
+
+# o.grad = 1.0
+# L.grad = 1 - (o.data ** 2)
+# b.grad = L.grad
+# x1w1x2w2.grad = L.grad
+# x1w1.grad = x1w1x2w2.grad
+# x2w2.grad = x1w1x2w2.grad
+#
+# x1.grad = w1.data * x1w1.grad
+# w1.grad = x1.data * x1w1.grad
+#
+# x2.grad = w2.data * x2w2.grad
+# w2.grad = x2.data * x2w2.grad
+
+o.backward()
+
+Graph(o).show()