author     Raghuram Subramani <raghus2247@gmail.com>  2024-06-08 18:45:09 +0530
committer  Raghuram Subramani <raghus2247@gmail.com>  2024-06-08 18:45:09 +0530
commit     3bc24933f4128e76ccbd6e37155ff6cccb20a182 (patch)
tree       f4d4b01bb0a025b2114958ae778de7cc1275690c /example.py
parent     fd9a31f983ef2cfd5c1a89be25fa9e262082737b (diff)
Automate backward propagation
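
src/scalar.py is not part of this diff, so the backward() call the new example.py relies on is not shown here. As a rough, micrograd-style sketch of what such an automated backward pass usually looks like (the attributes _prev, holding each node's operands, and _backward, holding a per-node chain-rule closure, are assumptions, not code from this repository), it could be:

import math

class Scalar:
    def __init__(self, data, children=(), label=''):
        self.data = data
        self.grad = 0.0
        self.label = label
        self._prev = set(children)
        self._backward = lambda: None  # leaves have nothing to propagate

    def __add__(self, other):
        out = Scalar(self.data + other.data, (self, other))
        def _backward():
            # Addition routes the output gradient to both operands unchanged.
            self.grad += out.grad
            other.grad += out.grad
        out._backward = _backward
        return out

    def __mul__(self, other):
        out = Scalar(self.data * other.data, (self, other))
        def _backward():
            # Product rule: each factor gets the other factor's value
            # times the output gradient.
            self.grad += other.data * out.grad
            other.grad += self.data * out.grad
        out._backward = _backward
        return out

    def tanh(self):
        out = Scalar(math.tanh(self.data), (self,))
        def _backward():
            # d/dx tanh(x) = 1 - tanh(x)^2, matching the manual
            # L.grad = 1 - (o.data ** 2) line in the diff below.
            self.grad += (1 - out.data ** 2) * out.grad
        out._backward = _backward
        return out

    def backward(self):
        # Topologically sort the graph, seed the output gradient with 1.0,
        # then apply each node's local chain-rule step in reverse order.
        topo, visited = [], set()
        def build(node):
            if node not in visited:
                visited.add(node)
                for child in node._prev:
                    build(child)
                topo.append(node)
        build(self)
        self.grad = 1.0
        for node in reversed(topo):
            node._backward()

The key idea is that each operation records how to push its output gradient back to its inputs, and backward() replays those closures in reverse topological order, which is exactly what the hand-assigned gradients in the diff below were doing one node at a time.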
Diffstat (limited to '')
-rwxr-xr-x  example.py  92
1 file changed, 57 insertions, 35 deletions
diff --git a/example.py b/example.py
--- a/example.py
+++ b/example.py
@@ -1,50 +1,72 @@
 #!/usr/bin/env python
 from src.scalar import Scalar
+from src.graph import Graph
 
-h = 0.0001
+# Manual Backpropagation
 
-# def one():
-#     a = Scalar(2, label='a')
-#     b = Scalar(-3, label='b')
-#     c = Scalar(10, label='c')
-#     f = Scalar(-2, label='f')
+# a = Scalar(2, label='a')
+# b = Scalar(-3, label='b')
+# c = Scalar(10, label='c')
+# f = Scalar(-2, label='f')
+#
+# d = a * b; d.label = 'd'
+# e = d + c; e.label = 'e'
+# L = e * f; L.label = 'L'
 #
-#     d = a * b; d.label = 'd'
-#     e = d + c; e.label = 'e'
-#     L = e * f; L.label = 'L'
+# print(f'L before gradient descent: {L.data}')
 #
-#     return L.data
+# L.grad = 1.0
+# e.grad = -2.0
+# f.grad = 4.0
+# d.grad = -2.0
+# c.grad = -2.0
+# a.grad = 6.0
+# b.grad = -4.0
 #
-# def two():
-#     a = Scalar(2, label='a')
-#     b = Scalar(-3, label='b')
-#     c = Scalar(10, label='c')
-#     f = Scalar(-2, label='f')
+# g = Graph(L)
 #
-#     d = a * b; d.label = 'd'
-#     d.data += h
-#     e = d + c; e.label = 'e'
-#     L = e * f; L.label = 'L'
+# for x in [a, b, c, f]:
+#     x.data += 0.01 * x.grad
 #
-#     return L.data
+# d = a * b
+# e = d + c
+# L = e * f
 #
-# print((two() - one()) / h)
+# print(f'L after gradient descent: {L.data}')
+# g.show()
 
-a = Scalar(2, label='a')
-b = Scalar(-3, label='b')
-c = Scalar(10, label='c')
-f = Scalar(-2, label='f')
+# Neuron
 
-d = a * b; d.label = 'd'
-e = d + c; e.label = 'e'
-L = e * f; L.label = 'L'
+x1 = Scalar(2, label='x1')
+x2 = Scalar(0, label='x2')
 
-L.grad = 1.0
-e.grad = -2.0
-f.grad = 4.0
-d.grad = -2.0
-c.grad = -2.0
+w1 = Scalar(-3, label='w1')
+w2 = Scalar(1, label='w2')
 
-from src.graph import Graph
-Graph(L).show()
+b = Scalar(6.7, label='b')
+
+x1w1 = x1 * w1; x1w1.label = 'x1w1'
+x2w2 = x2 * w2; x2w2.label = 'x2w2'
+
+x1w1x2w2 = x1w1 + x2w2; x1w1x2w2.label = 'x1w1 + x2w2'
+
+L = x1w1x2w2 + b; L.label = 'L'
+o = L.tanh(); o.label = 'o'
+
+# o.grad = 1.0
+# L.grad = 1 - (o.data ** 2)
+# b.grad = L.grad
+# x1w1x2w2.grad = L.grad
+# x1w1.grad = x1w1x2w2.grad
+# x2w2.grad = x1w1x2w2.grad
+#
+# x1.grad = w1.data * x1w1.grad
+# w1.grad = x1.data * x1w1.grad
+#
+# x2.grad = w2.data * x2w2.grad
+# w2.grad = x2.data * x2w2.grad
+
+o.backward()
+
+Graph(o).show()
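
The deleted one()/two() helpers were estimating a derivative numerically with a small step h; the same finite-difference idea still makes a useful sanity check for the automated gradients. A sketch along those lines (the neuron() helper is hypothetical; it assumes Scalar's label argument is optional and compares against the w1.grad filled in by o.backward() above):

h = 0.0001

def neuron(w1_value):
    # Rebuild the same neuron as example.py with only w1 changed.
    x1, x2 = Scalar(2), Scalar(0)
    w1, w2 = Scalar(w1_value), Scalar(1)
    b = Scalar(6.7)
    return (x1 * w1 + x2 * w2 + b).tanh()

numeric = (neuron(-3 + h).data - neuron(-3).data) / h
# Both values should be close to x1 * (1 - o.data ** 2), about 1.27 here.
print(numeric, w1.grad)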