author | Raghuram Subramani &lt;raghus2247@gmail.com&gt; | 2024-06-09 08:50:49 +0530
committer | Raghuram Subramani &lt;raghus2247@gmail.com&gt; | 2024-06-09 08:50:49 +0530
commit | 194f7d40561485f5ac3a3556721cfbc542be3b07 (patch)
tree | 49edbcc9f1daf378396a8dd6a31c0078df3976f6
parent | 40240b0b383abc2d3e81e2bcfe5e4b6d6fdfec2a (diff)
Update
-rwxr-xr-x | gradient_descent.py (renamed from example.py) | 29
-rwxr-xr-x | neural_network.py | 17
-rw-r--r-- | src/nn.py | 37
3 files changed, 54 insertions, 29 deletions
diff --git a/example.py b/gradient_descent.py
index 66df486..3f7f124 100755
--- a/example.py
+++ b/gradient_descent.py
@@ -3,35 +3,6 @@
 from src.scalar import Scalar
 from src.graph import Graph
 
-# Manual Backpropagation
-
-# a = Scalar(2, label='a')
-# b = Scalar(-3, label='b')
-# c = Scalar(10, label='c')
-# f = Scalar(-2, label='f')
-#
-# d = a * b; d.label = 'd'
-# e = d + c; e.label = 'e'
-# L = e * f; L.label = 'L'
-#
-# print(f'L before gradient descent: {L.data}')
-#
-# L.backward()
-#
-# g = Graph(L)
-#
-# for x in [a, b, c, f]:
-#     x.data += 0.01 * x.grad
-#
-# d = a * b
-# e = d + c
-# L = e * f
-#
-# print(f'L after gradient descent: {L.data}')
-# g.show()
-
-# Neuron
-
 x1 = Scalar(2, label='x1')
 x2 = Scalar(0, label='x2')
 
diff --git a/neural_network.py b/neural_network.py
new file mode 100755
index 0000000..62f3f9c
--- /dev/null
+++ b/neural_network.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+
+from src.nn import MLP
+
+X = [
+    [ 0.0, 0.0, 0.0 ],
+    [ 1.0, 1.0, 1.0 ],
+    [ 2.0, 2.0, 2.0 ],
+    [ 3.0, 3.0, 3.0 ]
+]
+
+y = [ 1.0, 2.0, 3.0, 4.0 ] # X + 1
+
+n = MLP(3, [ 4, 4, 1 ])
+
+pred = [ n(x) for x in X ]
+print(pred)
diff --git a/src/nn.py b/src/nn.py
new file mode 100644
index 0000000..e5d20d4
--- /dev/null
+++ b/src/nn.py
@@ -0,0 +1,37 @@
+from .scalar import Scalar
+import random
+
+class Neuron:
+    def __init__(self, n_X):
+        self.n_X = n_X
+        self.w = [ Scalar(random.uniform(-1, 1)) for _ in range(n_X) ]
+        self.b = Scalar(random.uniform(-1, 1))
+
+    def __call__(self, X):
+        result = 0
+
+        for wi, Xi in zip(self.w, X):
+            result += wi * Xi
+
+        result += self.b
+
+        return result.tanh()
+
+class Layer:
+    def __init__(self, n_X, n_y):
+        self.neurons = [ Neuron(n_X) for _ in range(n_y) ]
+
+    def __call__(self, X):
+        result = [ n(X) for n in self.neurons ]
+        return result[0] if len(result) == 1 else result
+
+class MLP:
+    def __init__(self, n_X, layers):
+        sz = [ n_X ] + layers
+        self.layers = [ Layer(sz[i], sz[i + 1]) for i in range(len(layers)) ]
+
+    def __call__(self, X):
+        for layer in self.layers:
+            X = layer(X)
+
+        return X
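For context, the forward pass added in src/nn.py follows the usual micrograd-style layout: each Neuron computes tanh(w·x + b), a Layer applies n_y independent neurons to the same input, and MLP(3, [4, 4, 1]) chains layers of sizes 3→4, 4→4, 4→1. The sketch below mirrors that structure with plain floats and math.tanh instead of the repository's Scalar class, so there is no autograd here; it is an illustration of the shape of the computation under those assumptions, not the committed code.

```python
import math
import random

# Float-only mirror of the committed Neuron/Layer/MLP, for illustration.

class Neuron:
    def __init__(self, n_X):
        # One weight per input feature, plus a bias, all drawn uniformly from (-1, 1).
        self.w = [random.uniform(-1, 1) for _ in range(n_X)]
        self.b = random.uniform(-1, 1)

    def __call__(self, X):
        # tanh(w . X + b)
        return math.tanh(sum(wi * Xi for wi, Xi in zip(self.w, X)) + self.b)

class Layer:
    def __init__(self, n_X, n_y):
        # n_y neurons, each reading the same n_X inputs.
        self.neurons = [Neuron(n_X) for _ in range(n_y)]

    def __call__(self, X):
        out = [n(X) for n in self.neurons]
        return out[0] if len(out) == 1 else out

class MLP:
    def __init__(self, n_X, layers):
        sz = [n_X] + layers  # e.g. [3, 4, 4, 1]
        self.layers = [Layer(sz[i], sz[i + 1]) for i in range(len(layers))]

    def __call__(self, X):
        for layer in self.layers:
            X = layer(X)  # each layer's output feeds the next
        return X

if __name__ == "__main__":
    X = [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0], [2.0, 2.0, 2.0], [3.0, 3.0, 3.0]]
    n = MLP(3, [4, 4, 1])
    print([n(x) for x in X])  # four values in (-1, 1): the last layer has one tanh neuron
```

In the actual commit the weights, bias, and activations are Scalar objects rather than floats, which is what would later allow calling backward() on a loss and nudging the parameters by their gradients, as the removed manual-backpropagation comments in example.py did.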