A mini deep learning library built around an automatic differentiation engine for tensors. It implements backpropagation over a dynamically constructed DAG and ships with a lightweight neural networks module.
conda create -n mini-autograd python=3.9.20
conda activate mini-autograd
cd /PATH/TO/mini-autograd
pip install -r requirements.txt
Step 1: Python 3.9.20 is recommended, so install it first if it is not already available on your system.
python -m venv env
source env/bin/activate
pip install -r requirements.txt
pytest -s
Note: You can also run the *.ipynb notebooks individually to see the implementation in action.
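For example, an additional test in the same style could check that gradients flow through addition. This is a minimal sketch rather than one of the shipped tests; it only assumes the Tensor API shown in the usage example below, where grad.data can be iterated over.

from autograd.tensor import Tensor

def test_add_backward():
    a = Tensor([1.0, 2.0, 3.0], requires_grad=True)
    b = Tensor([4.0, 5.0, 6.0], requires_grad=True)
    c = a + b
    c.backward(Tensor([1.0, 1.0, 1.0]))  # seed with an upstream gradient of ones
    # d(a + b)/da and d(a + b)/db both equal the upstream gradient
    assert [float(g) for g in a.grad.data] == [1.0, 1.0, 1.0]
    assert [float(g) for g in b.grad.data] == [1.0, 1.0, 1.0]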
from autograd.tensor import Tensor
from autograd.functions import pow
a = Tensor([[1, 2, 3], [4, 5, 6]], requires_grad=True)
b = Tensor([[10, 20, 30], [41, 51, 61]], requires_grad=True)
c = a + b
d = Tensor([[1, 2, 1], [1, 1, 2]], requires_grad=True)
e = c * d - a
f = Tensor([[-1.0, 1.0], [2.0, -1.0], [-1.9, 1.0]], requires_grad=True)
h = pow(e @ f, 2)  # elementwise square of the matrix product
h.backward(Tensor([[-1.0, 1.0], [2.0, -1.0]]))  # seed backward with an upstream gradient
result = [[round(float(x), 3) for x in row] for row in h.data]
da = [[round(float(x), 3) for x in row] for row in a.grad.data]
print("Forward Pass via custom autograd engine", result)
print("Back-propagation dh/da (Project)", da)
from autograd.tensor import Tensor

# sum_of_squares is not defined in this snippet; a typical definition
# (assuming Tensor supports elementwise * and a sum() reduction) would be:
#     def sum_of_squares(t: Tensor) -> Tensor:
#         return (t * t).sum()

x = Tensor([12, 0, 11, 5, 8, -9, 2], requires_grad=True)  # initialization
print(sum_of_squares(x).data)

y_vals = []
for i in range(20):
    x.zero_grad()                    # clear the gradient from the previous step
    y = sum_of_squares(x)
    y.backward()                     # y is a scalar, so no upstream gradient is needed
    delta_x = Tensor(0.1) * x.grad   # gradient-descent step with learning rate 0.1
    x = x - delta_x
    y_vals.append(y.data)
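The values collected in y_vals should decrease steadily as gradient descent converges. If matplotlib is available (an assumption here, not a stated requirement), the loss curve can be plotted:

import matplotlib.pyplot as plt

plt.plot(y_vals)  # one point per gradient-descent iteration
plt.xlabel("iteration")
plt.ylabel("sum of squares")
plt.show()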
Simple Neural Network Training
from autograd.tensor import Tensor, Parameter
from nn.optimizer import SGD
from nn.module import Module
class Model(Module):
    def __init__(self) -> None:
        self.w = Parameter(3)  # weight parameter with 3 entries
        self.b = Parameter()   # scalar bias

    def predict(self, inputs: Tensor) -> Tensor:
        return inputs @ self.w + self.b
optimizer = SGD(lr=0.001)
batch_size = 32
model = Model()
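A mini-batch training loop could then look like the sketch below. The tensors x_data and y_data are hypothetical placeholders, and the names Module.zero_grad(), SGD.step(model), Tensor slicing, and Tensor.sum() are assumptions about the API rather than confirmed behavior:

# Sketch of a training loop; x_data / y_data are hypothetical training tensors
for epoch in range(100):
    epoch_loss = 0.0
    for start in range(0, len(x_data.data), batch_size):
        end = start + batch_size
        model.zero_grad()                  # reset parameter gradients (assumed API)
        inputs = x_data[start:end]         # mini-batch of inputs (assumes slicing support)
        predicted = model.predict(inputs)
        errors = predicted - y_data[start:end]
        loss = (errors * errors).sum()     # sum of squared errors (assumes sum())
        loss.backward()
        epoch_loss += float(loss.data)
        optimizer.step(model)              # SGD parameter update (assumed API)
    print(epoch, epoch_loss)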