feat (optim): add cross-entropy, sgd
ahmedalycess committed Aug 6, 2024
1 parent 1010ef3 commit 1eaf060
Showing 3 changed files with 59 additions and 0 deletions.
39 changes: 39 additions & 0 deletions Optimization/Loss.py
@@ -0,0 +1,39 @@
import numpy as np

class CrossEntropyLoss:
    def __init__(self):
        self.input = None
        # Smallest representable float increment; guards the log and division below.
        self.epsilon = np.finfo(float).eps

    def forward(self, input_tensor: np.ndarray, label_tensor: np.ndarray) -> float:
        '''
        Forward pass of the cross-entropy loss.
        Literature: H(P, Q) = -sum(P * log(Q)), where P is label_tensor and Q is input_tensor.
        Modification:
            epsilon is added to avoid log(0) = -inf.
        Inputs:
            input_tensor: np.ndarray
            label_tensor: np.ndarray
        Expected output:
            loss: float
        '''
        self.input = input_tensor
        # With one-hot labels, only the predicted probabilities of the true classes
        # contribute to the sum, so select them directly.
        input_tensor = input_tensor[label_tensor == 1]
        loss = -np.sum(np.log(input_tensor + self.epsilon))
        return loss

    def backward(self, label_tensor: np.ndarray) -> np.ndarray:
        '''
        Backward pass of the cross-entropy loss.
        Literature: dH/dQ = -P/Q, where P is label_tensor and Q is input_tensor.
        Modification:
            epsilon is added to avoid division by zero.
        Inputs:
            label_tensor: np.ndarray
        Expected output:
            gradient: np.ndarray
        '''
        gradient = -label_tensor / (self.input + self.epsilon)
        return gradient
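
A minimal usage sketch of the layer above (the batch values are made up for illustration: softmax outputs for two samples over three classes, with one-hot labels):

import numpy as np
from Optimization.Loss import CrossEntropyLoss

predictions = np.array([[0.7, 0.2, 0.1],
                        [0.1, 0.8, 0.1]])
labels = np.array([[1, 0, 0],
                   [0, 1, 0]])

loss_layer = CrossEntropyLoss()
loss = loss_layer.forward(predictions, labels)  # -(log(0.7) + log(0.8)) ≈ 0.580
gradient = loss_layer.backward(labels)          # -P/Q: nonzero only at the true classes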

19 changes: 19 additions & 0 deletions Optimization/Optimizers.py
@@ -0,0 +1,19 @@
import numpy as np

class Sgd:
    '''
    Stochastic gradient descent (SGD) optimizer.
    Update rule:
        w_new = w_old - learning_rate * dL/dw  (gradient of the loss w.r.t. the weight tensor)
    Position in the NN pipeline:
        applied after the backward pass to update the weights of the model.
    '''
    def __init__(self, learning_rate: float) -> None:
        self.learning_rate = learning_rate

    def calculate_update(self, weight_tensor: np.ndarray, gradient_tensor: np.ndarray) -> np.ndarray:
        # One SGD step: move the weights against the gradient, scaled by the learning rate.
        return weight_tensor - self.learning_rate * gradient_tensor
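
A quick sketch of one update step (the weight and gradient values here are invented for illustration):

import numpy as np
from Optimization.Optimizers import Sgd

weights = np.array([0.5, -0.3, 1.2])
gradients = np.array([0.1, -0.2, 0.4])

optimizer = Sgd(learning_rate=0.01)
weights = optimizer.calculate_update(weights, gradients)
# -> [0.499, -0.298, 1.196]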


1 change: 1 addition & 0 deletions Optimization/__init__.py
@@ -0,0 +1 @@
__all__ = ["Optimizers", "Loss"]
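
Since __all__ lists the two submodules, downstream code can import them explicitly or via a wildcard import; a small sketch (module layout assumed from the file paths above):

from Optimization import Loss, Optimizers

loss_layer = Loss.CrossEntropyLoss()
optimizer = Optimizers.Sgd(learning_rate=0.01)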
