diff --git a/docs/neural-network/cost-functions/cross-entropy.md b/docs/neural-network/cost-functions/cross-entropy.md
index be11bba4f..2a3e96811 100644
--- a/docs/neural-network/cost-functions/cross-entropy.md
+++ b/docs/neural-network/cost-functions/cross-entropy.md
@@ -12,7 +12,7 @@ This cost function does not have any parameters.
## Example
```php
-use Rubix\ML\NeuralNet\CostFunctions\CrossEntropy;
+use Rubix\ML\NeuralNet\CostFunctions\CrossEntropy\CrossEntropy;
$costFunction = new CrossEntropy();
-```
\ No newline at end of file
+```
diff --git a/docs/neural-network/cost-functions/huber-loss.md b/docs/neural-network/cost-functions/huber-loss.md
index e82950152..fa08c3f93 100644
--- a/docs/neural-network/cost-functions/huber-loss.md
+++ b/docs/neural-network/cost-functions/huber-loss.md
@@ -5,10 +5,12 @@ The pseudo Huber Loss function transitions between L1 and L2 loss at a given piv
$$
L_{\delta}=
- \left\{\begin{matrix}
- \frac{1}{2}(y - \hat{y})^{2} & if \left | (y - \hat{y}) \right | < \delta\\
- \delta ((y - \hat{y}) - \frac1 2 \delta) & otherwise
- \end{matrix}\right.
+ \begin{cases}
+ \frac{1}{2}(y - \hat{y})^{2} & \text{if } |y - \hat{y}| < \delta\\
+    \delta (|y - \hat{y}| - \frac{1}{2} \delta) & \text{otherwise}
+ \end{cases}
$$
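+
+For example, with a pivot of δ = 1, an error of 0.5 falls on the squared branch and contributes 0.125, while an error of 10 falls on the linear branch and contributes 1 × (10 − 0.5) = 9.5.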
## Parameters
@@ -21,4 +21,4 @@ $$
use Rubix\ML\NeuralNet\CostFunctions\HuberLoss;
$costFunction = new HuberLoss(0.5);
-```
\ No newline at end of file
+```
diff --git a/docs/neural-network/cost-functions/least-squares.md b/docs/neural-network/cost-functions/least-squares.md
index 22044c779..de13db2f8 100644
--- a/docs/neural-network/cost-functions/least-squares.md
+++ b/docs/neural-network/cost-functions/least-squares.md
@@ -12,7 +12,7 @@ This cost function does not have any parameters.
## Example
```php
-use Rubix\ML\NeuralNet\CostFunctions\LeastSquares;
+use Rubix\ML\NeuralNet\CostFunctions\LeastSquares\LeastSquares;
$costFunction = new LeastSquares();
-```
\ No newline at end of file
+```
diff --git a/docs/neural-network/cost-functions/mean-absolute-error.md b/docs/neural-network/cost-functions/mean-absolute-error.md
new file mode 100644
index 000000000..1878af14a
--- /dev/null
+++ b/docs/neural-network/cost-functions/mean-absolute-error.md
@@ -0,0 +1,20 @@
+[source]
+
+# Mean Absolute Error
+Mean Absolute Error (MAE) measures the average magnitude of the errors between predicted and actual values without considering their direction. It is a linear score, meaning all individual differences are weighted equally. Because it does not square the differences, MAE is more robust to outliers than Mean Squared Error (MSE).
+
+$$
+MAE = \frac{1}{n}\sum_{i=1}^{n}|y_i - \hat{y}_i|
+$$
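+
+For example, predictions of 2.0 and 6.0 against targets of 1.0 and 3.0 give MAE = (|2.0 - 1.0| + |6.0 - 3.0|) / 2 = 2.0.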
+
+## Parameters
+This cost function does not have any parameters.
+
+## Example
+```php
+use Rubix\ML\NeuralNet\CostFunctions\MeanAbsoluteError\MeanAbsoluteError;
+
+$costFunction = new MeanAbsoluteError();
+```
diff --git a/docs/neural-network/cost-functions/relative-entropy.md b/docs/neural-network/cost-functions/relative-entropy.md
index 6bae7830e..3ff28a6ee 100644
--- a/docs/neural-network/cost-functions/relative-entropy.md
+++ b/docs/neural-network/cost-functions/relative-entropy.md
@@ -12,7 +12,7 @@ This cost function does not have any parameters.
## Example
```php
-use Rubix\ML\NeuralNet\CostFunctions\RelativeEntropy;
+use Rubix\ML\NeuralNet\CostFunctions\RelativeEntropy\RelativeEntropy;
$costFunction = new RelativeEntropy();
-```
\ No newline at end of file
+```
diff --git a/src/NeuralNet/ActivationFunctions/GELU/GELU.php b/src/NeuralNet/ActivationFunctions/GELU/GELU.php
index 710e7e919..e043dd418 100644
--- a/src/NeuralNet/ActivationFunctions/GELU/GELU.php
+++ b/src/NeuralNet/ActivationFunctions/GELU/GELU.php
@@ -33,7 +33,10 @@ class GELU implements ActivationFunction, IBufferDerivative
* @var float
*/
protected const ALPHA = 0.7978845608;
- /** @var float 0.5 * ALPHA */
+
+ /**
+ * @var float 0.5 * ALPHA
+ */
protected const HALF_ALPHA = 0.3989422804;
/**
@@ -42,7 +45,10 @@ class GELU implements ActivationFunction, IBufferDerivative
* @var float
*/
protected const BETA = 0.044715;
- /** @var float 3 * BETA */
+
+ /**
+ * @var float 3 * BETA
+ */
protected const TRIPLE_BETA = 0.134145;
/**
diff --git a/src/NeuralNet/CostFunctions/Base/Contracts/ClassificationLoss.php b/src/NeuralNet/CostFunctions/Base/Contracts/ClassificationLoss.php
new file mode 100644
index 000000000..15548e7c8
--- /dev/null
+++ b/src/NeuralNet/CostFunctions/Base/Contracts/ClassificationLoss.php
@@ -0,0 +1,10 @@
+<?php
+
+namespace Rubix\ML\NeuralNet\CostFunctions\Base\Contracts;
+
+/**
+ * Marker interface for cost functions suited to classification problems.
+ */
+interface ClassificationLoss extends CostFunction
+{
+}
diff --git a/src/NeuralNet/CostFunctions/Base/Contracts/CostFunction.php b/src/NeuralNet/CostFunctions/Base/Contracts/CostFunction.php
new file mode 100644
--- /dev/null
+++ b/src/NeuralNet/CostFunctions/Base/Contracts/CostFunction.php
@@ -0,0 +1,30 @@
+<?php
+
+namespace Rubix\ML\NeuralNet\CostFunctions\Base\Contracts;
+
+use NDArray;
+use Stringable;
+
+/**
+ * Base contract implemented by all neural network cost functions.
+ */
+interface CostFunction extends Stringable
+{
+ /**
+ * Compute the loss score.
+ *
+ * @param NDArray $output
+ * @param NDArray $target
+ * @return float
+ */
+ public function compute(NDArray $output, NDArray $target) : float;
+
+ /**
+ * Calculate the gradient of the cost function with respect to the output.
+ *
+ * @param NDArray $output
+ * @param NDArray $target
+ * @return NDArray
+ */
+ public function differentiate(NDArray $output, NDArray $target) : NDArray;
+}
diff --git a/src/NeuralNet/CostFunctions/Base/Contracts/RegressionLoss.php b/src/NeuralNet/CostFunctions/Base/Contracts/RegressionLoss.php
new file mode 100644
index 000000000..61cf58434
--- /dev/null
+++ b/src/NeuralNet/CostFunctions/Base/Contracts/RegressionLoss.php
@@ -0,0 +1,10 @@
+<?php
+
+namespace Rubix\ML\NeuralNet\CostFunctions\Base\Contracts;
+
+/**
+ * Marker interface for cost functions suited to regression problems.
+ */
+interface RegressionLoss extends CostFunction
+{
+}
diff --git a/src/NeuralNet/CostFunctions/CrossEntropy/CrossEntropy.php b/src/NeuralNet/CostFunctions/CrossEntropy/CrossEntropy.php
new file mode 100644
--- /dev/null
+++ b/src/NeuralNet/CostFunctions/CrossEntropy/CrossEntropy.php
@@ -0,0 +1,79 @@
+<?php
+
+namespace Rubix\ML\NeuralNet\CostFunctions\CrossEntropy;
+
+use NDArray;
+use NumPower;
+use Rubix\ML\NeuralNet\CostFunctions\Base\Contracts\ClassificationLoss;
+use Rubix\ML\Traits\AssertsShapes;
+
+use const Rubix\ML\EPSILON;
+
+/**
+ * Cross Entropy
+ */
+class CrossEntropy implements ClassificationLoss
+{
+ use AssertsShapes;
+
+ /**
+ * Compute the loss score.
+ *
+ * L(y, ŷ) = -Σ(y * log(ŷ)) / n
+ *
+ * @param NDArray $output The output of the network
+ * @param NDArray $target The target values
+ * @return float
+ */
+ public function compute(NDArray $output, NDArray $target) : float
+ {
+ $this->assertSameShape($output, $target);
+
+ // Clip values to avoid log(0)
+ $output = NumPower::clip($output, EPSILON, 1.0);
+
+ $logOutput = NumPower::log($output);
+ $product = NumPower::multiply($target, $logOutput);
+ $negated = NumPower::multiply($product, -1.0);
+
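+        // Average the per-element losses into a single score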
+ return NumPower::mean($negated);
+ }
+
+ /**
+ * Calculate the gradient of the cost function with respect to the output.
+ *
+ * ∂L/∂ŷ = (ŷ - y) / (ŷ * (1 - ŷ))
+ *
+ * @param NDArray $output The output of the network
+ * @param NDArray $target The target values
+ * @return NDArray
+ */
+ public function differentiate(NDArray $output, NDArray $target) : NDArray
+ {
+ $this->assertSameShape($output, $target);
+
+ // Numerator = ŷ - y (calculate before clipping to preserve zeros)
+ $numerator = NumPower::subtract($output, $target);
+
+ // Clip values to avoid division by zero
+ $output = NumPower::clip($output, EPSILON, 1.0 - EPSILON);
+
+ // Denominator = ŷ * (1 - ŷ)
+ $oneMinusOutput = NumPower::subtract(1.0, $output);
+ $denominator = NumPower::multiply($output, $oneMinusOutput);
+ $denominator = NumPower::clip($denominator, EPSILON, 1.0);
+
+ return NumPower::divide($numerator, $denominator);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return 'Cross Entropy';
+ }
+}
diff --git a/src/NeuralNet/CostFunctions/HuberLoss/Exceptions/InvalidAlphaException.php b/src/NeuralNet/CostFunctions/HuberLoss/Exceptions/InvalidAlphaException.php
new file mode 100644
index 000000000..4d5c68755
--- /dev/null
+++ b/src/NeuralNet/CostFunctions/HuberLoss/Exceptions/InvalidAlphaException.php
@@ -0,0 +1,14 @@
+<?php
+
+namespace Rubix\ML\NeuralNet\CostFunctions\HuberLoss\Exceptions;
+
+use InvalidArgumentException;
+
+/**
+ * Invalid Alpha Exception
+ *
+ * Thrown when Huber Loss is given an alpha parameter that is not positive.
+ */
+class InvalidAlphaException extends InvalidArgumentException
+{
+}
diff --git a/src/NeuralNet/CostFunctions/HuberLoss/HuberLoss.php b/src/NeuralNet/CostFunctions/HuberLoss/HuberLoss.php
new file mode 100644
--- /dev/null
+++ b/src/NeuralNet/CostFunctions/HuberLoss/HuberLoss.php
@@ -0,0 +1,104 @@
+<?php
+
+namespace Rubix\ML\NeuralNet\CostFunctions\HuberLoss;
+
+use NDArray;
+use NumPower;
+use Rubix\ML\NeuralNet\CostFunctions\Base\Contracts\RegressionLoss;
+use Rubix\ML\NeuralNet\CostFunctions\HuberLoss\Exceptions\InvalidAlphaException;
+use Rubix\ML\Traits\AssertsShapes;
+
+/**
+ * Huber Loss
+ */
+class HuberLoss implements RegressionLoss
+{
+ use AssertsShapes;
+
+ /**
+     * The alpha quantile, i.e. the pivot point at which larger errors are
+     * evaluated with an L1 loss while smaller errors are evaluated with
+     * an L2 loss.
+ *
+ * @var float
+ */
+ protected float $alpha;
+
+ /**
+ * The square of the alpha parameter.
+ *
+ * @var float
+ */
+ protected float $alpha2;
+
+ /**
+ * @param float $alpha
+ * @throws InvalidAlphaException
+ */
+ public function __construct(float $alpha = 0.9)
+ {
+ if ($alpha <= 0.0) {
+ throw new InvalidAlphaException('Alpha must be greater than 0, ' . $alpha . ' given.');
+ }
+
+ $this->alpha = $alpha;
+ $this->alpha2 = $alpha ** 2;
+ }
+
+ /**
+ * Compute the loss score.
+ *
+ * L(y, ŷ) = α²(√(1 + ((y - ŷ)/α)²) - 1)
+ *
+ * @internal
+ *
+ * @param NDArray $output The output of the network
+ * @param NDArray $target The target values
+ * @return float
+ */
+ public function compute(NDArray $output, NDArray $target) : float
+ {
+ $this->assertSameShape($output, $target);
+
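+        // Pseudo-Huber: quadratic near zero, asymptotically linear for large errors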
+ $difference = NumPower::subtract($target, $output);
+ $scaled = NumPower::divide($difference, $this->alpha);
+ $squared = NumPower::pow($scaled, 2);
+ $sqrt = NumPower::sqrt(NumPower::add($squared, 1.0));
+ $loss = NumPower::multiply($this->alpha2, NumPower::subtract($sqrt, 1.0));
+
+ return NumPower::mean($loss);
+ }
+
+ /**
+ * Calculate the gradient of the cost function with respect to the output.
+ *
+ * ∂L/∂ŷ = (ŷ - y) / √(α² + (ŷ - y)²)
+ *
+ * @internal
+ *
+ * @param NDArray $output The output of the network
+ * @param NDArray $target The target values
+ * @return NDArray
+ */
+ public function differentiate(NDArray $output, NDArray $target) : NDArray
+ {
+ $this->assertSameShape($output, $target);
+
+ $difference = NumPower::subtract($output, $target);
+ $squared = NumPower::pow($difference, 2);
+ $denominator = NumPower::sqrt(NumPower::add($squared, $this->alpha2));
+
+ return NumPower::divide($difference, $denominator);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return 'Huber Loss (alpha: ' . $this->alpha . ')';
+ }
+}
diff --git a/src/NeuralNet/CostFunctions/LeastSquares/LeastSquares.php b/src/NeuralNet/CostFunctions/LeastSquares/LeastSquares.php
new file mode 100644
index 000000000..4825cba7d
--- /dev/null
+++ b/src/NeuralNet/CostFunctions/LeastSquares/LeastSquares.php
@@ -0,0 +1,72 @@
+<?php
+
+namespace Rubix\ML\NeuralNet\CostFunctions\LeastSquares;
+
+use NDArray;
+use NumPower;
+use Rubix\ML\NeuralNet\CostFunctions\Base\Contracts\RegressionLoss;
+use Rubix\ML\Traits\AssertsShapes;
+
+/**
+ * Least Squares
+ */
+class LeastSquares implements RegressionLoss
+{
+ use AssertsShapes;
+
+ /**
+ * Compute the loss score.
+ *
+     * L(y, ŷ) = Σ(y - ŷ)² / n
+ *
+ * @param NDArray $output The output of the network
+ * @param NDArray $target The target values
+ * @return float
+ */
+ public function compute(NDArray $output, NDArray $target) : float
+ {
+ $this->assertSameShape($output, $target);
+
+ $difference = NumPower::subtract($output, $target);
+ $squared = NumPower::pow($difference, 2);
+
+ // Compute mean of all elements
+ return NumPower::mean($squared);
+ }
+
+ /**
+ * Calculate the gradient of the cost function with respect to the output.
+ *
+     * ∂L/∂ŷ = ŷ - y
+ *
+ * @param NDArray $output The output of the network
+ * @param NDArray $target The target values
+ * @return NDArray
+ */
+ public function differentiate(NDArray $output, NDArray $target) : NDArray
+ {
+ $this->assertSameShape($output, $target);
+
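+        // ŷ - y: the constant factor 2/n of the exact MSE gradient is absorbed by the learning rate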
+ return NumPower::subtract($output, $target);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return 'Least Squares';
+ }
+}
diff --git a/src/NeuralNet/CostFunctions/MeanAbsoluteError/MeanAbsoluteError.php b/src/NeuralNet/CostFunctions/MeanAbsoluteError/MeanAbsoluteError.php
new file mode 100644
index 000000000..887b8e035
--- /dev/null
+++ b/src/NeuralNet/CostFunctions/MeanAbsoluteError/MeanAbsoluteError.php
@@ -0,0 +1,74 @@
+<?php
+
+namespace Rubix\ML\NeuralNet\CostFunctions\MeanAbsoluteError;
+
+use NDArray;
+use NumPower;
+use Rubix\ML\NeuralNet\CostFunctions\Base\Contracts\RegressionLoss;
+use Rubix\ML\Traits\AssertsShapes;
+
+/**
+ * Mean Absolute Error
+ */
+class MeanAbsoluteError implements RegressionLoss
+{
+ use AssertsShapes;
+
+ /**
+ * Compute the loss score.
+ *
+ * L(y, ŷ) = Σ|y - ŷ| / n
+ *
+ * @param NDArray $output The output of the network
+ * @param NDArray $target The target values
+ * @return float
+ */
+ public function compute(NDArray $output, NDArray $target) : float
+ {
+ $this->assertSameShape($output, $target);
+
+ $difference = NumPower::subtract($output, $target);
+ $absolute = NumPower::abs($difference);
+
+ return NumPower::mean($absolute);
+ }
+
+ /**
+ * Calculate the gradient of the cost function with respect to the output.
+ *
+ * ∂L/∂ŷ = sign(ŷ - y)
+ *
+ * @param NDArray $output The output of the network
+ * @param NDArray $target The target values
+ * @return NDArray
+ */
+ public function differentiate(NDArray $output, NDArray $target) : NDArray
+ {
+ $this->assertSameShape($output, $target);
+
+ $difference = NumPower::subtract($output, $target);
+
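+        // Element-wise signum of the residuals: -1 for under-predictions, +1 for over-predictions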
+ return NumPower::sign($difference);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return 'Mean Absolute Error';
+ }
+}
diff --git a/src/NeuralNet/CostFunctions/RelativeEntropy/RelativeEntropy.php b/src/NeuralNet/CostFunctions/RelativeEntropy/RelativeEntropy.php
new file mode 100644
index 000000000..4f51337fe
--- /dev/null
+++ b/src/NeuralNet/CostFunctions/RelativeEntropy/RelativeEntropy.php
@@ -0,0 +1,87 @@
+<?php
+
+namespace Rubix\ML\NeuralNet\CostFunctions\RelativeEntropy;
+
+use NDArray;
+use NumPower;
+use Rubix\ML\NeuralNet\CostFunctions\Base\Contracts\ClassificationLoss;
+use Rubix\ML\Traits\AssertsShapes;
+
+use const Rubix\ML\EPSILON;
+
+/**
+ * Relative Entropy
+ */
+class RelativeEntropy implements ClassificationLoss
+{
+ use AssertsShapes;
+
+ /**
+ * Compute the loss.
+ *
+ * L(y, ŷ) = Σ(y * log(y / ŷ)) / n
+ *
+ * @internal
+ *
+ * @param NDArray $output The output of the network
+ * @param NDArray $target The target values
+ * @return float
+ */
+ public function compute(NDArray $output, NDArray $target) : float
+ {
+ $this->assertSameShape($output, $target);
+
+ // Clip values to avoid log(0)
+ $target = NumPower::clip($target, EPSILON, 1.0);
+ $output = NumPower::clip($output, EPSILON, 1.0);
+
+ $ratio = NumPower::divide($target, $output);
+ $logRatio = NumPower::log($ratio);
+ $product = NumPower::multiply($target, $logRatio);
+
+ return NumPower::mean($product);
+ }
+
+ /**
+ * Calculate the gradient of the cost function with respect to the output.
+ *
+ * ∂L/∂ŷ = (ŷ - y) / ŷ
+ *
+ * @internal
+ *
+ * @param NDArray $output The output of the network
+ * @param NDArray $target The target values
+ * @return NDArray
+ */
+ public function differentiate(NDArray $output, NDArray $target) : NDArray
+ {
+ $this->assertSameShape($output, $target);
+
+ // Clip values to avoid division by zero
+ $target = NumPower::clip($target, EPSILON, 1.0);
+ $output = NumPower::clip($output, EPSILON, 1.0);
+
+ $diff = NumPower::subtract($output, $target);
+
+ return NumPower::divide($diff, $output);
+ }
+
+ /**
+ * Return the string representation of the object.
+ *
+ * @return string
+ */
+ public function __toString() : string
+ {
+ return 'Relative Entropy';
+ }
+}
diff --git a/src/Traits/AssertsShapes.php b/src/Traits/AssertsShapes.php
new file mode 100644
index 000000000..7fabc316f
--- /dev/null
+++ b/src/Traits/AssertsShapes.php
@@ -0,0 +1,35 @@
+<?php
+
+namespace Rubix\ML\Traits;
+
+use InvalidArgumentException;
+use NDArray;
+
+/**
+ * Provides a shape-equality assertion shared by the cost functions.
+ */
+trait AssertsShapes
+{
+ /**
+ * Assert that the output and target NDArrays have identical shapes.
+ *
+ * @param NDArray $output The output array to check.
+ * @param NDArray $target The target array to compare against.
+ * @throws InvalidArgumentException If the shapes do not match.
+ */
+ protected function assertSameShape(NDArray $output, NDArray $target) : void
+ {
+ if ($output->shape() !== $target->shape()) {
+            throw new InvalidArgumentException('Output and target must have the same shape.');
+ }
+ }
+}
diff --git a/tests/NeuralNet/ActivationFunctions/Softmax/SoftmaxTest.php b/tests/NeuralNet/ActivationFunctions/Softmax/SoftmaxTest.php
index 89909593d..339036b90 100644
--- a/tests/NeuralNet/ActivationFunctions/Softmax/SoftmaxTest.php
+++ b/tests/NeuralNet/ActivationFunctions/Softmax/SoftmaxTest.php
@@ -48,7 +48,7 @@ public static function computeProvider() : Generator
[
[0.3097901, 0.4762271, 0.2139827],
[0.5671765, 0.2283022, 0.2045210],
- [0.312711, 0.176846, 0.510443]
+ [0.312711, 0.176846, 0.510443],
],
];
@@ -68,8 +68,8 @@ public static function computeProvider() : Generator
[3, 4],
]),
[
- [0.2689414 ,0.7310585],
- [0.2689414 ,0.7310585],
+ [0.2689414, 0.7310585],
+ [0.2689414, 0.7310585],
],
];
}
@@ -105,7 +105,7 @@ public static function differentiateProvider() : Generator
// Test 2x2 matrix
yield [
NumPower::array([
- [0.2689414 ,0.7310585],
+ [0.2689414, 0.7310585],
]),
[
[0.1966119, -0.19661192],
@@ -136,7 +136,7 @@ public static function sumToOneProvider() : Generator
NumPower::array([
[0.1, 0.2, 0.3, 0.4],
[5.0, 4.0, 3.0, 2.0],
- [-1.0, -2.0, -3.0, -4.0]
+ [-1.0, -2.0, -3.0, -4.0],
]),
];
}
diff --git a/tests/NeuralNet/CostFunctions/CrossEntropy/CrossEntropyTest.php b/tests/NeuralNet/CostFunctions/CrossEntropy/CrossEntropyTest.php
new file mode 100644
index 000000000..dd96dd195
--- /dev/null
+++ b/tests/NeuralNet/CostFunctions/CrossEntropy/CrossEntropyTest.php
@@ -0,0 +1,198 @@
+    protected function setUp() : void
+    {
+        $this->costFn = new CrossEntropy();
+    }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('Cross Entropy', (string) $this->costFn);
+ }
+
+ #[Test]
+ #[TestDox('Throws exception when output and target shapes do not match in compute')]
+ public function testComputeThrowsExceptionOnShapeMismatch() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+ $this->expectExceptionMessage('Output and target must have the same shape.');
+
+ $output = NumPower::array([[1.0, 2.0, 3.0]]);
+ $target = NumPower::array([[1.0, 2.0]]);
+
+ $this->costFn->compute($output, $target);
+ }
+
+ #[Test]
+ #[TestDox('Throws exception when output and target shapes do not match in differentiate')]
+ public function testDifferentiateThrowsExceptionOnShapeMismatch() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+ $this->expectExceptionMessage('Output and target must have the same shape.');
+
+ $output = NumPower::array([[1.0, 2.0, 3.0]]);
+ $target = NumPower::array([[1.0, 2.0]]);
+
+ $this->costFn->differentiate($output, $target);
+ }
+
+ #[Test]
+ #[TestDox('Compute loss score')]
+ #[DataProvider('computeProvider')]
+ public function testCompute(NDArray $output, NDArray $target, float $expected) : void
+ {
+ $loss = $this->costFn->compute($output, $target);
+
+ if (is_nan($expected)) {
+ self::assertNan($loss);
+ } else {
+ self::assertEqualsWithDelta($expected, $loss, 1e-7);
+ }
+ }
+
+ #[Test]
+ #[TestDox('Calculate gradient of cost function')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $output, NDArray $target, array $expected) : void
+ {
+ $gradient = $this->costFn->differentiate($output, $target);
+
+ $gradientArray = $gradient->toArray();
+
+ self::assertEqualsWithDelta($expected, $gradientArray, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/CostFunctions/HuberLoss/HuberLossTest.php b/tests/NeuralNet/CostFunctions/HuberLoss/HuberLossTest.php
new file mode 100644
index 000000000..5339454ac
--- /dev/null
+++ b/tests/NeuralNet/CostFunctions/HuberLoss/HuberLossTest.php
@@ -0,0 +1,194 @@
+    protected function setUp() : void
+    {
+        $this->costFn = new HuberLoss(1.0);
+    }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('Huber Loss (alpha: 1)', (string) $this->costFn);
+ }
+
+ #[Test]
+ #[TestDox('Throws exception when constructed with invalid alpha parameter')]
+ public function testConstructorWithInvalidAlpha() : void
+ {
+ $this->expectException(InvalidAlphaException::class);
+
+ new HuberLoss(-1);
+ }
+
+ #[Test]
+ #[TestDox('Throws exception when output and target shapes do not match in compute')]
+ public function testComputeThrowsExceptionOnShapeMismatch() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+ $this->expectExceptionMessage('Output and target must have the same shape.');
+
+ $output = NumPower::array([[1.0, 2.0, 3.0]]);
+ $target = NumPower::array([[1.0, 2.0]]);
+
+ $this->costFn->compute($output, $target);
+ }
+
+ #[Test]
+ #[TestDox('Throws exception when output and target shapes do not match in differentiate')]
+ public function testDifferentiateThrowsExceptionOnShapeMismatch() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+ $this->expectExceptionMessage('Output and target must have the same shape.');
+
+ $output = NumPower::array([[1.0, 2.0, 3.0]]);
+ $target = NumPower::array([[1.0, 2.0]]);
+
+ $this->costFn->differentiate($output, $target);
+ }
+
+ #[Test]
+ #[TestDox('Compute loss score')]
+ #[DataProvider('computeProvider')]
+ public function testCompute(NDArray $output, NDArray $target, float $expected) : void
+ {
+ $loss = $this->costFn->compute($output, $target);
+
+ if (is_nan($expected)) {
+ self::assertNan($loss);
+ } else {
+ self::assertEqualsWithDelta($expected, $loss, 1e-7);
+ }
+ }
+
+ #[Test]
+ #[TestDox('Calculate gradient of cost function')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $output, NDArray $target, array $expected) : void
+ {
+ $gradient = $this->costFn->differentiate($output, $target);
+ $gradientArray = $gradient->toArray();
+ self::assertEqualsWithDelta($expected, $gradientArray, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/CostFunctions/LeastSquares/LeastSquaresTest.php b/tests/NeuralNet/CostFunctions/LeastSquares/LeastSquaresTest.php
new file mode 100644
index 000000000..c50474b1c
--- /dev/null
+++ b/tests/NeuralNet/CostFunctions/LeastSquares/LeastSquaresTest.php
@@ -0,0 +1,196 @@
+    /**
+     * @return Generator
+     */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([]),
+ NumPower::array([]),
+ NAN,
+ ];
+
+ yield [
+ NumPower::array([
+ [0.99],
+ ]),
+ NumPower::array([
+ [1.0],
+ ]),
+ 0.0001000,
+ ];
+
+ yield [
+ NumPower::array([
+ [1000.0],
+ ]),
+ NumPower::array([
+ [1.0],
+ ]),
+ 998001.0,
+ ];
+
+ yield [
+ NumPower::array([
+ [33.98],
+ [20.0],
+ [4.6],
+ [44.2],
+ [38.5],
+ ]),
+ NumPower::array([
+ [36.0],
+ [22.0],
+ [18.0],
+ [41.5],
+ [38.0],
+ ]),
+ 39.0360794,
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [0.99],
+ ]),
+ NumPower::array([
+ [1.0],
+ ]),
+ [
+ [-0.0099999],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [1000.0],
+ ]),
+ NumPower::array([
+ [1.0],
+ ]),
+ [
+ [999.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [33.98],
+ [20.0],
+ [4.6],
+ [44.2],
+ [38.5],
+ ]),
+ NumPower::array([
+ [36.0],
+ [22.0],
+ [18.0],
+ [41.5],
+ [38.0],
+ ]),
+ [
+ [-2.0200004],
+ [-2.0],
+ [-13.3999996],
+ [2.7000007],
+ [0.5],
+ ],
+ ];
+ }
+
+ protected function setUp() : void
+ {
+ $this->costFn = new LeastSquares();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('Least Squares', (string) $this->costFn);
+ }
+
+ #[Test]
+ #[TestDox('Throws exception when output and target shapes do not match in compute')]
+ public function testComputeThrowsExceptionOnShapeMismatch() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+ $this->expectExceptionMessage('Output and target must have the same shape.');
+
+ $output = NumPower::array([[1.0, 2.0, 3.0]]);
+ $target = NumPower::array([[1.0, 2.0]]);
+
+ $this->costFn->compute($output, $target);
+ }
+
+ #[Test]
+ #[TestDox('Throws exception when output and target shapes do not match in differentiate')]
+ public function testDifferentiateThrowsExceptionOnShapeMismatch() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+ $this->expectExceptionMessage('Output and target must have the same shape.');
+
+ $output = NumPower::array([[1.0, 2.0, 3.0]]);
+ $target = NumPower::array([[1.0, 2.0]]);
+
+ $this->costFn->differentiate($output, $target);
+ }
+
+ #[Test]
+ #[TestDox('Compute loss score')]
+ #[DataProvider('computeProvider')]
+ public function testCompute(NDArray $output, NDArray $target, float $expected) : void
+ {
+ $loss = $this->costFn->compute($output, $target);
+
+ if (is_nan($expected)) {
+ self::assertNan($loss);
+ } else {
+ self::assertEqualsWithDelta($expected, $loss, 1e-7);
+ }
+ }
+
+ #[Test]
+ #[TestDox('Calculate gradient of cost function')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $output, NDArray $target, array $expected) : void
+ {
+ $gradient = $this->costFn->differentiate($output, $target);
+
+ // Convert NDArray to PHP array for comparison
+ $gradientArray = $gradient->toArray();
+
+ self::assertEqualsWithDelta($expected, $gradientArray, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/CostFunctions/MeanAbsoluteError/MeanAbsoluteErrorTest.php b/tests/NeuralNet/CostFunctions/MeanAbsoluteError/MeanAbsoluteErrorTest.php
new file mode 100644
index 000000000..b10a63d06
--- /dev/null
+++ b/tests/NeuralNet/CostFunctions/MeanAbsoluteError/MeanAbsoluteErrorTest.php
@@ -0,0 +1,228 @@
+    /**
+     * @return Generator
+     */
+ public static function computeProvider() : Generator
+ {
+ yield [
+ NumPower::array([]),
+ NumPower::array([]),
+ NAN,
+ ];
+
+ yield [
+ NumPower::array([
+ [0.99],
+ ]),
+ NumPower::array([
+ [1.0],
+ ]),
+ 0.01,
+ ];
+
+ yield [
+ NumPower::array([
+ [1000.0],
+ ]),
+ NumPower::array([
+ [1.0],
+ ]),
+ 999.0,
+ ];
+
+ yield [
+ NumPower::array([
+ [33.98],
+ [20.0],
+ [4.6],
+ [44.2],
+ [38.5],
+ ]),
+ NumPower::array([
+ [36.0],
+ [22.0],
+ [18.0],
+ [41.5],
+ [38.0],
+ ]),
+ 4.124,
+ ];
+
+ yield [
+ NumPower::array([
+ [10.0],
+ [-5.0],
+ [3.5],
+ ]),
+ NumPower::array([
+ [8.0],
+ [-3.0],
+ [5.0],
+ ]),
+ 1.8333333,
+ ];
+ }
+
+ /**
+ * @return Generator
+ */
+ public static function differentiateProvider() : Generator
+ {
+ yield [
+ NumPower::array([
+ [0.99],
+ ]),
+ NumPower::array([
+ [1.0],
+ ]),
+ [
+ [-1.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [1000.0],
+ ]),
+ NumPower::array([
+ [1.0],
+ ]),
+ [
+ [1.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [33.98],
+ [20.0],
+ [4.6],
+ [44.2],
+ [38.5],
+ ]),
+ NumPower::array([
+ [36.0],
+ [22.0],
+ [18.0],
+ [41.5],
+ [38.0],
+ ]),
+ [
+ [-1.0],
+ [-1.0],
+ [-1.0],
+ [1.0],
+ [1.0],
+ ],
+ ];
+
+ yield [
+ NumPower::array([
+ [10.0],
+ [-5.0],
+ [3.5],
+ ]),
+ NumPower::array([
+ [8.0],
+ [-3.0],
+ [5.0],
+ ]),
+ [
+ [1.0],
+ [-1.0],
+ [-1.0],
+ ],
+ ];
+ }
+
+ protected function setUp() : void
+ {
+ $this->costFn = new MeanAbsoluteError();
+ }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('Mean Absolute Error', (string) $this->costFn);
+ }
+
+ #[Test]
+ #[TestDox('Throws exception when output and target shapes do not match in compute')]
+ public function testComputeThrowsExceptionOnShapeMismatch() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+ $this->expectExceptionMessage('Output and target must have the same shape.');
+
+ $output = NumPower::array([[1.0, 2.0, 3.0]]);
+ $target = NumPower::array([[1.0, 2.0]]);
+
+ $this->costFn->compute($output, $target);
+ }
+
+ #[Test]
+ #[TestDox('Throws exception when output and target shapes do not match in differentiate')]
+ public function testDifferentiateThrowsExceptionOnShapeMismatch() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+ $this->expectExceptionMessage('Output and target must have the same shape.');
+
+ $output = NumPower::array([[1.0, 2.0, 3.0]]);
+ $target = NumPower::array([[1.0, 2.0]]);
+
+ $this->costFn->differentiate($output, $target);
+ }
+
+ #[Test]
+ #[TestDox('Compute loss score')]
+ #[DataProvider('computeProvider')]
+ public function testCompute(NDArray $output, NDArray $target, float $expected) : void
+ {
+ $loss = $this->costFn->compute($output, $target);
+
+ if (is_nan($expected)) {
+ self::assertNan($loss);
+ } else {
+ self::assertEqualsWithDelta($expected, $loss, 1e-7);
+ }
+ }
+
+ #[Test]
+ #[TestDox('Calculate gradient of cost function')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $output, NDArray $target, array $expected) : void
+ {
+ $gradient = $this->costFn->differentiate($output, $target);
+
+ // Convert NDArray to PHP array for comparison
+ $gradientArray = $gradient->toArray();
+
+ self::assertEqualsWithDelta($expected, $gradientArray, 1e-7);
+ }
+}
diff --git a/tests/NeuralNet/CostFunctions/RelativeEntropy/RelativeEntropyTest.php b/tests/NeuralNet/CostFunctions/RelativeEntropy/RelativeEntropyTest.php
new file mode 100644
index 000000000..4cd9b9955
--- /dev/null
+++ b/tests/NeuralNet/CostFunctions/RelativeEntropy/RelativeEntropyTest.php
@@ -0,0 +1,198 @@
+    protected function setUp() : void
+    {
+        $this->costFn = new RelativeEntropy();
+    }
+
+ #[Test]
+ #[TestDox('Can be cast to a string')]
+ public function testToString() : void
+ {
+ static::assertEquals('Relative Entropy', (string) $this->costFn);
+ }
+
+ #[Test]
+ #[TestDox('Throws exception when output and target shapes do not match in compute')]
+ public function testComputeThrowsExceptionOnShapeMismatch() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+ $this->expectExceptionMessage('Output and target must have the same shape.');
+
+ $output = NumPower::array([[1.0, 2.0, 3.0]]);
+ $target = NumPower::array([[1.0, 2.0]]);
+
+ $this->costFn->compute($output, $target);
+ }
+
+ #[Test]
+ #[TestDox('Throws exception when output and target shapes do not match in differentiate')]
+ public function testDifferentiateThrowsExceptionOnShapeMismatch() : void
+ {
+ $this->expectException(InvalidArgumentException::class);
+ $this->expectExceptionMessage('Output and target must have the same shape.');
+
+ $output = NumPower::array([[1.0, 2.0, 3.0]]);
+ $target = NumPower::array([[1.0, 2.0]]);
+
+ $this->costFn->differentiate($output, $target);
+ }
+
+ #[Test]
+ #[TestDox('Compute loss score')]
+ #[DataProvider('computeProvider')]
+ public function testCompute(NDArray $output, NDArray $target, float $expected) : void
+ {
+ $loss = $this->costFn->compute($output, $target);
+
+ if (is_nan($expected)) {
+ self::assertNan($loss);
+ } else {
+ self::assertEqualsWithDelta($expected, $loss, 1e-7);
+ }
+ }
+
+ #[Test]
+ #[TestDox('Calculate gradient of cost function')]
+ #[DataProvider('differentiateProvider')]
+ public function testDifferentiate(NDArray $output, NDArray $target, array $expected) : void
+ {
+ $gradient = $this->costFn->differentiate($output, $target);
+
+ $gradientArray = $gradient->toArray();
+
+ self::assertEqualsWithDelta($expected, $gradientArray, 1e-7);
+ }
+}