Commit 27bbc0d

fix ut

xadupre committed Sep 7, 2024
1 parent 441e09c
Showing 3 changed files with 53 additions and 24 deletions.
46 changes: 26 additions & 20 deletions _unittests/ut_ml/test_neural_tree.py
@@ -5,7 +5,7 @@
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor, export_graphviz
from sklearn.datasets import load_iris
from sklearn.tree import export_text
- from mlstatpy.ext_test_case import ExtTestCase
+ from mlstatpy.ext_test_case import ExtTestCase, ignore_warnings
from onnx_array_api.plotting.text_plot import onnx_simple_text_plot
from mlstatpy.ml.neural_tree import (
NeuralTreeNode,
@@ -25,7 +25,7 @@ def test_neural_tree_node(self):
st = repr(neu)
self.assertEqual(
"NeuralTreeNode(weights=array([0., 1.]), "
"bias=0.5, activation='identity')",
"bias=np.float64(0.5), activation='identity')",
st,
)
st = io.BytesIO()
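The repr fix above tracks NumPy 2.0's new scalar representation (NEP 51): scalars now print with their type, `np.float64(0.5)` instead of `0.5`, which breaks string comparisons against `repr` output. A minimal illustration, assuming NumPy 2.x is installed:

```python
import numpy as np

# NumPy 1.x printed "0.5"; NumPy 2.x (NEP 51) includes the scalar type.
bias = np.float64(0.5)
print(repr(bias))  # np.float64(0.5)

# Plain Python floats are unaffected.
print(repr(0.5))  # 0.5
```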
@@ -39,7 +39,7 @@ def test_neural_tree_network(self):
X = numpy.random.randn(2, 3)
got = net.predict(X)
exp = X.sum(axis=1)
-         self.assertEqual(exp.reshape((-1, 1)), got[:, -1:])
+         self.assertEqualArray(exp.reshape((-1, 1)), got[:, -1:])
rep = repr(net)
self.assertEqual(rep, "NeuralTreeNet(3)")
net.clear()
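Several assertions in this commit move from exact equality to array comparison with an explicit absolute tolerance (`atol`), since floating-point results may differ in the last bits across NumPy versions and BLAS builds. A sketch of what an `assertEqualArray` helper typically reduces to, assuming it wraps `numpy.testing` (the project's actual implementation lives in `mlstatpy/ext_test_case.py`):

```python
import numpy
from numpy.testing import assert_allclose

def assert_equal_array(expected, value, atol=0.0, rtol=0.0):
    # Raises AssertionError when any |expected - value| exceeds
    # atol + rtol * |value|.
    assert_allclose(expected, value, atol=atol, rtol=rtol)

# Exact comparison would fail on the last bit; atol=1e-10 absorbs it.
assert_equal_array(numpy.array([0.3]), numpy.array([0.1 + 0.2]), atol=1e-10)
```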
@@ -139,7 +139,7 @@ def test_neural_tree_network_training_weights(self):
w = net.training_weights
self.assertEqual(w.shape, (6,))
self.assertEqual(w[0], 0)
-         self.assertEqualArray(w[1:4], [1, 1, 1])
+         self.assertEqualArray(w[1:4], numpy.array([1, 1, 1], dtype=float))
delta = numpy.arange(6) - 0.5
net.update_training_weights(delta)
w2 = net.training_weights
@@ -188,7 +188,7 @@ def test_gradients(self):
with self.subTest(act=act):
neu = NeuralTreeNode(w, bias=b, activation=act)
pred = neu.predict(X)
-                 self.assertAlmostEqual(numpy.sum(pred), 1.0)
+                 self.assertAlmostEqual(numpy.sum(pred), 1.0, atol=1e-10)
self.assertEqual(pred.shape, (2,))
grad = neu.gradient_backward(g, X)
self.assertEqual(grad.shape, (2, 4))
@@ -279,8 +279,8 @@ def test_label_class_to_softmax_output(self):
)
soft_y = label_class_to_softmax_output(y_label)
self.assertEqual(soft_y.shape, (4, 2))
-         self.assertEqual(soft_y[:, 1], y_label)
-         self.assertEqual(soft_y[:, 0], 1 - y_label)
+         self.assertEqualArray(soft_y[:, 1], y_label.astype(float))
+         self.assertEqualArray(soft_y[:, 0], 1 - y_label.astype(float))

def test_neural_net_gradient(self):
X = numpy.arange(8).astype(numpy.float64).reshape((-1, 2))
@@ -317,6 +317,7 @@ def test_neural_net_gradient_regression(self):
self.assertEqualArray(loss1, loss2, atol=1e-5)
self.assertEqualArray(grad1, grad2, atol=1e-5)

+     @ignore_warnings(DeprecationWarning)
def test_neural_net_gradient_regression_2(self):
X = numpy.abs(numpy.random.randn(10, 2))
w1 = numpy.array([-0.5, 0.8, -0.6])
@@ -348,7 +349,7 @@ def test_neural_net_gradient_regression_2(self):
pred2 = net.predict(X)
loss2 = net.loss(X, y)

-         self.assertEqualArray(pred1, pred2[:, -1])
+         self.assertEqualArray(pred1, pred2[:, -1], atol=1e-10)
self.assertEqualArray(pred2[:, -2], pred2[:, -1])
self.assertEqualArray(pred2[:, 2], pred2[:, 3])
self.assertEqualArray(loss1, loss2, atol=1e-7)
@@ -358,6 +359,7 @@ def test_neural_net_gradient_regression_2(self):
grad2 = net.gradient(X[p], y[p])
self.assertEqualArray(grad1, grad2[:3], atol=1e-7)

+     @ignore_warnings(DeprecationWarning)
def test_neural_net_gradient_regression_2_h2(self):
X = numpy.abs(numpy.random.randn(10, 2))
w1 = numpy.array([-0.5, 0.8, -0.6])
@@ -410,8 +412,8 @@ def test_neural_net_gradient_regression_2_h2(self):
pred2 = net.predict(X)
loss2 = net.loss(X, y)

-         self.assertEqualArray(pred1, pred2[:, -1])
-         self.assertEqualArray(pred2[:, 2], pred2[:, -1])
+         self.assertEqualArray(pred1, pred2[:, -1], atol=1e-8)
+         self.assertEqualArray(pred2[:, 2], pred2[:, -1], atol=1e-10)
self.assertEqualArray(loss1, loss2, atol=1e-7)

for p in range(5):
@@ -484,6 +486,7 @@ def test_shape_dim2(self):
loss = neu.loss(X, numpy.zeros((X.shape[0], 1), dtype=numpy.float64))
self.assertEqual(loss.shape, (10, 2))

+     @ignore_warnings(DeprecationWarning)
def test_convert_compact(self):
X = numpy.arange(8).astype(numpy.float64).reshape((-1, 2))
y = ((X[:, 0] + X[:, 1] * 2) > 10).astype(numpy.int64)
@@ -585,8 +588,9 @@ def test_convert_compact_skl_onnx(self):
self.assertIn("Softmax(", text)
oinf = ReferenceEvaluator(onx)
got2 = oinf.run(None, {"X": x32})[0]
-         self.assertEqualArray(exp[:, 1], got2, atol=1e-5)
+         self.assertEqualArray(exp[:, 1], got2.astype(float).ravel(), atol=1e-5)

+     @ignore_warnings(DeprecationWarning)
def test_convert_reg_compact(self):
X = numpy.arange(32).astype(numpy.float64).reshape((-1, 2))
y = (X[:, 0] + X[:, 1] * 2).astype(numpy.float64)
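The ONNX check above now casts and flattens the evaluator output before comparing: `ReferenceEvaluator` runs the float32 graph and returns float32 arrays whose shape need not match the 1-D float64 expectation, so `.astype(float).ravel()` aligns dtype and shape first. A small illustration with hypothetical values:

```python
import numpy

exp = numpy.array([1.0, 2.0, 3.0])  # float64, shape (3,)
got2 = numpy.array([[1.0], [2.0], [3.0]], dtype=numpy.float32)  # float32, shape (3, 1)

# Align dtype and shape before the tolerance-based comparison.
numpy.testing.assert_allclose(exp, got2.astype(float).ravel(), atol=1e-5)
```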
@@ -612,10 +616,11 @@ def test_convert_reg_compact(self):
self.assertNotEmpty(root)
exp = tree.predict(X)
got = root.predict(X)
-         self.assertEqualArray(exp, got[:, -1], decimal=6)
+         self.assertEqualArray(exp, got[:, -1], atol=1e-6)
dot = root.to_dot()
self.assertIn("9 -> 17", dot)

+     @ignore_warnings(DeprecationWarning)
def test_convert_compact_skl_reg(self):
X = numpy.arange(8).astype(numpy.float64).reshape((-1, 2))
y = X[:, 0] + X[:, 1] * 2
@@ -626,12 +631,13 @@ def test_convert_compact_skl_reg(self):
exp = tree.predict(X)
got = root.predict(X)
self.assertEqual(exp.shape[0], got.shape[0])
-         self.assertEqualArray(exp, got[:, -1])
+         self.assertEqualArray(exp, got[:, -1], atol=1e-7)

skl = NeuralTreeNetRegressor(root)
prob = skl.predict(X)
-         self.assertEqualArray(exp, prob.ravel())
+         self.assertEqualArray(exp, prob.ravel(), atol=1e-7)

+     @ignore_warnings(DeprecationWarning)
def test_convert_compact_skl_fit_reg(self):
X = numpy.arange(8).astype(numpy.float64).reshape((-1, 2))
y = X[:, 0] + X[:, 1] * 2
@@ -642,8 +648,9 @@ def test_convert_compact_skl_fit_reg(self):
skl.fit(X, y)
exp = tree.predict(X)
got = skl.predict(X)
-         self.assertEqualArray(exp, got.ravel())
+         self.assertEqualArray(exp, got.ravel(), atol=1e-7)

+     @ignore_warnings(DeprecationWarning)
def test_convert_compact_skl_onnx_reg(self):
from skl2onnx import to_onnx
from onnx.reference import ReferenceEvaluator
@@ -656,9 +663,9 @@ def test_convert_compact_skl_onnx_reg(self):
skl = NeuralTreeNetRegressor(root)
got = skl.predict(X)
exp = tree.predict(X)
-         self.assertEqualArray(exp, got.ravel())
+         self.assertEqualArray(exp, got.ravel(), atol=1e-7)
dec = root.predict(X)
-         self.assertEqualArray(exp, dec[:, -1])
+         self.assertEqualArray(exp, dec[:, -1], atol=1e-7)

x32 = X.astype(numpy.float32)
onx = to_onnx(skl, x32, target_opset=15)
Expand All @@ -667,9 +674,8 @@ def test_convert_compact_skl_onnx_reg(self):
self.assertNotIn("Softmax(", text)
oinf = ReferenceEvaluator(onx)
got2 = oinf.run(None, {"X": x32})[0]
-         self.assertEqualArray(exp, got2.ravel())
+         self.assertEqualArray(exp, got2.ravel().astype(float))


if __name__ == "__main__":
# TestNeuralTree().test_convert_reg_compact()
-     unittest.main()
+     unittest.main(verbosity=2)
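Several tests gain an `@ignore_warnings(DeprecationWarning)` decorator so deprecation noise (e.g. from the NumPy 2 transition) cannot fail or clutter the run. The decorator is imported from `mlstatpy.ext_test_case`; a minimal sketch of what such a decorator can look like (an assumption, not necessarily the project's exact code):

```python
import warnings
from functools import wraps

def ignore_warnings(warning_categories):
    """Run the decorated test with the given warning categories suppressed."""
    def decorator(fct):
        @wraps(fct)
        def wrapper(*args, **kwargs):
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", warning_categories)
                return fct(*args, **kwargs)
        return wrapper
    return decorator
```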
25 changes: 22 additions & 3 deletions mlstatpy/ext_test_case.py
@@ -341,6 +341,25 @@ def assertExists(self, name):
if not os.path.exists(name):
raise AssertionError(f"File or folder {name!r} does not exists.")

+     def assertEqual(self, *args, **kwargs):
+         if isinstance(args[0], numpy.ndarray):
+             self.assertEqualArray(*args, **kwargs)
+         else:
+             super().assertEqual(*args, **kwargs)
+ 
+     def assertNotEqualArray(
+         self,
+         expected: numpy.ndarray,
+         value: numpy.ndarray,
+         atol: float = 0,
+         rtol: float = 0,
+     ):
+         try:
+             self.assertEqualArray(expected, value, atol=atol, rtol=rtol)
+         except AssertionError:
+             return
+         raise AssertionError("Both arrays are equal.")
+ 
def assertEqualArray(
self,
expected: numpy.ndarray,
@@ -365,11 +384,11 @@ def assertAlmostEqual(
value = numpy.array(value).astype(expected.dtype)
self.assertEqualArray(expected, value, atol=atol, rtol=rtol)

-     def assertRaise(self, fct: Callable, exc_type: Exception):
+     def assertRaise(self, fct: Callable, exc_type: Optional[Exception] = None):
try:
fct()
-         except exc_type as e:
-             if not isinstance(e, exc_type):
+         except exc_type or Exception as e:
+             if exc_type is not None and not isinstance(e, exc_type):
raise AssertionError(f"Unexpected exception {type(e)!r}.") # noqa: B904
return
raise AssertionError("No exception was raised.")
6 changes: 5 additions & 1 deletion mlstatpy/optim/sgd.py
@@ -1,5 +1,9 @@
import numpy
- from numpy.core._exceptions import UFuncTypeError
+ 
+ try:
+     from numpy.core._exceptions import UFuncTypeError
+ except ImportError:
+     UFuncTypeError = Exception


class BaseOptimizer:
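The guarded import keeps `mlstatpy/optim/sgd.py` importable across NumPy versions: `numpy.core._exceptions` is a private module that NumPy 2.0 relocated, so the code now falls back to plain `Exception` when the import fails. A slightly stricter variant of the same pattern, assuming the NumPy 2.x private path `numpy._core._exceptions` still exposes the class:

```python
try:
    # NumPy 1.x private location.
    from numpy.core._exceptions import UFuncTypeError
except ImportError:
    try:
        # NumPy 2.x private location (assumption; may change without notice).
        from numpy._core._exceptions import UFuncTypeError
    except ImportError:
        # Broadest fallback, as the commit itself does.
        UFuncTypeError = Exception
```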
