
Commit 07109f5
Temporarily disable slow tests
irenaby committed Jan 7, 2025
1 parent 627fef0 commit 07109f5
Showing 2 changed files with 56 additions and 56 deletions.
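The change disables the slow tests by commenting out the test methods, as shown in the diffs below. For reference, a common alternative in unittest-style suites is a skip decorator, which keeps the test visible in the report as skipped instead of removing it from collection. A minimal sketch of that pattern follows (the class name and reason string are illustrative, not taken from this commit):

import unittest

class FeatureNetworksTest(unittest.TestCase):  # illustrative test case, not the repo's actual class

    @unittest.skip("temporarily disabled: too slow for CI")  # stdlib decorator; reason is illustrative
    def test_multi_head_attention(self):
        ...  # original test body would remain unchanged here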
48 changes: 24 additions & 24 deletions tests/keras_tests/feature_networks_tests/test_features_runner.py
@@ -791,30 +791,30 @@ def test_uniform_range_selection_softmax_activation(self):
UniformRangeSelectionBoundedActivationTest(self, QuantizationErrorMethod.LP).run_test()
UniformRangeSelectionBoundedActivationTest(self, QuantizationErrorMethod.KL).run_test()

def test_multi_head_attention(self):
q_seq_len, kv_seq_len = 5, 6
q_dim, k_dim, v_dim = 11, 12, 13
num_heads, qk_proj_dim, v_proj_dim = 3, 4, 7
attention_axes = [1, 3]
num_iterations = 9
for separate_key_value in [False, True]:
MultiHeadAttentionTest(self, [(q_seq_len, q_dim),
(kv_seq_len, k_dim),
(kv_seq_len, v_dim)],
num_heads, qk_proj_dim, v_proj_dim, None,
separate_key_value=separate_key_value, output_dim=15).run_test()
input_shapes = [(2, num_iterations, q_seq_len, q_dim),
(2, num_iterations, kv_seq_len, k_dim),
(2, num_iterations, kv_seq_len, v_dim)]
MultiHeadAttentionTest(self, input_shapes,
num_heads, qk_proj_dim, v_proj_dim, attention_axes,
separate_key_value=separate_key_value, output_dim=14).run_test()
MultiHeadAttentionTest(self, input_shapes,
num_heads, qk_proj_dim, v_proj_dim, attention_axes,
separate_key_value=separate_key_value, output_dim=None).run_test()
MultiHeadAttentionTest(self, input_shapes,
num_heads, qk_proj_dim, v_proj_dim, None,
separate_key_value=separate_key_value, output_dim=14).run_test()
# def test_multi_head_attention(self):
# q_seq_len, kv_seq_len = 5, 6
# q_dim, k_dim, v_dim = 11, 12, 13
# num_heads, qk_proj_dim, v_proj_dim = 3, 4, 7
# attention_axes = [1, 3]
# num_iterations = 9
# for separate_key_value in [False, True]:
# MultiHeadAttentionTest(self, [(q_seq_len, q_dim),
# (kv_seq_len, k_dim),
# (kv_seq_len, v_dim)],
# num_heads, qk_proj_dim, v_proj_dim, None,
# separate_key_value=separate_key_value, output_dim=15).run_test()
# input_shapes = [(2, num_iterations, q_seq_len, q_dim),
# (2, num_iterations, kv_seq_len, k_dim),
# (2, num_iterations, kv_seq_len, v_dim)]
# MultiHeadAttentionTest(self, input_shapes,
# num_heads, qk_proj_dim, v_proj_dim, attention_axes,
# separate_key_value=separate_key_value, output_dim=14).run_test()
# MultiHeadAttentionTest(self, input_shapes,
# num_heads, qk_proj_dim, v_proj_dim, attention_axes,
# separate_key_value=separate_key_value, output_dim=None).run_test()
# MultiHeadAttentionTest(self, input_shapes,
# num_heads, qk_proj_dim, v_proj_dim, None,
# separate_key_value=separate_key_value, output_dim=14).run_test()

def test_qat(self):
QATWrappersTest(self, layers.Conv2D(3, 4, activation='relu'), test_loading=True).run_test()
64 changes: 32 additions & 32 deletions tests/pytorch_tests/model_tests/test_feature_models_runner.py
@@ -247,24 +247,24 @@ def test_linear_function(self):
"""
LinearFNetTest(self).run_test()

def test_matmul_function(self):
"""
This test checks the MatMul substitution function
"""
MatMulFNetTest(self, [3, 5, 10], [3, 10, 8]).run_test()
MatMulOpNetTest(self, [3, 5, 10], [3, 10, 8]).run_test()
MatMulFNetTest(self, [3, 2, 5, 10], [3, 2, 10, 20]).run_test()
MatMulOpNetTest(self, [3, 2, 5, 10], [3, 2, 10, 20]).run_test()
MatMulFNetTest(self, [50, 2, 400, 32], [50, 1, 32, 80]).run_test()
MatMulOpNetTest(self, [50, 2, 400, 32], [50, 1, 32, 80]).run_test()
MatMulFNetTest(self, [3, 1, 5, 10], [3, 8, 10, 3]).run_test()
MatMulOpNetTest(self, [3, 1, 5, 10], [3, 8, 10, 3]).run_test()
MatMulFNetTest(self, [3, 1, 4, 5, 10], [3, 8, 1, 10, 10]).run_test()
MatMulOpNetTest(self, [3, 1, 4, 5, 10], [3, 8, 1, 10, 10]).run_test()
MatMulFNetTest(self, [3, 10, 6, 5, 50, 100], [3, 10, 1, 1, 100, 80]).run_test()
MatMulOpNetTest(self, [3, 10, 6, 5, 50, 100], [3, 10, 1, 1, 100, 80]).run_test()
MatMulFNetTest(self, [3, 1, 7, 1, 50, 100], [3, 10, 7, 5, 100, 80]).run_test()
MatMulOpNetTest(self, [3, 1, 7, 1, 50, 100], [3, 10, 7, 5, 100, 80]).run_test()
# def test_matmul_function(self):
# """
# This test checks the MatMul substitution function
# """
# MatMulFNetTest(self, [3, 5, 10], [3, 10, 8]).run_test()
# MatMulOpNetTest(self, [3, 5, 10], [3, 10, 8]).run_test()
# MatMulFNetTest(self, [3, 2, 5, 10], [3, 2, 10, 20]).run_test()
# MatMulOpNetTest(self, [3, 2, 5, 10], [3, 2, 10, 20]).run_test()
# MatMulFNetTest(self, [50, 2, 400, 32], [50, 1, 32, 80]).run_test()
# MatMulOpNetTest(self, [50, 2, 400, 32], [50, 1, 32, 80]).run_test()
# MatMulFNetTest(self, [3, 1, 5, 10], [3, 8, 10, 3]).run_test()
# MatMulOpNetTest(self, [3, 1, 5, 10], [3, 8, 10, 3]).run_test()
# MatMulFNetTest(self, [3, 1, 4, 5, 10], [3, 8, 1, 10, 10]).run_test()
# MatMulOpNetTest(self, [3, 1, 4, 5, 10], [3, 8, 1, 10, 10]).run_test()
# MatMulFNetTest(self, [3, 10, 6, 5, 50, 100], [3, 10, 1, 1, 100, 80]).run_test()
# MatMulOpNetTest(self, [3, 10, 6, 5, 50, 100], [3, 10, 1, 1, 100, 80]).run_test()
# MatMulFNetTest(self, [3, 1, 7, 1, 50, 100], [3, 10, 7, 5, 100, 80]).run_test()
# MatMulOpNetTest(self, [3, 1, 7, 1, 50, 100], [3, 10, 7, 5, 100, 80]).run_test()

def test_broken_net(self):
"""
@@ -639,20 +639,20 @@ def test_mixed_precision_distance_functions(self):
"""
MixedPrecisionDistanceFunctions(self).run_test()

def test_mha_layer_test(self):
"""
This test checks the MultiHeadAttentionDecomposition feature.
"""
num_heads = [3, 7, 5, 11]
q_seq_len, kv_seq_len = [8, 11, 4, 18], [13, 9, 2, 11]
qdim, kdim, vdim = [7, 23, 2, 4], [9, None, 7, None], [11, 17, 7, None]
for iter in range(len(num_heads)):
MHALayerNetTest(self, num_heads[iter], q_seq_len[iter], qdim[iter] * num_heads[iter],
kv_seq_len[iter], kdim[iter], vdim[iter], bias=True).run_test()
MHALayerNetTest(self, num_heads[iter], q_seq_len[iter], qdim[iter] * num_heads[iter],
kv_seq_len[iter], kdim[iter], vdim[iter], bias=False).run_test()
MHALayerNetFeatureTest(self, num_heads[0], q_seq_len[0], qdim[0] * num_heads[0],
kv_seq_len[0], kdim[0], vdim[0], bias=True, add_bias_kv=True).run_test()
# def test_mha_layer_test(self):
# """
# This test checks the MultiHeadAttentionDecomposition feature.
# """
# num_heads = [3, 7, 5, 11]
# q_seq_len, kv_seq_len = [8, 11, 4, 18], [13, 9, 2, 11]
# qdim, kdim, vdim = [7, 23, 2, 4], [9, None, 7, None], [11, 17, 7, None]
# for iter in range(len(num_heads)):
# MHALayerNetTest(self, num_heads[iter], q_seq_len[iter], qdim[iter] * num_heads[iter],
# kv_seq_len[iter], kdim[iter], vdim[iter], bias=True).run_test()
# MHALayerNetTest(self, num_heads[iter], q_seq_len[iter], qdim[iter] * num_heads[iter],
# kv_seq_len[iter], kdim[iter], vdim[iter], bias=False).run_test()
# MHALayerNetFeatureTest(self, num_heads[0], q_seq_len[0], qdim[0] * num_heads[0],
# kv_seq_len[0], kdim[0], vdim[0], bias=True, add_bias_kv=True).run_test()

def test_scaled_dot_product_attention_layer(self):
"""

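If the intent is to re-enable these tests later without another code edit, one option (not used in this commit) is to gate slow tests on an environment variable via unittest.skipUnless. A minimal sketch, assuming a hypothetical RUN_SLOW_TESTS opt-in flag:

import os
import unittest

# Hypothetical opt-in flag; not defined anywhere in this repository.
RUN_SLOW_TESTS = os.getenv("RUN_SLOW_TESTS") == "1"

class FeatureModelsTest(unittest.TestCase):  # illustrative class name

    @unittest.skipUnless(RUN_SLOW_TESTS, "slow test; set RUN_SLOW_TESTS=1 to run")
    def test_matmul_function(self):
        ...  # original test body would remain unchanged here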