Skip to content

Commit bd58969

Browse files
committed
feat: add seed-variation, idempotent and large-input tests (#355)
1 parent ab387dc commit bd58969

File tree

3 files changed

+32
-1
lines changed

3 files changed

+32
-1
lines changed

nebula3/pyg/__init__.py

Whitespace-only changes.

nebula3/pyg/pyg_feature_store.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
2+
from torch_geometric.data.feature_store import FeatureStore, TensorAttr
3+
class NebulaFeatureStore(FeatureStore):
    """PyG ``FeatureStore`` backed by a NebulaGraph connection.

    Args:
        connection: An established NebulaGraph connection / session pool
            used to fetch feature tensors. Presumably a nebula3 connection
            pool — TODO confirm against the caller.
        num_threads: Optional worker-thread count for feature retrieval;
            ``None`` leaves the choice to the backend.
    """

    def __init__(self, connection, num_threads: int | None = None):
        # Initialize the torch_geometric FeatureStore machinery first.
        super().__init__()
        # Bug fix: the original body ended with a bare, dangling `self.`
        # (a SyntaxError). Persist the constructor arguments so later
        # feature lookups can use them.
        self.connection = connection
        self.num_threads = num_threads

tests/test_hash.py

Lines changed: 23 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,12 +11,13 @@
1111
TEST_VECTORS = [
1212
(b"", 6142509188972423790),
1313
(b"a", 4993892634952068459),
14-
(b"abcdefgh", 8664279048047335611), # length-8 cases
14+
(b"abcdefgh", 8664279048047335611), # length-8 bytes cases
1515
(b"abcdefghi", -5409788147785758033),
1616
("to_be_hashed", -1098333533029391540),
1717
("中文", -8591787916246384322),
1818
]
1919

20+
2021
@pytest.mark.parametrize("data, expected", TEST_VECTORS)
def test_known_vectors(data, expected):
    """Each reference input must hash to its recorded value."""
    actual = murmur_hash(data)
    assert actual == expected
@@ -36,3 +37,24 @@ def test_type_error():
3637
"""
3738
with pytest.raises(TypeError):
3839
murmur_hash(12345)
40+
41+
42+
def test_seed_variation():
    """Hashing the same payload with two distinct seeds must not collide."""
    payload = b"seed_test"
    assert murmur_hash(payload, seed=0) != murmur_hash(payload, seed=1)
48+
49+
50+
def test_idempotent():
    """Two independent calls on identical input must agree."""
    payload = b"consistent"
    first = murmur_hash(payload)
    second = murmur_hash(payload)
    assert first == second
54+
55+
56+
def test_large_input_performance():
    """A 10 kB payload hashes without error and yields an int."""
    big = b"x" * 10_000
    assert isinstance(murmur_hash(big), int)

0 commit comments

Comments
 (0)