import json
import pytest
from silk import GraphStore
# Minimal ontology shared by every test in this module: two property-less
# node types and a single edge type that links entities to entities.
ONTOLOGY = json.dumps(
    {
        "node_types": {
            "entity": {"properties": {}},
            "signal": {"properties": {}},
        },
        "edge_types": {
            "LINKS": {
                "source_types": ["entity"],
                "target_types": ["entity"],
                "properties": {},
            },
        },
    }
)
def _store(instance_id="test"):
    """Create a GraphStore bound to the shared test ontology."""
    store = GraphStore(instance_id, ONTOLOGY)
    return store
def test_sync_rejects_invalid_node_type():
    """A valid entity node added on one store survives a two-way sync.

    NOTE(review): the test name promises rejection of an invalid node
    type, but the body only syncs a valid "entity" node and asserts it
    arrives — it duplicates test_sync_valid_entries_converge. Rename it
    or add the invalid-type case; confirm intent with the author.
    """
    origin = _store("a")
    replica = _store("b")
    origin.add_node("n1", "entity", "Node 1")

    def one_way(src, dst):
        # src publishes an offer, dst answers with a payload, src merges it.
        src.merge_sync_payload(dst.receive_sync_offer(src.generate_sync_offer()))

    one_way(origin, replica)
    one_way(replica, origin)
    assert replica.get_node("n1") is not None
def test_sync_valid_entries_converge():
    """Nodes and edges valid under the ontology propagate in a two-way sync."""
    origin = _store("a")
    replica = _store("b")
    origin.add_node("n1", "entity", "Node 1")
    origin.add_node("n2", "entity", "Node 2")
    origin.add_edge("e1", "LINKS", "n1", "n2")

    def one_way(src, dst):
        # src publishes an offer, dst answers with a payload, src merges it.
        src.merge_sync_payload(dst.receive_sync_offer(src.generate_sync_offer()))

    one_way(origin, replica)
    one_way(replica, origin)
    assert replica.get_node("n1") is not None
    assert replica.get_node("n2") is not None
    assert replica.get_edge("e1") is not None
def test_deeply_nested_value_rejected():
    """A property value nested 100 levels deep trips the depth limit."""
    store = _store()
    value = "leaf"
    for _ in range(100):
        value = dict(nested=value)
    with pytest.raises(ValueError, match="depth"):
        store.add_node("n1", "entity", "Node", {"data": value})
def test_moderate_nesting_accepted():
    """Ten levels of nesting stay within the depth limit."""
    store = _store()
    value = "leaf"
    for _ in range(10):
        value = dict(level=value)
    store.add_node("n1", "entity", "Node", {"data": value})
    assert store.get_node("n1") is not None
def test_oversized_string_rejected():
    """A string one character past 1 MiB is rejected as too large."""
    store = _store()
    blob = "x" * (2**20 + 1)  # 1_048_577 chars — one past the 1 MiB boundary
    with pytest.raises(ValueError, match="exceeds maximum"):
        store.add_node("n1", "entity", "Node", {"data": blob})
def test_normal_string_accepted():
    """A 10,000-character string is stored and read back intact."""
    store = _store()
    text = "x" * 10_000
    store.add_node("n1", "entity", "Node", {"data": text})
    props = store.get_node("n1")["properties"]
    assert len(props["data"]) == 10_000
def test_oversized_list_rejected():
    """A 10,001-element list exceeds the collection size limit."""
    store = _store()
    huge = [*range(10_001)]
    with pytest.raises(ValueError, match="exceeds maximum"):
        store.add_node("n1", "entity", "Node", {"data": huge})
def test_normal_list_accepted():
    """A 100-element list is stored and read back at full length."""
    store = _store()
    store.add_node("n1", "entity", "Node", {"data": [*range(100)]})
    props = store.get_node("n1")["properties"]
    assert len(props["data"]) == 100
def test_oversized_map_rejected():
    """A 10,001-entry map exceeds the collection size limit."""
    store = _store()
    huge = {"k%d" % i: i for i in range(10_001)}
    with pytest.raises(ValueError, match="exceeds maximum"):
        store.add_node("n1", "entity", "Node", {"data": huge})
def test_corrupt_sync_payload_rejected():
    """Bytes that are not a valid sync payload raise ValueError on merge."""
    gs = _store()
    garbage = b"this is not valid msgpack"
    with pytest.raises(ValueError):
        gs.merge_sync_payload(garbage)
def test_corrupt_sync_offer_rejected():
    """Bytes that are not a valid sync offer raise ValueError on receipt."""
    gs = _store()
    with pytest.raises(ValueError):
        gs.receive_sync_offer(b"garbage")
def test_snapshot_roundtrip():
    """A store rebuilt from a snapshot contains every original node and edge."""
    source = _store("a")
    source.add_node("n1", "entity", "Node 1")
    source.add_node("n2", "entity", "Node 2")
    source.add_edge("e1", "LINKS", "n1", "n2")
    restored = GraphStore.from_snapshot("b", source.snapshot())
    for node_id in ("n1", "n2"):
        assert restored.get_node(node_id) is not None
    assert restored.get_edge("e1") is not None
def test_remove_nonexistent_node_no_crash():
    """Removing an unknown node id completes without raising."""
    _store().remove_node("nonexistent")
def test_update_property_on_nonexistent_entity():
    """Updating a property on an unknown entity id completes without raising.

    The only expectation checked is that no exception propagates; the
    call's return value (if any) is deliberately ignored.
    """
    gs = _store()
    gs.update_property("nonexistent", "key", "value")