Skip to content

Commit f146013

Browse files
SonAIengine and claude committed
feat: synaptic-memory v0.1.0 — 뇌 신경망 기반 knowledge graph 라이브러리
핵심 모듈: - models: Node/Edge/ActivatedNode + ConsolidationLevel(L0→L3) + NodeKind 9종 - protocols: StorageBackend/Digester/QueryRewriter/TagExtractor Protocol - graph: SynapticGraph facade (add/link/search/reinforce/consolidate/prune/decay) - search: 3단계 하이브리드 검색 (FTS+fuzzy+vector → 동의어 확장 → query rewrite) - hebbian: Hebbian learning — 성공/실패 기반 edge weight 자동 조정 - resonance: 4축 scoring (relevance × importance × recency × vitality) - consolidation: L0→L3 메모리 압축 캐스케이드 (TTL + 사용빈도 승격) - synonyms: 한/영 동의어 맵 (30+ 그룹) - exporter: Markdown 내보내기 백엔드: - MemoryBackend: dict 기반 in-memory (테스트용) - SQLiteBackend: FTS5 + recursive CTE 그래프 순회 - PostgreSQLBackend: stub (v0.2.0 예정 — AGE + pgvector + pg_trgm) Zero core deps — 백엔드만 extras (sqlite/postgresql) 93 tests passed, pyright strict 0 errors Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent c56fe8b commit f146013

28 files changed

+2820
-0
lines changed

pyproject.toml

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "synaptic-memory"
version = "0.1.0"
description = "Brain-inspired knowledge graph: spreading activation, Hebbian learning, memory consolidation."
license = "MIT"
requires-python = ">=3.12"
authors = [{ name = "Son Seongjun" }]
# Zero runtime dependencies by design; storage backends are opt-in extras below.
dependencies = []

[project.optional-dependencies]
postgresql = ["asyncpg>=0.30", "pgvector>=0.3"]
sqlite = ["aiosqlite>=0.20"]
# "all" bundles every backend extra — keep in sync with the lists above.
all = ["asyncpg>=0.30", "pgvector>=0.3", "aiosqlite>=0.20"]
dev = ["pytest>=8.3", "pytest-asyncio>=0.24", "ruff>=0.8", "pyright>=1.1"]

[tool.hatch.build.targets.wheel]
# src layout: the wheel ships the `synaptic` package from src/.
packages = ["src/synaptic"]

[tool.pytest.ini_options]
# auto mode: async test functions are collected without an explicit marker.
asyncio_mode = "auto"
testpaths = ["tests"]

[tool.ruff]
target-version = "py312"
line-length = 100

[tool.ruff.lint]
select = ["E", "F", "W", "I", "N", "UP", "B", "A", "S", "ASYNC", "PLC", "RUF"]
# S101: allow `assert` (used freely in the test suite).
ignore = ["S101"]

[tool.pyright]
pythonVersion = "3.12"
typeCheckingMode = "strict"

src/synaptic/__init__.py

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
"""Synaptic Memory — Brain-inspired knowledge graph."""
2+
3+
from synaptic.graph import SynapticGraph
4+
from synaptic.models import (
5+
ActivatedNode,
6+
ConsolidationLevel,
7+
DigestResult,
8+
Edge,
9+
EdgeKind,
10+
Node,
11+
NodeKind,
12+
SearchResult,
13+
)
14+
from synaptic.protocols import Digester, QueryRewriter, StorageBackend, TagExtractor
15+
16+
__all__ = [
17+
"ActivatedNode",
18+
"ConsolidationLevel",
19+
"DigestResult",
20+
"Digester",
21+
"Edge",
22+
"EdgeKind",
23+
"Node",
24+
"NodeKind",
25+
"QueryRewriter",
26+
"SearchResult",
27+
"StorageBackend",
28+
"SynapticGraph",
29+
"TagExtractor",
30+
]

src/synaptic/backends/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
"""Storage backends for Synaptic Memory.

Concrete implementations of the ``StorageBackend`` protocol:
``MemoryBackend`` (dict-based, for tests), ``SQLiteBackend`` (FTS5),
and a ``PostgreSQLBackend`` stub planned for v0.2.0.
"""

src/synaptic/backends/memory.py

Lines changed: 193 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,193 @@
1+
"""In-memory storage backend for testing."""
2+
3+
from __future__ import annotations
4+
5+
from collections.abc import Sequence
6+
from difflib import SequenceMatcher
7+
8+
from synaptic.models import (
9+
ConsolidationLevel,
10+
Edge,
11+
Node,
12+
NodeKind,
13+
)
14+
15+
16+
class MemoryBackend:
    """Dict-based in-memory backend. No external dependencies.

    Nodes and edges live in plain dicts keyed by id; insertion order
    doubles as the stable scan order for searches and listings. Intended
    for tests and prototyping — every search is a linear scan.
    """

    __slots__ = ("_edges", "_nodes")

    def __init__(self) -> None:
        self._nodes: dict[str, Node] = {}
        self._edges: dict[str, Edge] = {}

    async def connect(self) -> None:
        """No-op: there is no external resource to open."""

    async def close(self) -> None:
        """Drop all stored nodes and edges."""
        self._nodes.clear()
        self._edges.clear()

    # --- Node CRUD ---

    async def save_node(self, node: Node) -> None:
        """Insert or overwrite *node*, keyed by its id."""
        self._nodes[node.id] = node

    async def get_node(self, node_id: str) -> Node | None:
        """Return the node with *node_id*, or None if absent."""
        return self._nodes.get(node_id)

    async def update_node(self, node: Node) -> None:
        """Replace an existing node; unknown ids are silently ignored."""
        if node.id in self._nodes:
            self._nodes[node.id] = node

    async def delete_node(self, node_id: str) -> None:
        """Remove a node and cascade-delete every edge touching it."""
        self._nodes.pop(node_id, None)
        # Rebuild the edge map without edges incident to the deleted node;
        # relative order of surviving edges is unchanged.
        self._edges = {
            eid: e
            for eid, e in self._edges.items()
            if node_id not in (e.source_id, e.target_id)
        }

    async def list_nodes(
        self,
        *,
        kind: NodeKind | None = None,
        level: ConsolidationLevel | None = None,
        limit: int = 100,
    ) -> list[Node]:
        """Return up to *limit* nodes, optionally filtered by kind and level."""
        matched: list[Node] = []
        for candidate in self._nodes.values():
            wrong_kind = kind is not None and candidate.kind != kind
            wrong_level = level is not None and candidate.level != level
            if wrong_kind or wrong_level:
                continue
            matched.append(candidate)
            if len(matched) >= limit:
                break
        return matched

    # --- Edge CRUD ---

    async def save_edge(self, edge: Edge) -> None:
        """Insert or overwrite *edge*, keyed by its id."""
        self._edges[edge.id] = edge

    async def get_edges(self, node_id: str, *, direction: str = "both") -> list[Edge]:
        """Edges attached to *node_id*; *direction* is "outgoing", "incoming" or "both"."""
        want_out = direction in ("both", "outgoing")
        want_in = direction in ("both", "incoming")
        return [
            e
            for e in self._edges.values()
            if (want_out and e.source_id == node_id)
            or (want_in and e.target_id == node_id)
        ]

    async def update_edge(self, edge: Edge) -> None:
        """Replace an existing edge; unknown ids are silently ignored."""
        if edge.id in self._edges:
            self._edges[edge.id] = edge

    async def delete_edge(self, edge_id: str) -> None:
        """Remove an edge if present."""
        self._edges.pop(edge_id, None)

    # --- Search ---

    async def search_fts(self, query: str, *, limit: int = 20) -> list[Node]:
        """Naive full-text search: rank nodes by how many query terms appear in them."""
        terms = query.lower().split()
        ranked: list[tuple[Node, int]] = []
        for node in self._nodes.values():
            haystack = f"{node.title} {node.content}".lower()
            # bool True counts as 1 — number of matching terms.
            count = sum(term in haystack for term in terms)
            if count:
                ranked.append((node, count))
        ranked.sort(key=lambda pair: pair[1], reverse=True)
        return [node for node, _ in ranked[:limit]]

    async def search_fuzzy(
        self, query: str, *, limit: int = 20, threshold: float = 0.3
    ) -> list[Node]:
        """Fuzzy match via difflib ratio against "title content"; keep ratio >= *threshold*."""
        needle = query.lower()
        kept: list[tuple[Node, float]] = []
        for node in self._nodes.values():
            haystack = f"{node.title} {node.content}".lower()
            score = SequenceMatcher(None, needle, haystack).ratio()
            if score >= threshold:
                kept.append((node, score))
        kept.sort(key=lambda pair: pair[1], reverse=True)
        return [node for node, _ in kept[:limit]]

    async def search_vector(self, embedding: list[float], *, limit: int = 20) -> list[Node]:
        """Rank nodes that carry an embedding by cosine similarity to *embedding*."""
        if not embedding:
            return []
        ranked = [
            (node, _cosine_similarity(embedding, node.embedding))
            for node in self._nodes.values()
            if node.embedding
        ]
        ranked.sort(key=lambda pair: pair[1], reverse=True)
        return [node for node, _ in ranked[:limit]]

    # --- Graph traversal ---

    async def get_neighbors(self, node_id: str, *, depth: int = 1) -> list[tuple[Node, Edge]]:
        """Breadth-first expansion up to *depth* hops from *node_id*.

        Returns (node, edge) pairs; each reachable node is reported once,
        paired with the edge that first reached it. The start node itself
        is never included.
        """
        pairs: list[tuple[Node, Edge]] = []
        seen: set[str] = {node_id}
        current: set[str] = {node_id}

        for _ in range(depth):
            upcoming: set[str] = set()
            for nid in current:
                for edge in self._edges.values():
                    if edge.source_id == nid and edge.target_id not in seen:
                        other = edge.target_id
                    elif edge.target_id == nid and edge.source_id not in seen:
                        other = edge.source_id
                    else:
                        continue
                    node = self._nodes.get(other)
                    if node is None:
                        # Dangling edge: the endpoint id is not stored.
                        # Do NOT mark it seen, matching the original pass.
                        continue
                    pairs.append((node, edge))
                    seen.add(other)
                    upcoming.add(other)
            current = upcoming

        return pairs

    # --- Batch ---

    async def save_nodes_batch(self, nodes: Sequence[Node]) -> None:
        """Upsert many nodes in one call."""
        self._nodes.update((n.id, n) for n in nodes)

    async def save_edges_batch(self, edges: Sequence[Edge]) -> None:
        """Upsert many edges in one call."""
        self._edges.update((e.id, e) for e in edges)

    # --- Maintenance ---

    async def prune_edges(self, *, weight_below: float = 0.1) -> int:
        """Drop edges whose weight is under *weight_below*; return the removal count."""
        before = len(self._edges)
        # `not weight < threshold` (rather than `>=`) preserves NaN handling:
        # NaN weights compare False and are kept, exactly as before.
        self._edges = {
            eid: e for eid, e in self._edges.items() if not e.weight < weight_below
        }
        return before - len(self._edges)

    async def decay_vitality(self, *, factor: float = 0.95) -> int:
        """Multiply every node's vitality by *factor* in place; return how many were touched."""
        for node in self._nodes.values():
            node.vitality *= factor
        return len(self._nodes)
183+
184+
185+
def _cosine_similarity(a: list[float], b: list[float]) -> float:
186+
if len(a) != len(b) or not a:
187+
return 0.0
188+
dot = sum(x * y for x, y in zip(a, b, strict=True))
189+
norm_a = sum(x * x for x in a) ** 0.5
190+
norm_b = sum(x * x for x in b) ** 0.5
191+
if norm_a == 0 or norm_b == 0:
192+
return 0.0
193+
return dot / (norm_a * norm_b)
src/synaptic/backends/postgresql.py

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
"""PostgreSQL backend stub — Phase C (v0.2.0)."""
2+
3+
from __future__ import annotations
4+
5+
6+
class PostgreSQLBackend:
    """PostgreSQL backend stub (AGE + pgvector + pg_trgm).

    Not implemented yet — planned for synaptic-memory v0.2.0; see the docs
    for the intended PostgreSQL schema. Instantiating always raises.
    """

    def __init__(self, dsn: str = "") -> None:
        # Fail fast with a pointer to the working backends.
        raise NotImplementedError(
            "PostgreSQL backend is not yet implemented. "
            "Use MemoryBackend or SQLiteBackend for now. "
            "Planned for synaptic-memory v0.2.0."
        )

0 commit comments

Comments
 (0)