NuoNuo: Hippocampal memory module prototype

Hopfield + Hebbian hybrid memory system for LLMs.
Two nights of experiments (16 iterations), validated on LongMemEval (ICLR 2025).

Architecture:
- Single-hop: Two-Stage Hopfield (NN top-20 → softmax settle)
- Multi-hop: Hebbian W matrix with WTA pattern separation
- 64% on LongMemEval (500 questions), retrieval-only, no LLM dependency
- 4ms latency @ 20K memories, ~1GB VRAM

Key findings:
- Hopfield attention solved noise tolerance (accuracy 20% → 100% vs. a flat Hebbian baseline)
- WTA pattern separation enables 20K+ capacity
- Multi-hop associative chains (6 hops, CosSim=1.0) — RAG can't do this
- MiniLM-L6 is optimal (the discrimination gap matters more than absolute similarity)
- Paraphrase cue augmentation: 55% → 100% on synthetic, 36% → 64% on benchmark
- An SNN encoder is viable (CosSim 0.99) but is not needed for the current architecture
This commit is contained in:
2026-04-07 10:37:24 +01:00
commit d923aa1e31
65 changed files with 13148 additions and 0 deletions

114
doc/exp02e_results.json Normal file
View File

@@ -0,0 +1,114 @@
{
"soft_wta_t0.01": {
"0.0": 0.9924059003591538,
"0.05": 0.7081658291816711,
"0.1": 0.3512206456577405,
"0.2": 0.1427949102059938,
"0.5": 0.06214611444971524,
"1.0": 0.03803978644893505
},
"soft_wta_t0.05": {
"0.0": 0.7770068669319152,
"0.05": 0.7753341776132584,
"0.1": 0.7744931131601334,
"0.2": 0.7739920604228974,
"0.5": 0.7737001150846481,
"1.0": 0.7735983967781067
},
"soft_wta_t0.1": {
"0.0": 0.9377952325344086,
"0.05": 0.9377174872159958,
"0.1": 0.9376753580570221,
"0.2": 0.9376475828886032,
"0.5": 0.9376276469230652,
"1.0": 0.9376224195957183
},
"soft_wta_t0.5": {
"0.0": 0.9974229729175568,
"0.05": 0.9974228632450104,
"0.1": 0.9974228018522262,
"0.2": 0.9974227517843246,
"0.5": 0.9974227398633957,
"1.0": 0.9974227231740952
},
"multiprobe_4": {
"0.0": 0.0,
"0.05": 0.0,
"0.1": 0.0,
"0.2": 0.0,
"0.5": 0.0,
"1.0": 0.0
},
"multiprobe_8": {
"0.0": 0.0,
"0.05": 0.0,
"0.1": 0.0,
"0.2": 0.0,
"0.5": 0.0,
"1.0": 0.0
},
"multiprobe_16": {
"0.0": 0.0,
"0.05": 0.0,
"0.1": 0.0,
"0.2": 0.0,
"0.5": 0.0,
"1.0": 0.0
},
"multiprobe_32": {
"0.0": 0.0,
"0.05": 0.0,
"0.1": 0.0,
"0.2": 0.0,
"0.5": 0.0,
"1.0": 0.0
},
"coarse_to_fine": {
"0.0": 0.9999999326467514,
"0.05": 0.9999999326467514,
"0.1": 0.9999999326467514,
"0.2": 0.9999999326467514,
"0.5": 0.24099998503923417,
"1.0": 0.07149999514222145
},
"wider_k_50": {
"0.0": 1.000000058412552,
"0.05": 0.96500005453825,
"0.1": 0.3752000237070024,
"0.2": 0.10180000556632876,
"0.5": 0.021200001928955315,
"1.0": 0.01700000114738941
},
"wider_k_100": {
"0.0": 1.0000000560283662,
"0.05": 0.9984000563621521,
"0.1": 0.6423000478558243,
"0.2": 0.18020001276396214,
"0.5": 0.050500003919005394,
"1.0": 0.03480000267736614
},
"wider_k_200": {
"0.0": 1.0000000560283662,
"0.05": 0.9999500566720962,
"0.1": 0.6304500451683999,
"0.2": 0.18000001210719346,
"0.5": 0.07430000650696457,
"1.0": 0.06735000459477306
},
"wider_k_500": {
"0.0": 0.9999999970197677,
"0.05": 0.9025200027227401,
"0.1": 0.38294000312685966,
"0.2": 0.17088000044226648,
"0.5": 0.09710000049322844,
"1.0": 0.08222000036388635
},
"wider_k_1000": {
"0.0": 0.9985101699829102,
"0.05": 0.5221900832653046,
"0.1": 0.27553004458546637,
"0.2": 0.16993002608418464,
"0.5": 0.13159002162516117,
"1.0": 0.11921001873910426
}
}