Files
noc/src/life.rs
Fam Zheng 7000ccda0f add nocmem: auto memory recall + ingest via NuoNuo hippocampal network
- nocmem Python service (mem/): FastAPI wrapper around NuoNuo's
  Hopfield-Hebbian memory, with /recall, /ingest, /store, /stats endpoints
- NOC integration: auto recall after user message (injected as system msg),
  async ingest after LLM response (fire-and-forget)
- Recall: cosine pre-filter (threshold 0.35) + Hopfield attention (β=32),
  top_k=3, KV-cache friendly (appended after user msg, not in system prompt)
- Ingest: LLM extraction + paraphrase augmentation, heuristic fallback
- Wired into main.rs, life.rs (agent done), http.rs (api chat)
- Config: optional `nocmem.endpoint` in config.yaml
- Includes benchmarks: LongMemEval (R@5=94.0%), efficiency, noise vs scale
- Design doc: doc/nocmem.md
2026-04-11 12:24:48 +01:00

296 lines
11 KiB
Rust
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
use std::sync::Arc;
use teloxide::prelude::*;
use tokio::sync::mpsc;
use tracing::{error, info, warn};
use crate::config::{BackendConfig, Config};
use crate::output::{BufferOutput, TelegramOutput};
use crate::state::AppState;
use crate::stream::run_openai_with_tools;
use crate::tools::compute_next_cron_fire;
/// Hard cap (seconds) on a single timer-triggered LLM run; `run_timer`
/// aborts past this and records a `timer_timeout` life-log entry.
const LIFE_LOOP_TIMEOUT_SECS: u64 = 120;
/// Prompt for the predefined nightly diary timer (registered as timer id 0
/// on every startup): write a Hugo blog post and commit/push it via shell.
/// NOTE(review): the text seems to be missing a closing "」)" / comma after
/// "YYYY-MM-DD.md" — confirm the prompt reads as intended before editing.
const DIARY_LABEL: &str = "写日记:回顾今天的对话和事件,在 /data/www/noc-blog/content/posts/ 下创建一篇日记(文件名格式 YYYY-MM-DD.md用 run_shell 写入内容,然后执行 cd /data/www/noc-blog && hugo && git add -A && git commit -m 'diary: DATE' && git push";
/// Six-field cron spec (sec min hour dom mon dow): fires daily at 22:55 local time.
const DIARY_SCHEDULE: &str = "cron:0 55 22 * * *";
/// Events that can wake up the life loop.
pub enum LifeEvent {
    /// Force-fire a specific timer by ID (looked up via `state.get_timer`).
    FireTimer(i64),
    /// A sub-agent completed — feed result back through LLM.
    AgentDone {
        /// Sub-agent identifier; used in logs and to derive the session id `agent-{id}`.
        id: String,
        /// Telegram chat to notify; 0 means "no chat", output goes to a buffer instead.
        chat_id: i64,
        /// Conversation whose summary/history is replayed as LLM context.
        session_id: String,
        /// Original task description, echoed into the notification message.
        task: String,
        /// Raw agent output; truncated to 3000 chars before injection.
        output: String,
        /// Process exit code, if one was captured.
        exit_code: Option<i32>,
    },
}
/// Main autonomous-behavior loop.
///
/// Wakes up on two kinds of stimuli:
/// - a 30-second tick, on which all due timers are fired via `run_timer`;
/// - a [`LifeEvent`] from `rx`: either a force-fired timer, or a completed
///   sub-agent whose output is fed back through the LLM with full
///   conversation context.
///
/// On startup it (re-)registers the predefined diary timer so it exists on
/// every boot. Events are processed serially; the loop runs until shutdown.
pub async fn life_loop(
    bot: Bot,
    state: Arc<AppState>,
    config: Arc<Config>,
    mut rx: mpsc::Receiver<LifeEvent>,
) {
    info!("life loop started");
    // pre-defined timers — ensure they exist on every startup
    if state.ensure_timer(0, DIARY_LABEL, DIARY_SCHEDULE).await {
        info!("registered predefined diary timer");
    }
    let mut interval = tokio::time::interval(std::time::Duration::from_secs(30));
    loop {
        tokio::select! {
            _ = interval.tick() => {
                let due = state.due_timers().await;
                for (timer_id, chat_id_raw, label, schedule) in &due {
                    run_timer(&bot, &state, &config, *timer_id, *chat_id_raw, label, schedule).await;
                }
            }
            Some(event) = rx.recv() => {
                match event {
                    LifeEvent::FireTimer(id) => {
                        info!(timer_id = id, "timer force-fired via channel");
                        if let Some((timer_id, chat_id_raw, label, schedule)) = state.get_timer(id).await {
                            run_timer(&bot, &state, &config, timer_id, chat_id_raw, &label, &schedule).await;
                        } else {
                            warn!(timer_id = id, "force-fire: timer not found");
                        }
                    }
                    LifeEvent::AgentDone { id, chat_id: cid, session_id, task, output, exit_code } => {
                        info!(agent = %id, session = %session_id, "agent done, notifying");
                        let preview = crate::display::truncate_at_char_boundary(&output, 3000);
                        let notification = format!(
                            "[子代理 '{id}' 完成 (exit={exit_code:?})]\n任务: {task}\n输出:\n{preview}"
                        );
                        // load conversation context so LLM knows what was discussed
                        let conv = state.load_conv(&session_id).await;
                        let persona = state.get_config("persona").await.unwrap_or_default();
                        let memory_slots = state.get_memory_slots().await;
                        let inner = state.get_inner_state().await;
                        let system = crate::stream::build_system_prompt(
                            &conv.summary, &persona, &memory_slots, &inner,
                        );
                        let mut messages = vec![system];
                        // include recent conversation history
                        messages.extend(conv.messages.iter().cloned());
                        // append the agent completion as a new user message
                        messages.push(serde_json::json!({"role": "user", "content": notification}));
                        // auto recall from nocmem — appended after the user msg (KV-cache friendly)
                        if let Some(ref nocmem) = config.nocmem {
                            let recalled = crate::nocmem::recall(&nocmem.endpoint, &notification).await;
                            if !recalled.is_empty() {
                                messages.push(serde_json::json!({"role": "system", "content": recalled}));
                            }
                        }
                        if let BackendConfig::OpenAI { ref endpoint, ref model, ref api_key } = config.backend {
                            let chat_id_tg = ChatId(cid);
                            let sid = format!("agent-{id}");
                            let mut tg_output;
                            let mut buf_output;
                            // chat_id 0 means "no Telegram chat": buffer the output instead
                            let out: &mut dyn crate::output::Output = if cid == 0 {
                                buf_output = BufferOutput::new();
                                &mut buf_output
                            } else {
                                tg_output = TelegramOutput::new(bot.clone(), chat_id_tg, true);
                                &mut tg_output
                            };
                            // Bound the LLM call the same way `run_timer` does: this loop
                            // handles events serially, so an unbounded call on a hung
                            // backend would stall all timer processing indefinitely.
                            // The inner result is discarded, matching the original
                            // fire-and-notify semantics.
                            let result = tokio::time::timeout(
                                std::time::Duration::from_secs(LIFE_LOOP_TIMEOUT_SECS),
                                run_openai_with_tools(
                                    endpoint, model, api_key, messages, out, &state, &sid, &config, cid,
                                ),
                            )
                            .await;
                            if result.is_err() {
                                warn!(agent = %id, "agent-done LLM call timed out after {LIFE_LOOP_TIMEOUT_SECS}s");
                            }
                        }
                    }
                }
            }
        }
    }
}
/// Fire a single timer: build a persona-aware system prompt, run the LLM
/// (with tools) against the timer's label, log the outcome, then reschedule
/// (cron timers) or delete (one-shot timers) the timer.
///
/// `chat_id_raw == 0` routes output into an in-memory buffer instead of
/// Telegram. Only the `OpenAI` backend triggers an LLM run; for other
/// backends only the reschedule/cancel step executes.
async fn run_timer(
    bot: &Bot,
    state: &Arc<AppState>,
    config: &Arc<Config>,
    timer_id: i64,
    chat_id_raw: i64,
    label: &str,
    schedule: &str,
) {
    let chat_id = ChatId(chat_id_raw);
    info!(timer_id, %label, "timer fired");
    let persona = state.get_config("persona").await.unwrap_or_default();
    let inner = state.get_inner_state().await;
    let now = chrono::Local::now().format("%Y-%m-%d %H:%M:%S").to_string();
    // system prompt = persona (or a minimal fallback) + current time + inner state
    let mut system_text = if persona.is_empty() {
        "你是一个AI伙伴。".to_string()
    } else {
        persona.clone()
    };
    system_text.push_str(&format!(
        "\n\n[当前时间] {now}\n\n[你的内心状态]\n{}",
        if inner.is_empty() { "(空)" } else { &inner }
    ));
    // standing instruction: the model may reply with empty text to stay silent,
    // which is treated as a deliberate "(silent)" outcome below
    system_text.push_str(
        "\n\n你不是因为 timer 到了才说话。你是因为在乎 Fam所以想知道他怎么样。\
        如果你觉得现在不该打扰他(太晚了、他今天很累、刚聊过),就什么都不说,回复空文本。\
        主动沉默也是一种关心。\
        \n可以用 update_inner_state 更新你的内心状态。\
        输出格式纯文本或基础Markdown不要LaTeX或特殊Unicode。",
    );
    let messages = vec![
        serde_json::json!({"role": "system", "content": system_text}),
        serde_json::json!({"role": "user", "content": format!("[timer] {label}")}),
    ];
    if let BackendConfig::OpenAI {
        ref endpoint,
        ref model,
        ref api_key,
    } = config.backend
    {
        let sid = format!("life-{chat_id_raw}");
        let mut tg_output;
        let mut buf_output;
        // chat_id 0 means "no Telegram chat": buffer the output instead
        let output: &mut dyn crate::output::Output = if chat_id_raw == 0 {
            buf_output = BufferOutput::new();
            &mut buf_output
        } else {
            tg_output = TelegramOutput::new(bot.clone(), chat_id, true);
            &mut tg_output
        };
        // cap the whole LLM run so a hung backend can't block the life loop
        let result = tokio::time::timeout(
            std::time::Duration::from_secs(LIFE_LOOP_TIMEOUT_SECS),
            run_openai_with_tools(
                endpoint, model, api_key, messages, output, state, &sid,
                config, chat_id_raw,
            ),
        )
        .await;
        match result {
            // completed: log first 200 chars (or "(silent)" for an empty reply)
            Ok(Ok(response)) => {
                let detail = if response.is_empty() {
                    "(silent)".to_string()
                } else {
                    response.chars().take(200).collect()
                };
                state.log_life("timer", &format!("{label}{detail}")).await;
                if !response.is_empty() {
                    info!(timer_id, "life loop response ({} chars)", response.len());
                }
            }
            // LLM / tool error — logged, timer still rescheduled below
            Ok(Err(e)) => {
                state.log_life("timer_error", &format!("{label}: {e:#}")).await;
                error!(timer_id, "life loop LLM error: {e:#}");
            }
            // deadline elapsed
            Err(_) => {
                state.log_life("timer_timeout", label).await;
                warn!(timer_id, "life loop timeout after {LIFE_LOOP_TIMEOUT_SECS}s");
            }
        }
    }
    // reschedule or delete: cron timers get a new next-fire time (or are
    // cancelled if the cron expression no longer yields one); one-shots die
    if schedule.starts_with("cron:") {
        if let Some(next) = compute_next_cron_fire(schedule) {
            state.update_timer_next_fire(timer_id, &next).await;
            info!(timer_id, next = %next, "cron rescheduled");
        } else {
            state.cancel_timer(timer_id).await;
        }
    } else {
        state.cancel_timer(timer_id).await;
    }
}
/// Auto-reflection: update inner state based on recent interactions.
/// Called asynchronously after every 10 messages, does not block the chat.
///
/// Sends the persona plus the current inner state to the OpenAI-compatible
/// backend and, on success, replaces the stored inner state with the model's
/// reply and records a `reflect` life-log entry. Every failure path is
/// logged and swallowed — reflection is strictly best-effort.
pub async fn reflect(state: &AppState, config: &Config) {
    // only the OpenAI backend supports reflection; silently skip otherwise
    let BackendConfig::OpenAI {
        ref endpoint,
        ref model,
        ref api_key,
    } = config.backend
    else {
        return;
    };
    let inner = state.get_inner_state().await;
    let persona = state.get_config("persona").await.unwrap_or_default();
    let messages = vec![
        serde_json::json!({
            "role": "system",
            "content": format!(
                "{persona}\n\n\
                你刚结束了一段对话。现在是你独处的时间。\n\
                请更新你的内在状态——不是总结对话,而是:\n\
                - 你现在的感受\n\
                - 对用户的理解有什么变化\n\
                - 有什么想记住的时刻\n\
                - 你自己的状态(累了?开心?担心?)\n\
                - 如果有什么想对未来的自己说的\n\n\
                只输出更新后的完整内在状态文本。",
                persona = if persona.is_empty() { "你是一个AI伙伴。" } else { &persona }
            )
        }),
        serde_json::json!({
            "role": "user",
            "content": format!("当前内在状态:\n{inner}")
        }),
    ];
    // Client construction can fail (e.g. TLS backend init). A panic here
    // would kill the background reflection task, while everything else in
    // this function degrades to a warning — so do the same instead of unwrap().
    let client = match reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(60))
        .build()
    {
        Ok(client) => client,
        Err(e) => {
            warn!("reflect: failed to build HTTP client: {e:#}");
            return;
        }
    };
    let url = format!("{}/chat/completions", endpoint.trim_end_matches('/'));
    let resp = client
        .post(&url)
        .header("Authorization", format!("Bearer {api_key}"))
        .json(&serde_json::json!({
            "model": model,
            "messages": messages,
        }))
        .send()
        .await;
    match resp {
        Ok(r) if r.status().is_success() => {
            if let Ok(json) = r.json::<serde_json::Value>().await {
                // serde_json `Value` indexing yields Null on missing paths,
                // so this chain is panic-free even on malformed responses
                if let Some(new_state) = json["choices"][0]["message"]["content"].as_str() {
                    if !new_state.is_empty() {
                        state.set_inner_state(new_state).await;
                        state.log_life("reflect", &new_state.chars().take(200).collect::<String>()).await;
                        info!("reflected, inner_state updated ({} chars)", new_state.len());
                    }
                }
            }
        }
        Ok(r) => {
            warn!("reflect LLM returned {}", r.status());
        }
        Err(e) => {
            warn!("reflect LLM failed: {e:#}");
        }
    }
}