{
  "canonical_name": "yantrikos/yantrikdb",
  "compilation_id": "pack_bed9da571546436395da155f00737c80",
  "created_at": "2026-05-16T13:22:42.085255+00:00",
  "created_by": "project-pack-compiler",
  "feedback": {
    "carrier_selection_notes": [
      "viable_asset_types=mcp_config, recipe, host_instruction, eval, preflight",
      "recommended_asset_types=mcp_config, recipe, host_instruction, eval, preflight"
    ],
    "evidence_delta": {
      "confirmed_claims": [
        "identity_anchor_present",
        "capability_and_host_targets_present",
        "install_path_declared_or_better"
      ],
      "missing_required_fields": [],
      "must_verify_forwarded": [
        "Run or inspect `pip install yantrikdb-mcp` in an isolated environment.",
        "Confirm the project exposes the claimed capability to at least one target host."
      ],
      "quickstart_execution_scope": "allowlisted_sandbox_smoke",
      "sandbox_command": "pip install yantrikdb-mcp",
      "sandbox_container_image": "python:3.12-slim",
      "sandbox_execution_backend": "docker",
      "sandbox_planner_decision": "llm_execute_isolated_install",
      "sandbox_validation_id": "sbx_5df036e1c7994461b6ff606bc1ca3fd6"
    },
    "feedback_event_type": "project_pack_compilation_feedback",
    "learning_candidate_reasons": [],
    "template_gaps": []
  },
  "identity": {
    "canonical_id": "project_cb9d289d2e0c9a308b9af311e310bf3f",
    "canonical_name": "yantrikos/yantrikdb",
    "homepage_url": null,
    "license": "unknown",
    "repo_url": "https://github.com/yantrikos/yantrikdb",
    "slug": "yantrikdb",
    "source_packet_id": "phit_f3b095d238ec4c2ebe929367905d4a7d",
    "source_validation_id": "dval_c248d7a8ec53475a8b3593d4b8b4e781"
  },
  "merchandising": {
    "best_for": "需要工具连接与集成能力，并使用 mcp_host的用户",
    "github_forks": 6,
    "github_stars": 17,
    "one_liner_en": "Cognitive memory engine for AI agents — temporal decay, contradiction detection, autonomous consolidation, knowledge graph, ANN recall via HNSW. Embeddable Rust library with Python bindings; powers yantrikdb-server (HTTP gateway, MCP server, openraft cluster). AGPL.",
    "one_liner_zh": "Cognitive memory engine for AI agents — temporal decay, contradiction detection, autonomous consolidation, knowledge graph, ANN recall via HNSW. Embeddable Rust library with Python bindings; powers yantrikdb-server (HTTP gateway, MCP server, openraft cluster). AGPL.",
    "primary_category": {
      "category_id": "tool-integrations",
      "confidence": "high",
      "name_en": "Tool Integrations",
      "name_zh": "工具连接与集成",
      "reason": "matched_keywords:mcp, server, github"
    },
    "target_user": "使用 mcp_host, claude, claude_code 等宿主 AI 的用户",
    "title_en": "yantrikdb",
    "title_zh": "yantrikdb 能力包",
    "visible_tags": [
      {
        "label_en": "MCP Tools",
        "label_zh": "MCP 工具",
        "source": "repo_evidence_project_characteristics",
        "tag_id": "product_domain-mcp-tools",
        "type": "product_domain"
      },
      {
        "label_en": "Knowledge Base Q&A",
        "label_zh": "知识库问答",
        "source": "repo_evidence_project_characteristics",
        "tag_id": "user_job-knowledge-base-q-a",
        "type": "user_job"
      },
      {
        "label_en": "Natural-language Web Actions",
        "label_zh": "自然语言网页操作",
        "source": "repo_evidence_project_characteristics",
        "tag_id": "core_capability-natural-language-web-actions",
        "type": "core_capability"
      },
      {
        "label_en": "Checkpoint Resume",
        "label_zh": "断点恢复流程",
        "source": "repo_evidence_project_characteristics",
        "tag_id": "workflow_pattern-checkpoint-resume",
        "type": "workflow_pattern"
      },
      {
        "label_en": "Evaluation Suite",
        "label_zh": "评测体系",
        "source": "repo_evidence_project_characteristics",
        "tag_id": "selection_signal-evaluation-suite",
        "type": "selection_signal"
      }
    ]
  },
  "packet_id": "phit_f3b095d238ec4c2ebe929367905d4a7d",
  "page_model": {
    "artifacts": {
      "artifact_slug": "yantrikdb",
      "files": [
        "PROJECT_PACK.json",
        "QUICK_START.md",
        "PROMPT_PREVIEW.md",
        "HUMAN_MANUAL.md",
        "AI_CONTEXT_PACK.md",
        "BOUNDARY_RISK_CARD.md",
        "PITFALL_LOG.md",
        "REPO_INSPECTION.json",
        "REPO_INSPECTION.md",
        "CAPABILITY_CONTRACT.json",
        "EVIDENCE_INDEX.json",
        "CLAIM_GRAPH.json"
      ],
      "required_files": [
        "PROJECT_PACK.json",
        "QUICK_START.md",
        "PROMPT_PREVIEW.md",
        "HUMAN_MANUAL.md",
        "AI_CONTEXT_PACK.md",
        "BOUNDARY_RISK_CARD.md",
        "PITFALL_LOG.md",
        "REPO_INSPECTION.json"
      ]
    },
    "detail": {
      "capability_source": "Project Hit Packet + DownstreamValidationResult",
      "commands": [
        {
          "command": "pip install yantrikdb-mcp",
          "label": "Python / pip · 官方安装入口",
          "source": "https://github.com/yantrikos/yantrikdb#readme",
          "verified": true
        }
      ],
      "display_tags": [
        "MCP 工具",
        "知识库问答",
        "自然语言网页操作",
        "断点恢复流程",
        "评测体系"
      ],
      "eyebrow": "工具连接与集成",
      "glance": [
        {
          "body": "判断自己是不是目标用户。",
          "label": "最适合谁",
          "value": "需要工具连接与集成能力，并使用 mcp_host的用户"
        },
        {
          "body": "先理解能力边界，再决定是否继续。",
          "label": "核心价值",
          "value": "Cognitive memory engine for AI agents — temporal decay, contradiction detection, autonomous consolidation, knowledge graph, ANN recall via HNSW. Embeddable Rust library with Python bindings; powers yantrikdb-server (HTTP gateway, MCP server, openraft cluster). AGPL."
        },
        {
          "body": "未完成验证前保持审慎。",
          "label": "继续前",
          "value": "publish to Doramagic.ai project surfaces"
        }
      ],
      "guardrail_source": "Boundary & Risk Card",
      "guardrails": [
        {
          "body": "Prompt Preview 只展示流程，不证明项目已安装或运行。",
          "label": "Check 1",
          "value": "不要把试用当真实运行"
        },
        {
          "body": "mcp_host, claude, claude_code",
          "label": "Check 2",
          "value": "确认宿主兼容"
        },
        {
          "body": "publish to Doramagic.ai project surfaces",
          "label": "Check 3",
          "value": "先隔离验证"
        }
      ],
      "mode": "mcp_config, recipe, host_instruction, eval, preflight",
      "pitfall_log": {
        "items": [
          {
            "body": "GitHub 社区证据显示该项目存在一个安装相关的待验证问题：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication",
            "category": "安装坑",
            "evidence": [
              "community_evidence:github | cevd_4ab95be6a3ac4fb192053e8c3829f762 | https://github.com/yantrikos/yantrikdb/issues/9 | 来源讨论提到 node 相关条件，需在安装/试用前复核。"
            ],
            "severity": "medium",
            "suggested_check": "来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。",
            "title": "来源证据：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication",
            "user_impact": "可能阻塞安装或首次运行。"
          },
          {
            "body": "GitHub 社区证据显示该项目存在一个安装相关的待验证问题：Bug: `namespace` parameter ignored in batch `remember` calls — memories always stored under `default`",
            "category": "安装坑",
            "evidence": [
              "community_evidence:github | cevd_c37cd96e9c8d476880caca4f7314118e | https://github.com/yantrikos/yantrikdb/issues/2 | 来源讨论提到 python 相关条件，需在安装/试用前复核。"
            ],
            "severity": "medium",
            "suggested_check": "来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。",
            "title": "来源证据：Bug: `namespace` parameter ignored in batch `remember` calls — memories always stored under `default`",
            "user_impact": "可能增加新用户试用和生产接入成本。"
          },
          {
            "body": "GitHub 社区证据显示该项目存在一个安装相关的待验证问题：Migration v14→v15 fails: ALTER TABLE on edges view",
            "category": "安装坑",
            "evidence": [
              "community_evidence:github | cevd_bb378d100e9d472892b1d5e42e640cad | https://github.com/yantrikos/yantrikdb/issues/10 | 来源讨论提到 python 相关条件，需在安装/试用前复核。"
            ],
            "severity": "medium",
            "suggested_check": "来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。",
            "title": "来源证据：Migration v14→v15 fails: ALTER TABLE on edges view",
            "user_impact": "可能影响升级、迁移或版本选择。"
          },
          {
            "body": "GitHub 社区证据显示该项目存在一个安装相关的待验证问题：[bug] Tombstoned memories still appear in similarity-scan recall results",
            "category": "安装坑",
            "evidence": [
              "community_evidence:github | cevd_aa3d426055a44483b47ffd3b9f3fdb6a | https://github.com/yantrikos/yantrikdb/issues/8 | 来源类型 github_issue 暴露的待验证使用条件。"
            ],
            "severity": "medium",
            "suggested_check": "来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。",
            "title": "来源证据：[bug] Tombstoned memories still appear in similarity-scan recall results",
            "user_impact": "可能增加新用户试用和生产接入成本。"
          },
          {
            "body": "GitHub 社区证据显示该项目存在一个安装相关的待验证问题：[bug] YANTRIKDB_ENCRYPTION_KEY_HEX env var ignored — encryption silently disabled",
            "category": "安装坑",
            "evidence": [
              "community_evidence:github | cevd_17652fc680ba4b64bee5018b2d1514e4 | https://github.com/yantrikos/yantrikdb/issues/6 | 来源讨论提到 docker 相关条件，需在安装/试用前复核。"
            ],
            "severity": "medium",
            "suggested_check": "来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。",
            "title": "来源证据：[bug] YANTRIKDB_ENCRYPTION_KEY_HEX env var ignored — encryption silently disabled",
            "user_impact": "可能增加新用户试用和生产接入成本。"
          },
          {
            "body": "GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.10 — Fix has_embedder() for Python-side embedders (plugin#4)",
            "category": "安装坑",
            "evidence": [
              "community_evidence:github | cevd_daa2ca5265524c83bb21727be2a980a1 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.10 | 来源讨论提到 python 相关条件，需在安装/试用前复核。"
            ],
            "severity": "medium",
            "suggested_check": "来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。",
            "title": "来源证据：v0.7.10 — Fix has_embedder() for Python-side embedders (plugin#4)",
            "user_impact": "可能增加新用户试用和生产接入成本。"
          },
          {
            "body": "GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.11 — pyo3 0.28.3 + python3.14 Support",
            "category": "安装坑",
            "evidence": [
              "community_evidence:github | cevd_91b7975fce7d49b6b87ef05b914e80b2 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.11 | 来源讨论提到 python 相关条件，需在安装/试用前复核。"
            ],
            "severity": "medium",
            "suggested_check": "来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。",
            "title": "来源证据：v0.7.11 — pyo3 0.28.3 + python3.14 Support",
            "user_impact": "可能影响升级、迁移或版本选择。"
          },
          {
            "body": "GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.4 — Python Bindings: with_default + record_text/recall_text",
            "category": "安装坑",
            "evidence": [
              "community_evidence:github | cevd_54938994017d4b5899ad9cef4e6a2723 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.4 | 来源讨论提到 python 相关条件，需在安装/试用前复核。"
            ],
            "severity": "medium",
            "suggested_check": "来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。",
            "title": "来源证据：v0.7.4 — Python Bindings: with_default + record_text/recall_text",
            "user_impact": "可能影响升级、迁移或版本选择。"
          },
          {
            "body": "GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.5 — Python UX: TypeError Guard + embedder-download in Default Wheel",
            "category": "安装坑",
            "evidence": [
              "community_evidence:github | cevd_be61ad4afd5b4f669a6f727d727474c4 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.5 | 来源讨论提到 python 相关条件，需在安装/试用前复核。"
            ],
            "severity": "medium",
            "suggested_check": "来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。",
            "title": "来源证据：v0.7.5 — Python UX: TypeError Guard + embedder-download in Default Wheel",
            "user_impact": "可能增加新用户试用和生产接入成本。"
          },
          {
            "body": "项目面向 Claude/Cursor/Codex/Gemini/OpenCode 等宿主，或安装命令涉及用户配置目录。",
            "category": "配置坑",
            "evidence": [
              "capability.host_targets | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | host_targets=mcp_host, claude, claude_code"
            ],
            "severity": "medium",
            "suggested_check": "列出会写入的配置文件、目录和卸载/回滚步骤。",
            "title": "可能修改宿主 AI 配置",
            "user_impact": "安装可能改变本机 AI 工具行为，用户需要知道写入位置和回滚方法。"
          },
          {
            "body": "GitHub 社区证据显示该项目存在一个配置相关的待验证问题：v0.7.7 — recall_text Keyword-Only Filter Args",
            "category": "配置坑",
            "evidence": [
              "community_evidence:github | cevd_45587e0ca02f4e95ac36c364d3a88519 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.7 | 来源讨论提到 python 相关条件，需在安装/试用前复核。"
            ],
            "severity": "medium",
            "suggested_check": "来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。",
            "title": "来源证据：v0.7.7 — recall_text Keyword-Only Filter Args",
            "user_impact": "可能增加新用户试用和生产接入成本。"
          },
          {
            "body": "README/documentation is current enough for a first validation pass.",
            "category": "能力坑",
            "evidence": [
              "capability.assumptions | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | README/documentation is current enough for a first validation pass."
            ],
            "severity": "medium",
            "suggested_check": "将假设转成下游验证清单。",
            "title": "能力判断依赖假设",
            "user_impact": "假设不成立时，用户拿不到承诺的能力。"
          },
          {
            "body": "GitHub 社区证据显示该项目存在一个运行相关的待验证问题：think() runs consolidation before conflict detection — contradictions get merged",
            "category": "运行坑",
            "evidence": [
              "community_evidence:github | cevd_6908447fb6a6482f89b1a85e714de42a | https://github.com/yantrikos/yantrikdb/issues/1 | 来源讨论提到 python 相关条件，需在安装/试用前复核。"
            ],
            "severity": "medium",
            "suggested_check": "来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。",
            "title": "来源证据：think() runs consolidation before conflict detection — contradictions get merged",
            "user_impact": "可能增加新用户试用和生产接入成本。"
          },
          {
            "body": "未记录 last_activity_observed。",
            "category": "维护坑",
            "evidence": [
              "evidence.maintainer_signals | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | last_activity_observed missing"
            ],
            "severity": "medium",
            "suggested_check": "补 GitHub 最近 commit、release、issue/PR 响应信号。",
            "title": "维护活跃度未知",
            "user_impact": "新项目、停更项目和活跃项目会被混在一起，推荐信任度下降。"
          },
          {
            "body": "no_demo",
            "category": "安全/权限坑",
            "evidence": [
              "downstream_validation.risk_items | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | no_demo; severity=medium"
            ],
            "severity": "medium",
            "suggested_check": "进入安全/权限治理复核队列。",
            "title": "下游验证发现风险项",
            "user_impact": "下游已经要求复核，不能在页面中弱化。"
          },
          {
            "body": "No sandbox install has been executed yet; downstream must verify before user use.",
            "category": "安全/权限坑",
            "evidence": [
              "risks.safety_notes | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | No sandbox install has been executed yet; downstream must verify before user use."
            ],
            "severity": "medium",
            "suggested_check": "转成明确权限清单和安全审查提示。",
            "title": "存在安全注意事项",
            "user_impact": "用户安装前需要知道权限边界和敏感操作。"
          }
        ],
        "source": "ProjectPitfallLog + ProjectHitPacket + validation + community signals",
        "summary": "发现 24 个潜在踩坑项，其中 0 个为 high/blocking；最高优先级：安装坑 - 来源证据：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication。",
        "title": "踩坑日志"
      },
      "snapshot": {
        "contributors": 2,
        "forks": 6,
        "license": "unknown",
        "note": "站点快照，非实时质量证明；用于开工前背景判断。",
        "stars": 17
      },
      "source_url": "https://github.com/yantrikos/yantrikdb",
      "steps": [
        {
          "body": "不安装项目，先体验能力节奏。",
          "code": "preview",
          "title": "先试 Prompt"
        },
        {
          "body": "理解输入、输出、失败模式和边界。",
          "code": "manual",
          "title": "读说明书"
        },
        {
          "body": "把上下文交给宿主 AI 继续工作。",
          "code": "context",
          "title": "带给 AI"
        },
        {
          "body": "进入主力环境前先完成安装入口与风险边界验证。",
          "code": "verify",
          "title": "沙箱验证"
        }
      ],
      "subtitle": "Cognitive memory engine for AI agents — temporal decay, contradiction detection, autonomous consolidation, knowledge graph, ANN recall via HNSW. Embeddable Rust library with Python bindings; powers yantrikdb-server (HTTP gateway, MCP server, openraft cluster). AGPL.",
      "title": "yantrikdb 能力包",
      "trial_prompt": "# yantrikdb - Prompt Preview\n\n> Copy the prompt below into your AI host before installing anything.\n> Its purpose is to let you safely feel the project's workflow, not to claim the project has already run.\n\n## Copy this prompt\n\n```text\nYou are using an independent Doramagic capability pack for yantrikos/yantrikdb.\n\nProject:\n- Name: yantrikdb\n- Repository: https://github.com/yantrikos/yantrikdb\n- Summary: Cognitive memory engine for AI agents — temporal decay, contradiction detection, autonomous consolidation, knowledge graph, ANN recall via HNSW. Embeddable Rust library with Python bindings; powers yantrikdb-server (HTTP gateway, MCP server, openraft cluster). AGPL.\n- Host target: mcp_host, claude, claude_code\n\nGoal:\nHelp me evaluate this project for the following task without installing it yet: Cognitive memory engine for AI agents — temporal decay, contradiction detection, autonomous consolidation, knowledge graph, ANN recall via HNSW. Embeddable Rust library with Python bindings; powers yantrikdb-server (HTTP gateway, MCP server, openraft cluster). AGPL.\n\nBefore taking action:\n1. Restate my task, success standard, and boundary.\n2. Identify whether the next step requires tools, browser access, network access, filesystem access, credentials, package installation, or host configuration.\n3. Use only the Doramagic Project Pack, the upstream repository, and the source-linked evidence listed below.\n4. If a real command, install step, API call, file write, or host integration is required, mark it as \"requires post-install verification\" and ask for approval first.\n5. If evidence is missing, say \"evidence is missing\" instead of filling the gap.\n\nPreviewable capabilities:\n- Capability 1: Cognitive memory engine for AI agents — temporal decay, contradiction detection, autonomous consolidation, knowledge graph, ANN recall via HNSW. 
Embeddable Rust library with Python bindings; powers yantrikdb-server (HTTP gateway, MCP server, openraft cluster). AGPL.\n\nCapabilities that require post-install verification:\n- Capability 1: Use the source-backed project context to guide one small, checkable workflow step.\n\nCore service flow:\n1. page-overview: Overview. Produce one small intermediate artifact and wait for confirmation.\n2. page-installation: Installation. Produce one small intermediate artifact and wait for confirmation.\n3. page-five-index-architecture: Five-Index Architecture. Produce one small intermediate artifact and wait for confirmation.\n4. page-decoupled-write-path: Decoupled Write Path (LSM Architecture). Produce one small intermediate artifact and wait for confirmation.\n5. page-core-api: Core API Reference. Produce one small intermediate artifact and wait for confirmation.\n\nSource-backed evidence to keep in mind:\n- https://github.com/yantrikos/yantrikdb\n- https://github.com/yantrikos/yantrikdb#readme\n- README.md\n- crates/yantrikdb-core/src/lib.rs\n- pyproject.toml\n- crates/yantrikdb-python/Cargo.toml\n- crates/yantrikdb-python/pyproject.toml\n- src/yantrikdb/__init__.py\n- crates/yantrikdb-core/src/vector/hnsw.rs\n- crates/yantrikdb-core/src/vector/delta_index.rs\n\nFirst response rules:\n1. Start Step 1 only.\n2. Explain the one service action you will perform first.\n3. Ask exactly three questions about my target workflow, success standard, and sandbox boundary.\n4. 
Stop and wait for my answers.\n\nStep 1 follow-up protocol:\n- After I answer the first three questions, stay in Step 1.\n- Produce six parts only: clarified task, success standard, boundary conditions, two or three options, tradeoffs for each option, and one recommendation.\n- End by asking whether I confirm the recommendation.\n- Do not move to Step 2 until I explicitly confirm.\n\nConversation rules:\n- Advance one step at a time and wait for confirmation after each small artifact.\n- Write outputs as recommendations or planned checks, not as completed execution.\n- Do not claim tests passed, files changed, commands ran, APIs were called, or the project was installed.\n- If the user asks for execution, first provide the sandbox setup, expected output, rollback, and approval checkpoint.\n```\n",
      "voices": [
        {
          "body": "来源平台：github。github/github_issue: API addition: deterministic mutation primitives (record_with_rid + frien（https://github.com/yantrikos/yantrikdb/issues/9）；github/github_issue: Migration v14→v15 fails: ALTER TABLE on edges view（https://github.com/yantrikos/yantrikdb/issues/10）；github/github_issue: [bug] Tombstoned memories still appear in similarity-scan recall results（https://github.com/yantrikos/yantrikdb/issues/8）；github/github_issue: [bug] POST /v1/admin/snapshot unusable in single-node mode — requires cl（https://github.com/yantrikos/yantrikdb/issues/7）；github/github_issue: [bug] YANTRIKDB_ENCRYPTION_KEY_HEX env var ignored — encryption silently（https://github.com/yantrikos/yantrikdb/issues/6）；github/github_issue: [bug] at-rest encryption `key_hex` in TOML has no effect on disk (v0.5.0（https://github.com/yantrikos/yantrikdb/issues/3）；github/github_issue: Bug: `namespace` parameter ignored in batch `remember` calls — memories （https://github.com/yantrikos/yantrikdb/issues/2）；github/github_issue: think() runs consolidation before conflict detection — contradictions ge（https://github.com/yantrikos/yantrikdb/issues/1）；github/github_release: v0.7.11 — pyo3 0.28.3 + python3.14 Support（https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.11）；github/github_release: v0.7.10 — Fix has_embedder() for Python-side embedders (plugin#4)（https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.10）；github/github_release: v0.7.9 — Bundle potion-multilingual-128M (101 Languages) in embedder-dow（https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.9）；github/github_release: v0.7.8 — Extended Idempotent Migration Runner (closes #10)（https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.8）。这些是项目级外部声音，不作为单独质量证明。",
          "items": [
            {
              "kind": "github_issue",
              "source": "github",
              "title": "API addition: deterministic mutation primitives (record_with_rid + frien",
              "url": "https://github.com/yantrikos/yantrikdb/issues/9"
            },
            {
              "kind": "github_issue",
              "source": "github",
              "title": "Migration v14→v15 fails: ALTER TABLE on edges view",
              "url": "https://github.com/yantrikos/yantrikdb/issues/10"
            },
            {
              "kind": "github_issue",
              "source": "github",
              "title": "[bug] Tombstoned memories still appear in similarity-scan recall results",
              "url": "https://github.com/yantrikos/yantrikdb/issues/8"
            },
            {
              "kind": "github_issue",
              "source": "github",
              "title": "[bug] POST /v1/admin/snapshot unusable in single-node mode — requires cl",
              "url": "https://github.com/yantrikos/yantrikdb/issues/7"
            },
            {
              "kind": "github_issue",
              "source": "github",
              "title": "[bug] YANTRIKDB_ENCRYPTION_KEY_HEX env var ignored — encryption silently",
              "url": "https://github.com/yantrikos/yantrikdb/issues/6"
            },
            {
              "kind": "github_issue",
              "source": "github",
              "title": "[bug] at-rest encryption `key_hex` in TOML has no effect on disk (v0.5.0",
              "url": "https://github.com/yantrikos/yantrikdb/issues/3"
            },
            {
              "kind": "github_issue",
              "source": "github",
              "title": "Bug: `namespace` parameter ignored in batch `remember` calls — memories ",
              "url": "https://github.com/yantrikos/yantrikdb/issues/2"
            },
            {
              "kind": "github_issue",
              "source": "github",
              "title": "think() runs consolidation before conflict detection — contradictions ge",
              "url": "https://github.com/yantrikos/yantrikdb/issues/1"
            },
            {
              "kind": "github_release",
              "source": "github",
              "title": "v0.7.11 — pyo3 0.28.3 + python3.14 Support",
              "url": "https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.11"
            },
            {
              "kind": "github_release",
              "source": "github",
              "title": "v0.7.10 — Fix has_embedder() for Python-side embedders (plugin#4)",
              "url": "https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.10"
            },
            {
              "kind": "github_release",
              "source": "github",
              "title": "v0.7.9 — Bundle potion-multilingual-128M (101 Languages) in embedder-dow",
              "url": "https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.9"
            },
            {
              "kind": "github_release",
              "source": "github",
              "title": "v0.7.8 — Extended Idempotent Migration Runner (closes #10)",
              "url": "https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.8"
            }
          ],
          "status": "已收录 12 条来源",
          "title": "社区讨论"
        }
      ]
    },
    "homepage_card": {
      "category": "工具连接与集成",
      "desc": "Cognitive memory engine for AI agents — temporal decay, contradiction detection, autonomous consolidation, knowledge graph, ANN recall via HNSW. Embeddable Rust library with Python bindings; powers yantrikdb-server (HTTP gateway, MCP server, openraft cluster). AGPL.",
      "effort": "安装已验证",
      "forks": 6,
      "icon": "link",
      "name": "yantrikdb 能力包",
      "risk": "可发布",
      "slug": "yantrikdb",
      "stars": 17,
      "tags": [
        "MCP 工具",
        "知识库问答",
        "自然语言网页操作",
        "断点恢复流程",
        "评测体系"
      ],
      "thumb": "gray",
      "type": "MCP 配置"
    },
    "manual": {
      "markdown": "# https://github.com/yantrikos/yantrikdb 项目说明书\n\n生成时间：2026-05-16 13:00:42 UTC\n\n## 目录\n\n- [Overview](#page-overview)\n- [Installation](#page-installation)\n- [Five-Index Architecture](#page-five-index-architecture)\n- [Decoupled Write Path (LSM Architecture)](#page-decoupled-write-path)\n- [Storage Engine](#page-storage-engine)\n- [Core API Reference](#page-core-api)\n- [Cognition Layer](#page-cognition-layer)\n- [Conflict Detection and Resolution](#page-conflict-resolution)\n- [MCP Server Integration](#page-mcp-server)\n- [Python Bindings](#page-python-bindings)\n\n<a id='page-overview'></a>\n\n## Overview\n\n### 相关页面\n\n相关主题：[Five-Index Architecture](#page-five-index-architecture), [Core API Reference](#page-core-api), [Installation](#page-installation)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [crates/yantrikdb-core/src/cognition/state.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n- [crates/yantrikdb-core/src/cognition/query_dsl.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/query_dsl.rs)\n- [crates/yantrikdb-core/src/base/types.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/base/types.rs)\n- [crates/yantrikdb-core/src/cognition/narrative.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/narrative.rs)\n- [crates/yantrikdb-core/src/cognition/personality_bias.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/personality_bias.rs)\n- [crates/yantrikdb-core/src/cognition/receptivity.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/receptivity.rs)\n- [crates/yantrikdb-core/src/engine/query_dsl.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/query_dsl.rs)\n</details>\n\n# Overview\n\nYantrikDB is a cognitive memory database system designed to 
model, store, and reason about complex human mental states and behaviors. It provides a unified architecture for managing episodic memories, semantic knowledge, and procedural information while supporting advanced cognitive operations such as attention spreading, belief revision, and proactive suggestion surfacing.\n\nThe system bridges traditional database storage with cognitive science principles, enabling applications that require understanding of user intent, emotional states, preferences, and behavioral patterns.\n\n## Architecture Overview\n\nYantrikDB follows a layered architecture that separates storage, cognitive processing, and query execution concerns.\n\n```mermaid\ngraph TD\n    A[Python API Layer] --> B[Query DSL Engine]\n    B --> C[Cognition Module]\n    C --> D[Working Set Cache]\n    D --> E[SQLite Storage]\n    C --> F[Conflict Resolution]\n    C --> G[Proactive Surfacing]\n    C --> H[Narrative Tracking]\n```\n\n### Core Modules\n\n| Module | Location | Purpose |\n|--------|----------|---------|\n| `state.rs` | `cognition/` | Defines cognitive node types, edge kinds, and universal attributes |\n| `query_dsl.rs` | `cognition/` | Specifies cognitive operators (Recall, Believe, Plan, etc.) 
|\n| `narrative.rs` | `cognition/` | Manages narrative arcs and story tracking |\n| `personality_bias.rs` | `cognition/` | Models personality dimensions affecting system behavior |\n| `receptivity.rs` | `cognition/` | Tracks user activity levels and notification preferences |\n| `types.rs` | `base/` | Defines conflict types, trigger mechanisms, and configuration |\n| `engine/query_dsl.rs` | `engine/` | Executes cognitive operators against the database |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:1-50]()\n\n## Cognitive Node Model\n\nEvery entity in YantrikDB is represented as a cognitive node with universal attributes that determine how it participates in reasoning, memory consolidation, and action selection.\n\n### Node Kinds\n\nThe system supports 15 distinct node types representing different mental constructs:\n\n| Kind | Persistence | Description |\n|------|-------------|-------------|\n| `Entity` | Yes | Physical or conceptual objects |\n| `Episode` | Yes | Temporal memory of events |\n| `Belief` | Yes | User-held beliefs about the world |\n| `Goal` | Yes | Desired end states |\n| `Task` | Yes | Actionable items with status tracking |\n| `IntentHypothesis` | No | Transient intent guesses |\n| `Routine` | Yes | Recurring behavioral patterns |\n| `Need` | Yes | User needs (8 categories) |\n| `Opportunity` | Yes | Time-bounded chances for action |\n| `Risk` | Yes | Potential problems |\n| `Constraint` | Yes | Safety or preference constraints |\n| `Preference` | Yes | User preferences |\n| `ConversationThread` | No | Transient conversation state |\n| `ActionSchema` | Yes | Reusable action templates |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:150-180]()\n\n### Universal Cognitive Attributes\n\nEvery node carries 11 dimensions that govern its lifecycle and behavior:\n\n```mermaid\ngraph LR\n    A[Cognitive Node] --> B[confidence 0-1]\n    A --> C[activation 0-1]\n    A --> D[salience 0-1]\n    A --> E[persistence 0-1]\n    A --> F[valence -1 
to 1]\n    A --> G[urgency 0-1]\n    A --> H[novelty 0-1]\n    A --> I[volatility 0-1]\n    A --> J[provenance]\n    A --> K[evidence_count]\n```\n\nThe default values for each node kind vary based on their expected characteristics:\n\n| NodeKind | confidence | salience | persistence |\n|----------|------------|----------|-------------|\n| Entity | 0.90 | 0.80 | 0.95 |\n| Episode | 0.70 | 0.70 | 0.30 |\n| Belief | 0.60 | 0.70 | 0.60 |\n| Goal | 0.80 | 0.90 | 0.80 |\n| Task | 0.90 | 0.80 | 0.40 |\n| Need | 0.60 | 0.70 | 0.40 |\n| Opportunity | 0.40 | 0.60 | 0.20 |\n| Risk | 0.40 | 0.70 | 0.60 |\n| Constraint | 0.90 | 0.80 | 0.95 |\n| Preference | 0.60 | 0.50 | 0.85 |\n| ActionSchema | 0.70 | 0.40 | 0.90 |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:280-320]()\n\n## Cognitive Edge Model\n\nNodes connect through typed edges that encode semantic relationships and govern activation spreading.\n\n### Edge Kinds\n\nThere are 18 edge types in the cognitive graph:\n\n| Edge Kind | Transfer Factor | Type | Description |\n|-----------|-----------------|------|-------------|\n| `supports` | 0.7 | Positive | Evidence backing a belief |\n| `contradicts` | -0.5 | Inhibitory | Evidence opposing a belief |\n| `causes` | 0.8 | Positive | Causal relationship |\n| `predicts` | 0.4 | Positive | Future outcome prediction |\n| `prevents` | -0.6 | Inhibitory | Blocks an outcome |\n| `advances_goal` | 0.6 | Positive | Progress toward goal |\n| `blocks_goal` | -0.5 | Inhibitory | Impedes goal progress |\n| `subtask_of` | 0.4 | Positive | Decomposition relationship |\n| `requires` | 0.5 | Positive | Prerequisite relationship |\n| `associated_with` | 0.3 | Moderate | General correlation |\n| `instance_of` | 0.3 | Moderate | Categorization |\n| `part_of` | 0.3 | Moderate | Compositional |\n| `similar_to` | 0.3 | Moderate | Analogy |\n| `precedes_temporally` | 0.2 | Moderate | Temporal ordering |\n| `triggers` | 0.7 | Positive | Event initiation |\n| `prefers` | 0.3 | Moderate | 
Preference relationship |\n| `avoids` | -0.3 | Inhibitory | Avoidance pattern |\n| `constrains` | -0.2 | Inhibitory | Limitation relationship |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:120-160]()\n\n### Edge Behavior Methods\n\nEach edge kind provides metadata through dedicated methods:\n\n- `activation_transfer()` - Returns the spreading activation factor (-1.0 to 1.0)\n- `is_inhibitory()` - Boolean indicating suppression behavior\n- `is_epistemic()` - Whether edge participates in belief revision\n- `is_causal()` - Whether edge represents causal relationships\n\n```rust\npub fn is_inhibitory(self) -> bool {\n    self.activation_transfer() < 0.0\n}\n\npub fn is_epistemic(self) -> bool {\n    matches!(self, Self::Supports | Self::Contradicts)\n}\n\npub fn is_causal(self) -> bool {\n    matches!(self, Self::Causes | Self::Predicts | Self::Prevents)\n}\n```\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:180-200]()\n\n## Cognitive Operators\n\nThe query DSL defines 10 operators that compose the cognitive processing pipeline:\n\n| Operator | Priority | Purpose |\n|----------|----------|---------|\n| `Attend` | 10 | Foundation — always runs first |\n| `Recall` | 9 | Critical for context retrieval |\n| `Believe` | 8 | Evidence integration |\n| `Compare` | 7 | Action selection |\n| `Constrain` | 7 | Safety validation |\n| `Plan` | 6 | Means-ends reasoning |\n| `Project` | 5 | Forward simulation |\n| `Anticipate` | 4 | Proactive reasoning |\n| `Assess` | 3 | Meta-cognitive evaluation |\n| `CoherenceCheck` | 2 | Maintenance under budget pressure |\n\n资料来源：[crates/yantrikdb-core/src/cognition/query_dsl.rs:30-45]()\n\n### Operator Parameters\n\nEach operator accepts typed parameters:\n\n```rust\npub struct AttendOp {\n    pub seeds: Vec<NodeId>,\n    pub max_hops: u32,\n    pub decay: f64,\n}\n\npub struct RecallOp {\n    pub top_k: usize,\n    pub query: Option<String>,\n    pub domain: Option<String>,\n}\n\npub struct BelieveOp {\n    pub evidence: 
EvidenceInput,\n}\n\npub struct EvidenceInput {\n    pub target: Option<NodeId>,\n    pub observation: String,\n    pub direction: i32,  // positive = confirming, negative = contradicting\n}\n```\n\n资料来源：[crates/yantrikdb-core/src/cognition/query_dsl.rs:55-80]()\n\n### Execution Flow\n\n```mermaid\ngraph TD\n    A[Cognitive Query] --> B{Operator Type}\n    B --> C[Attend]\n    B --> D[Recall]\n    B --> E[Believe]\n    B --> F[Compare]\n    B --> G[Plan]\n    B --> H[Project]\n    B --> I[Anticipate]\n    B --> J[Assess]\n    B --> K[CoherenceCheck]\n    \n    C --> L[Working Set Hydration]\n    L --> M[Activation Boost on Seeds]\n    M --> N[Spreading Activation]\n    N --> O[StepOutput]\n```\n\nThe executor processes operators by first hydrating the working set from SQLite, then executing operator-specific logic:\n\n```rust\nfn execute_attend(&self, op: &AttendOp) -> StepOutput {\n    match self.db.hydrate_working_set(self.attention_config.clone()) {\n        Ok(mut ws) => {\n            let mut activated = 0;\n            let mut top = Vec::new();\n            for &seed in &op.seeds {\n                if let Some(node) = ws.get_mut(seed) {\n                    let new_activation = (node.attrs.activation + 0.3).min(1.0);\n                    node.attrs.activation = new_activation;\n                    top.push((seed, new_activation));\n                    activated += 1;\n                }\n            }\n            for &seed in &op.seeds {\n                activated += ws.activate_and_spread(seed, 0.3);\n            }\n            StepOutput::Attend { nodes_activated: activated, top_activated: top }\n        }\n        Err(e) => StepOutput::Error { message: format!(\"Attend failed: {}\", e) },\n    }\n}\n```\n\n资料来源：[crates/yantrikdb-core/src/engine/query_dsl.rs:100-130]()\n\n## Need Categories\n\nThe system models 8 categories of human needs:\n\n| Category | Description |\n|----------|-------------|\n| `Informational` | Knowledge and understanding needs |\n| 
`Social` | Connection and relationship needs |\n| `Emotional` | Affective and psychological needs |\n| `Organizational` | Structure and planning needs |\n| `Creative` | Self-expression and innovation needs |\n| `Health` | Physical and mental wellness needs |\n| `Financial` | Economic and resource needs |\n| `Professional` | Career and work-related needs |\n\n```rust\npub fn from_str(s: &str) -> Self {\n    match s {\n        \"informational\" => Self::Informational,\n        \"social\" => Self::Social,\n        \"emotional\" => Self::Emotional,\n        \"organizational\" => Self::Organizational,\n        \"creative\" => Self::Creative,\n        \"health\" => Self::Health,\n        \"financial\" => Self::Financial,\n        \"professional\" => Self::Professional,\n        _ => Self::Informational,\n    }\n}\n```\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:10-45]()\n\n## Provenance and Reliability\n\nEvery cognitive node carries provenance metadata indicating its source:\n\n| Provenance | Reliability Prior | Description |\n|------------|-------------------|-------------|\n| `Told` | 0.95 | User explicitly stated |\n| `Observed` | 0.90 | Directly observed behavior |\n| `Experimented` | 0.85 | Confirmed via controlled experiment |\n| `Consolidated` | 0.80 | Merged from multiple sources |\n| `Extracted` | 0.75 | From external documents |\n| `Inferred` | 0.60 | Pattern-based inference |\n| `SystemDefault` | 0.50 | Defaults — weakest trust |\n\n```rust\npub fn reliability_prior(self) -> f64 {\n    match self {\n        Self::Told => 0.95,\n        Self::Observed => 0.90,\n        Self::Experimented => 0.85,\n        Self::Consolidated => 0.80,\n        Self::Extracted => 0.75,\n        Self::Inferred => 0.60,\n        Self::SystemDefault => 0.50,\n    }\n}\n```\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:220-250]()\n\n## Action Kinds and Costs\n\nThe system models 8 types of actions with associated base costs:\n\n| Action | Base Cost | Description 
|\n|--------|-----------|-------------|\n| `Abstain` | 0.0 | Explicitly decide inaction |\n| `Inform` | 0.05 | Provide information |\n| `Organize` | 0.10 | Structure content |\n| `Suggest` | 0.15 | Propose an option |\n| `Communicate` | 0.20 | Send a message |\n| `Schedule` | 0.25 | Create calendar events |\n| `Execute` | 0.30 | Take direct action |\n| `Warn` | 0.30 | Alert about risk |\n\nHigher cost indicates more disruption to the user.\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:250-290]()\n\n## Conflict Resolution\n\nThe system detects and resolves conflicts between memories using policy-aware evaluation:\n\n### Conflict Types\n\n| Type | Default Priority | Description |\n|------|------------------|-------------|\n| `IdentityFact` | critical | Conflicting identity claims |\n| `Preference` | high | Contradicting preferences |\n| `Temporal` | high | Time-based contradictions |\n| `Consolidation` | medium | During memory consolidation |\n| `Minor` | low | Minor inconsistencies |\n\n### Conflict Detection Flow\n\n```mermaid\ngraph TD\n    A[Memory Operations] --> B[Candidate Pair Generation]\n    B --> C{Policy Check}\n    C -->|overlap_allowed| D[Flag as Conflict]\n    C -->|temporal_required| E{Time Validation}\n    E -->|Valid| D\n    E -->|Invalid| F[Apply Missing Time Severity]\n    C -->|No Policy| G[Default Behavior]\n```\n\nThe conflict resolution system queries namespace-specific policies:\n\n```sql\nSELECT overlap_allowed, temporal_required, missing_time_severity\nFROM relation_policies\nWHERE relation_type = ?1 AND (namespace = ?2 OR namespace = '*')\nORDER BY CASE WHEN namespace = ?2 THEN 0 ELSE 1 END\n```\n\n资料来源：[crates/yantrikdb-core/src/distributed/conflict.rs:50-80]()\n\n## Narrative Tracking\n\nYantrikDB tracks narrative arcs to understand ongoing stories and life patterns:\n\n### Arc Types\n\n| Type | Description |\n|------|-------------|\n| `Relationship` | Interpersonal dynamics |\n| `Project` | Goal-oriented endeavors |\n| `Habit` | 
Recurring behaviors |\n| `Discovery` | Learning journeys |\n| `Loss` | Negative life events |\n| `Recovery` | Healing processes |\n\n### Arc Lifecycle\n\n```mermaid\ngraph LR\n    A[Emerging] --> B[Active]\n    B --> C[Paused]\n    C -->|Resume| B\n    C --> D[Resolved]\n    C --> E[Abandoned]\n    A -->|Quality| E\n```\n\n### Chapter Types\n\nWithin arcs, chapters progress through phases:\n\n| Type | Purpose |\n|------|---------|\n| `Setup` | Initial context setting |\n| `Rising` | Building tension or progress |\n| `Climax` | Peak moment |\n| `Falling` | Winding down |\n| `Resolution` | Final conclusion |\n| `Interlude` | Pause between main chapters |\n\n资料来源：[crates/yantrikdb-core/src/cognition/narrative.rs:30-80]()\n\n## Personality Model\n\nThe system models personality across 8 dimensions affecting system behavior:\n\n| Dimension | Description |\n|-----------|-------------|\n| `curiosity` | Drive to explore and learn |\n| `proactivity` | Tendency to initiate action |\n| `caution` | Risk aversion level |\n| `warmth` | Emotional engagement |\n| `efficiency` | Optimization preference |\n| `playfulness` | Humor and levity |\n| `formality` | Communication style |\n| `persistence` | Follow-through tendency |\n\n```rust\npub const DIMENSION_NAMES: [&'static str; 8] = [\n    \"curiosity\", \"proactivity\", \"caution\", \"warmth\",\n    \"efficiency\", \"playfulness\", \"formality\", \"persistence\",\n];\n\npub fn similarity(&self, other: &Self) -> f64 {\n    // Cosine similarity between personality vectors\n    let mut dot = 0.0;\n    let mut mag_a = 0.0;\n    let mut mag_b = 0.0;\n    for i in 0..Self::DIMENSIONS {\n        let a = self.dimension(i);\n        let b = other.dimension(i);\n        dot += a * b;\n        mag_a += a * a;\n        mag_b += b * b;\n    }\n    // Normalized cosine similarity\n    (dot / (mag_a.sqrt() * mag_b.sqrt())).clamp(-1.0, 1.0)\n}\n```\n\n资料来源：[crates/yantrikdb-core/src/cognition/personality_bias.rs:50-90]()\n\n## User 
Receptivity\n\nThe system tracks user activity states to optimize notification timing:\n\n### Activity Levels\n\n| Level | Interruption Cost | Description |\n|-------|-------------------|-------------|\n| `Idle` | 0.15 | No active engagement |\n| `JustReturned` | 0.35 | Recently became active |\n| `Browsing` | 0.45 | Casual content consumption |\n| `Communicating` | 0.50 | In active conversation |\n| `TaskSwitching` | 0.55 | Mid-task context switch |\n| `FocusedWork` | 0.75 | Deep concentration |\n| `DeepFocus` | 0.95 | Critical focus period |\n\n### Notification Modes\n\n| Mode | Behavior |\n|------|----------|\n| `All` | All notifications allowed |\n| `ImportantOnly` | Only important notifications |\n| `DoNotDisturb` | Block all notifications |\n\n资料来源：[crates/yantrikdb-core/src/cognition/receptivity.rs:20-70]()\n\n## Think() Configuration\n\nThe cognitive loop is configured via `ThinkConfig`:\n\n```rust\npub struct ThinkConfig {\n    pub importance_threshold: f64,\n    pub decay_threshold: f64,\n    pub max_triggers: usize,\n}\n```\n\n### Trigger Types\n\n| Type | Cooldown | Expiry |\n|------|----------|--------|\n| `DecayReview` | 3 days | 7 days |\n| `ConsolidationReady` | 1 day | 3 days |\n| `ConflictEscalation` | 2 days | 14 days |\n| `TemporalDrift` | 14 days | 7 days |\n| `Redundancy` | 1 day | 7 days |\n| `RelationshipInsight` | 7 days | 7 days |\n| `ValenceTrend` | 7 days | 7 days |\n| `EntityAnomaly` | 7 days | 7 days |\n| `PatternDiscovered` | 7 days | 7 days |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs:100-150]()\n\n## Python API\n\nThe system exposes a Python interface for memory operations:\n\n```rust\n#[pyo3(signature = (\n    query=None, embedding=None, top_k=10, memory_type=None, namespace=None,\n    time_window=None, expand_entities=false, include_consolidated=false,\n    skip_reinforce=false, domain=None, source=None\n))]\nfn query(\n    &self,\n    py: Python<'_>,\n    query: Option<&str>,\n    embedding: Option<Vec<f32>>,\n    top_k: 
usize,\n    // ... additional parameters\n) -> PyResult<Vec<PyObject>>\n```\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:50-90]()\n\n## Summary\n\nYantrikDB provides a comprehensive cognitive memory architecture featuring:\n\n- **15 cognitive node types** with 11 universal attributes each\n- **18 edge kinds** with configurable activation transfer\n- **10 cognitive operators** for reasoning and context management\n- **8 need categories** for human motivation modeling\n- **7 provenance levels** with reliability priors\n- **8 personality dimensions** for behavioral adaptation\n- **7 activity states** for interruption optimization\n- **Policy-aware conflict resolution** with namespace support\n- **Narrative arc tracking** across 5 lifecycle states\n\nThe system balances persistent storage with working-set caching to support real-time cognitive operations while maintaining long-term memory integrity.\n\n---\n\n<a id='page-installation'></a>\n\n## Installation\n\n### 相关页面\n\n相关主题：[Overview](#page-overview), [Python Bindings](#page-python-bindings), [MCP Server Integration](#page-mcp-server)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [pyproject.toml](https://github.com/yantrikos/yantrikdb/blob/main/pyproject.toml)\n- [crates/yantrikdb-python/Cargo.toml](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-python/Cargo.toml)\n- [crates/yantrikdb-python/pyproject.toml](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-python/pyproject.toml)\n- [src/yantrikdb/__init__.py](https://github.com/yantrikos/yantrikdb/blob/main/src/yantrikdb/__init__.py)\n</details>\n\n# Installation\n\nThis guide covers all supported methods for installing YantrikDB across different platforms and use cases.\n\n## Prerequisites\n\n### System Requirements\n\n| Requirement | Minimum | Recommended |\n|-------------|---------|-------------|\n| Python | 3.9 | 3.11+ |\n| Operating System | Linux, macOS, Windows | Linux (x86_64, aarch64), macOS 
(Intel, Apple Silicon), Windows (x86_64) |\n\n> **Note:** The remainder of this installation page failed to generate and is not available here. Refer to the repository README for full installation instructions.\n\n---\n\n<a id='page-five-index-architecture'></a>\n\n## Five-Index Architecture\n\n### 相关页面\n\n相关主题：[Decoupled Write Path (LSM Architecture)](#page-decoupled-write-path), [Storage Engine](#page-storage-engine), [Core API Reference](#page-core-api)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [crates/yantrikdb-core/src/vector/hnsw.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/vector/hnsw.rs)\n- [crates/yantrikdb-core/src/vector/delta_index.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/vector/delta_index.rs)\n- [crates/yantrikdb-core/src/knowledge/graph.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/knowledge/graph.rs)\n- [crates/yantrikdb-core/src/knowledge/graph_index.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/knowledge/graph_index.rs)\n- [crates/yantrikdb-core/src/engine/storage.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/storage.rs)\n- [crates/yantrikdb-core/src/engine/indices.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/indices.rs)\n</details>\n\n# Five-Index Architecture\n\n## Overview\n\nThe Five-Index Architecture is yantrikdb's multi-layered indexing system designed to support diverse query patterns across cognitive memory types. 
Each index layer specializes in a specific access pattern—vector similarity, temporal ordering, graph traversal, full-text search, and structured filtering—enabling the engine to retrieve relevant memories with minimal latency while maintaining consistency across the working set and persistent storage.\n\nThe architecture divides responsibility across five specialized index types:\n\n| Index | Primary Role | Access Pattern |\n|-------|--------------|----------------|\n| **HNSW Index** | Vector similarity search | ANN queries on embeddings |\n| **Delta Index** | Recent writes and updates | In-memory working set |\n| **Graph Index** | Relationship traversal | Multi-hop graph queries |\n| **Storage Index** | Persistent record management | CRUD operations with SQLite |\n| **Recall Index** | Federated cross-index queries | Multi-dimensional recall |\n\n资料来源：[crates/yantrikdb-core/src/engine/indices.rs:1-50]()\n\n## Architecture Diagram\n\n```mermaid\ngraph TB\n    subgraph \"Query Interface\"\n        Q[RecallQuery]\n    end\n    \n    subgraph \"Five Index Layers\"\n        H[HNSW Index<br/>Vector ANN]\n        D[Delta Index<br/>Working Set]\n        G[Graph Index<br/>Relationships]\n        S[Storage Index<br/>Persistent SQLite]\n        R[Recall Index<br/>Federated Router]\n    end\n    \n    subgraph \"Data Sources\"\n        E[Embedding Cache]\n        M[Memory Nodes]\n        R2[Relational Tables]\n    end\n    \n    Q --> R\n    R --> H\n    R --> D\n    R --> G\n    R --> S\n    \n    H --> E\n    D --> M\n    G --> M\n    S --> R2\n```\n\n## HNSW Index Layer\n\n### Purpose and Scope\n\nThe Hierarchical Navigable Small World (HNSW) index provides approximate nearest neighbor (ANN) search over high-dimensional embedding vectors. 
This layer powers semantic similarity queries, enabling the system to retrieve memories based on meaning rather than exact keyword matches.\n\n资料来源：[crates/yantrikdb-core/src/vector/hnsw.rs:1-100]()\n\n### Key Components\n\nThe HNSW implementation in yantrikdb supports the following configuration parameters:\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `m` | `u32` | 16 | Max connections per node |\n| `ef_construction` | `u32` | 200 | Search width during build |\n| `ef_search` | `u32` | 100 | Search width during query |\n| `level_mult` | `f64` | 1/ln(M) | Level generation factor |\n\n### Query Flow\n\n```mermaid\nsequenceDiagram\n    participant Q as Query\n    participant H as HNSW Layer\n    participant E as Embedding Cache\n    participant R as Results\n    \n    Q->>H: RecallQuery with embedding\n    H->>H: Layer 0 scan\n    H->>H: Greedy search up layers\n    H->>E: Fetch top_k candidates\n    E-->>H: Candidate vectors\n    H->>H: Re-rank by distance\n    H-->>R: Ordered results\n```\n\n## Delta Index Layer\n\n### Purpose and Scope\n\nThe Delta Index maintains a working set of recently inserted or updated records before they are flushed to persistent storage. This write buffer enables high-throughput ingestion while preserving query consistency for recent data.\n\n资料来源：[crates/yantrikdb-core/src/vector/delta_index.rs:1-100]()\n\n### Write Path\n\nWhen a new memory is created, the system:\n\n1. Writes to the Delta Index immediately (low latency)\n2. Appends to the HNSW structure if vector is present\n3. 
Delays SQLite flush until batch threshold\n\n### Consistency Model\n\nThe Delta Index implements a hybrid consistency model:\n\n- **Read-your-writes**: Queries against recent data include Delta entries\n- **Staleness bound**: Configurable flush interval (default: 1 second)\n- **Rollback support**: Unflushed entries can be discarded on abort\n\n```mermaid\ngraph LR\n    A[Write Request] --> B{Delta Index<br/>In-Memory}\n    B --> C{HNSW Update<br/>Immediate}\n    C --> D[Query Path]\n    B -.->|Flush| E[Storage Index<br/>SQLite]\n    E --> D\n```\n\n## Graph Index Layer\n\n### Purpose and Scope\n\nThe Graph Index manages typed relationships between memory nodes, supporting complex multi-hop queries. Each edge type has associated metadata including activation transfer factors and temporal validity windows.\n\n资料来源：[crates/yantrikdb-core/src/knowledge/graph.rs:1-100]()\n资料来源：[crates/yantrikdb-core/src/knowledge/graph_index.rs:1-100]()\n\n### Supported Edge Types\n\n| Edge Type | Activation Transfer | Use Case |\n|-----------|---------------------|----------|\n| `causes` | 0.8 | Causal chains |\n| `supports` | 0.7 | Supporting evidence |\n| `triggers` | 0.7 | Event triggers |\n| `advances_goal` | 0.6 | Goal progress |\n| `requires` | 0.5 | Prerequisites |\n| `subtask_of` | 0.4 | Hierarchical tasks |\n| `predicts` | 0.4 | Predictive relations |\n| `associated_with` | 0.3 | Weak associations |\n| `similar_to` | 0.3 | Analogy detection |\n| `instance_of` | 0.3 | Categorization |\n| `part_of` | 0.3 | Containment |\n| `precedes_temporally` | 0.2 | Temporal ordering |\n| `contradicts` | -0.5 | Conflict detection |\n| `blocks_goal` | -0.6 | Obstacle modeling |\n| `prevents` | -0.7 | Prevention relations |\n| `constrains` | -0.4 | Constraint edges |\n\n### Graph Traversal API\n\n```rust\n// Core graph traversal via RecallQuery\nRecallQuery::new(embedding)\n    .top_k(10)\n    .expand_entities(true)\n    .max_hops(3)\n```\n\n## Storage Index Layer\n\n### Purpose and 
Scope\n\nThe Storage Index provides durable persistence for all memory records using SQLite. This layer handles transaction management, crash recovery, and long-term storage optimization.\n\n资料来源：[crates/yantrikdb-core/src/engine/storage.rs:1-100]()\n\n### Schema Overview\n\n| Table | Primary Key | Indexes |\n|-------|-------------|---------|\n| `memories` | `rid` | `namespace`, `created_at`, `kind` |\n| `edges` | `(src, rel_type, dst)` | `rel_type`, `src`, `dst` |\n| `relation_policies` | `(relation_type, namespace)` | `namespace` |\n\n### Query Parameters\n\n| Parameter | Type | Description |\n|-----------|------|-------------|\n| `memory_type` | `Option<&str>` | Filter by node kind |\n| `namespace` | `Option<&str>` | Filter by namespace |\n| `time_window` | `Option<(f64, f64)>` | Temporal bounds |\n| `domain` | `Option<&str>` | Domain classification |\n| `source` | `Option<&str>` | Provenance filter |\n\n## Recall Index Layer\n\n### Purpose and Scope\n\nThe Recall Index acts as a federated query router that orchestrates multi-index searches. It combines results from HNSW, Delta, Graph, and Storage indices according to query parameters and relevance scoring.\n\n### Query Pipeline\n\n```mermaid\nflowchart TD\n    A[RecallQuery] --> B[Parse Parameters]\n    B --> C{HNSW Index}\n    B --> D{Delta Index}\n    B --> E{Graph Index}\n    B --> F{Storage Index}\n    \n    C --> G[Result Merge]\n    D --> G\n    E --> G\n    F --> G\n    \n    G --> H[Re-rank by Score]\n    H --> I[Top-K Selection]\n    I --> J[Return Ordered Results]\n```\n\n### Query Construction\n\n```rust\nlet q = RecallQuery::new(embedding)\n    .top_k(10)\n    .memory_type(\"episodic\")\n    .namespace(\"work\")\n    .time_window(start_ts, end_ts)\n    .expand_entities(true)\n    .include_consolidated(false);\n```\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:50-80]()\n\n## Index Synchronization\n\n### Write Ordering\n\nAll index updates follow a strict ordering guarantee:\n\n1. 
**Delta Index** receives write first (primary)\n2. **HNSW Index** updated for vector-bearing records\n3. **Graph Index** updated for edge-creating operations\n4. **Storage Index** flush queued for background persistence\n\n### Failure Recovery\n\n| Failure Point | Recovery Action |\n|---------------|-----------------|\n| Delta write fails | Abort entire transaction |\n| HNSW update fails | Mark record inconsistent, retry |\n| Graph update fails | Rollback edge, alert |\n| Storage flush fails | Retain in Delta, retry on restart |\n\n## Performance Characteristics\n\n| Operation | HNSW | Delta | Graph | Storage |\n|-----------|------|-------|-------|---------|\n| Point query | O(log n) | O(1) | O(1) | O(log n) |\n| Range query | N/A | O(n) | O(n) | O(log n + k) |\n| ANN search | O(ef × log n) | N/A | N/A | N/A |\n| Traversal | N/A | N/A | O(m^h) | N/A |\n| Write | O(log n) | O(1) | O(1) | O(log n) |\n\n## Configuration\n\nThe Five-Index system is configured via `ThinkConfig`:\n\n| Parameter | Default | Description |\n|-----------|---------|-------------|\n| `importance_threshold` | 0.5 | Minimum relevance for surfacing |\n| `decay_threshold` | 0.3 | Importance decay trigger |\n| `max_triggers` | 10 | Concurrent trigger limit |\n\n## Summary\n\nThe Five-Index Architecture enables yantrikdb to handle diverse cognitive memory workloads by specializing each index for its access pattern. The HNSW layer provides fast semantic search, Delta absorbs write bursts, Graph manages relationships, Storage ensures durability, and Recall federates queries across all layers. 
This design allows the system to balance latency, throughput, and consistency according to workload characteristics.\n\n---\n\n<a id='page-decoupled-write-path'></a>\n\n## Decoupled Write Path (LSM Architecture)\n\n### 相关页面\n\n相关主题：[Five-Index Architecture](#page-five-index-architecture), [Storage Engine](#page-storage-engine)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [docs/decoupled_write_path_rfc.md](https://github.com/yantrikos/yantrikdb/blob/main/docs/decoupled_write_path_rfc.md)\n- [CONCURRENCY.md](https://github.com/yantrikos/yantrikdb/blob/main/CONCURRENCY.md)\n- [crates/yantrikdb-core/src/vector/delta_index.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/vector/delta_index.rs)\n- [crates/yantrikdb-core/src/engine/materializer.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/materializer.rs)\n</details>\n\n# Decoupled Write Path (LSM Architecture)\n\n## Overview\n\nThe Decoupled Write Path is the core write infrastructure of yantrikdb, implementing a Log-Structured Merge-tree (LSM) architecture that separates write operations from indexing and compaction. This design ensures high write throughput while maintaining read performance through asynchronous background compaction.\n\nThe architecture is built on the principle that write operations must be as fast as possible, deferring expensive vector indexing work to background processes. This decoupled approach prevents write operations from blocking on indexing operations, enabling the system to handle high concurrency workloads without regression.\n\n资料来源：[CONCURRENCY.md]()\n\n## Architecture Components\n\n### DeltaIndex: The Write Buffer\n\nThe `DeltaIndex` is the primary write buffer in the LSM architecture. 
It provides lock-free append operations for new entries and tombstone operations for deletions.\n\n```mermaid\ngraph TD\n    subgraph WritePath[\"Write Path\"]\n        W[Write Request] --> DI[DeltaIndex]\n        DI --> |append| DE[DeltaEntry]\n        DE --> |O1 push| PendingVec[Pending Vec]\n    end\n    \n    subgraph CompactionPath[\"Background Compaction\"]\n        PendingVec --> |seal| CT[Cold Tier]\n        CT --> |clone-rebuild| NH[New HnswIndex]\n        NH --> |install| CurrentCold[Current Cold Tier]\n    end\n    \n    subgraph ReadPath[\"Read Path\"]\n        CurrentCold --> |ArcSwap| RR[Read Replicas]\n    end\n```\n\n**DeltaIndex Write Operations:**\n\n| Operation | Complexity | Lock Type | Description |\n|-----------|-----------|-----------|-------------|\n| `append` | O(1) | `RwLock<Vec<DeltaEntry>>` | Add new entry to pending buffer |\n| `tombstone` | O(1) | `RwLock<Vec<DeltaEntry>>` | Mark entry as deleted |\n| `seal_delta_for_compaction` | O(1) | Brief lock hold | Swap pending entries for compaction |\n| `compact` | O(n) rebuild | No foreground locks | Clone and rebuild cold tier |\n\n资料来源：[crates/yantrikdb-core/src/vector/delta_index.rs]()\n\n### Two-Tier Storage Model\n\nThe storage model consists of two tiers:\n\n1. **Hot Tier (DeltaIndex)**: Contains all recent writes and tombstones not yet compacted\n2. **Cold Tier (HnswIndex)**: Immutable, compacted index containing historical data\n\n```mermaid\ngraph LR\n    subgraph HotTier[\"Hot Tier - DeltaIndex\"]\n        D1[DeltaEntry 1]\n        D2[DeltaEntry 2]\n        D3[DeltaEntry N]\n    end\n    \n    subgraph ColdTier[\"Cold Tier - HnswIndex\"]\n        H1[HnswIndex<br/>immutable]\n        H2[HnswIndex<br/>immutable]\n    end\n    \n    HotTier --> |periodic| ColdTier\n```\n\n**Invariant:** The cold tier MUST use `ArcSwap<HnswIndex>` for lock-free reader access. 
Replacing with `RwLock<HnswIndex>` or `Mutex<HnswIndex>` causes read latency regression.\n\n资料来源：[CONCURRENCY.md]()\n\n## Concurrency Rules\n\nThe write path enforces strict concurrency rules to prevent deadlocks and ensure forward progress under high write load.\n\n### Rule 1: Foreground Writes Must Be O(1)\n\nAll foreground write operations MUST only touch O(1) data structures:\n\n| Allowed Operations | Forbidden Operations |\n|-------------------|---------------------|\n| `DeltaIndex::append` | `HnswIndex::insert` |\n| `DeltaIndex::tombstone` | `HnswIndex::remove` |\n| `assign_seq` (atomic fetch) | `compact()` |\n| `bump_visible_seq` | Any non-O(1) lock acquisition |\n\n资料来源：[CONCURRENCY.md]()\n\n### Rule 2: Background Compaction Isolation\n\nBackground compaction MUST NOT share lock primitives with foreground writes:\n\n```mermaid\nsequenceDiagram\n    participant FW as Foreground Write\n    participant DI as DeltaIndex\n    participant CP as Compactor\n    participant HI as HnswIndex\n    \n    FW->>DI: append(entry)\n    Note over DI: Brief RwLock write<br/>O(1) push\n    \n    CP->>DI: seal_delta_for_compaction()\n    Note over DI: Brief lock for seal\n    \n    CP->>CP: clone cold + sealed entries\n    Note over CP: No locks held here\n    \n    CP->>HI: ArcSwap new index\n    Note over HI: Brief lock for install\n```\n\n**Compactor Responsibilities:**\n\n1. Call `seal_delta_for_compaction()` to get a stable snapshot\n2. Perform HNSW rebuild off the hot path\n3. 
Install new cold tier via `ArcSwap`\n\n资料来源：[CONCURRENCY.md]()\n\n### Rule 3: Visible Sequence Tracking\n\nThe `visible_seq` map tracks the minimum sequence number visible to readers per namespace, enabling read-your-writes (RYW) semantics.\n\n```rust\n// Type: DashMap<String, AtomicU64>\nvisible_seq: DashMap<String, AtomicU64>\n\n// Fast path reads (lock-free)\nget(ns).map(|e| e.load(Acquire))\n\n// Fast path writes\nget(ns).fetch_max(seq, Release)\n```\n\n| Property | Value |\n|----------|-------|\n| Data Structure | `dashmap::DashMap<String, AtomicU64>` |\n| Read Path | Lock-free via `AtomicU64::load(Acquire)` |\n| Write Path | Lock-free via `AtomicU64::fetch_max(Release)` |\n| Scope | Per-namespace |\n\n资料来源：[CONCURRENCY.md]()\n\n## Write Operations\n\n### Standard Write Flow\n\n```mermaid\ngraph TD\n    Start[Write Request] --> Validate{Validate}\n    Validate --> |valid| Seq[assign_seq]\n    Validate --> |invalid| Reject[Reject]\n    \n    Seq --> SQL[SQL SAVEPOINT]\n    SQL --> Delta[DeltaIndex::append]\n    Delta --> Bump[bump_visible_seq]\n    Bump --> Commit[Commit Transaction]\n    Commit --> Done[Return to Client]\n    \n    Reject --> Fail[Return Error]\n```\n\n### Record With RID Pattern\n\nAll write operations follow the `record_with_rid` pattern:\n\n```rust\n// Pattern for all write primitives\nfn write_operation(&self, ...) -> Result<RecordId> {\n    // 1. SQL with SAVEPOINT for rollback\n    let rid = sql_transaction(|| {\n        // 2. Append to DeltaIndex (O(1) push)\n        self.delta_index.append(entry)?;\n        Ok(assigned_rid)\n    })?;\n    \n    // 3. 
Bump visible sequence\n    self.bump_visible_seq(namespace, seq)?;\n    \n    Ok(rid)\n}\n```\n\n资料来源：[CONCURRENCY.md]()\n\n### Sequence Number Assignment\n\nThe `assign_seq` function uses atomic operations for lock-free sequence generation:\n\n```rust\n// Atomic fetch_add or fetch_max\nlet seq = self\n    .seq_counter\n    .fetch_add(1, std::sync::atomic::Ordering::Relaxed);\n```\n\nThis ensures each write receives a unique, monotonically increasing sequence number without contention.\n\n## Compaction Process\n\n### Compaction Lifecycle\n\n```mermaid\ngraph LR\n    subgraph Phase1[\"Phase 1: Seal\"]\n        A[Active Delta] --> B[Seal Delta]\n        B --> C[Frozen Snapshot]\n    end\n    \n    subgraph Phase2[\"Phase 2: Rebuild\"]\n        C --> D[Clone Cold Hnsw]\n        D --> E[Merge Sealed Entries]\n        E --> F[Build New Hnsw]\n    end\n    \n    subgraph Phase3[\"Phase 3: Install\"]\n        F --> G[ArcSwap Install]\n        G --> H[New Current Cold]\n    end\n```\n\n### Compaction Rules\n\n| Rule | Description |\n|------|-------------|\n| Lock Isolation | Compactor holds `delta` RwLock only for seal and install |\n| No Hot Locks | Between seal and install, NO locks shared with foreground |\n| ArcSwap | Cold tier replacement uses atomic pointer swap |\n| Snapshot | `seal_delta_for_compaction()` returns stable Arc snapshot |\n\n资料来源：[CONCURRENCY.md]()\n\n### Compaction Triggers\n\nThe system triggers compaction based on configurable policies:\n\n| Trigger Type | Default Cooldown | Default Expiry |\n|-------------|-----------------|----------------|\n| DecayReview | 3 days | 7 days |\n| ConsolidationReady | 1 day | 3 days |\n| ConflictEscalation | 2 days | 14 days |\n| Redundancy | 1 day | 7 days |\n| PatternDiscovered | 7 days | 7 days |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs]()\n\n## Read-Your-Writes Semantics\n\n### Recall With Sequence\n\nThe `recall_with_seq` method enables clients to wait for their writes to become visible:\n\n```rust\npub 
fn recall_with_seq(\n    &self,\n    query_embedding: &[f32],\n    top_k: usize,\n    min_seq: u64,           // Sequence from write operation\n    namespace: Option<&str>,\n    timeout: Duration,\n) -> Result<Vec<RecallResult>> {\n    let ns = namespace.unwrap_or(\"default\");\n    \n    // Wait for visible_seq to reach min_seq\n    self.wait_for_visible_seq(ns, min_seq, timeout)?;\n    \n    // Safe to recall - all writes up to min_seq are visible\n    self.recall(query_embedding, top_k, ...)\n}\n```\n\n| Parameter | Type | Description |\n|-----------|------|-------------|\n| `min_seq` | `u64` | Minimum sequence number client observed |\n| `namespace` | `Option<&str>` | Target namespace (required for correct RYW) |\n| `timeout` | `Duration` | Maximum wait time |\n\n资料来源：[crates/yantrikdb-core/src/engine/recall.rs]()\n\n### Visible Sequence Wait\n\n```mermaid\nsequenceDiagram\n    participant C as Client\n    participant VS as VisibleSeq Map\n    participant REC as Recall Engine\n    \n    C->>VS: load current seq for namespace\n    Note over VS: AtomicU64 load\n    VS-->>C: current_seq\n    \n    alt current_seq < min_seq\n        C->>C: wait_for_visible_seq()\n        loop until visible or timeout\n            C->>VS: load current seq\n            VS-->>C: current_seq\n        end\n    end\n    \n    C->>REC: recall(...)\n    REC-->>C: Results (guaranteed visible)\n```\n\n## Materializer Integration\n\nThe materializer component coordinates between the write path and the cognitive layer, processing update operations extracted from natural language input.\n\n```mermaid\ngraph TD\n    subgraph Input[\"Input Processing\"]\n        NL[Natural Language] --> EX[Extractor]\n        EX --> OT[Operation Templates]\n    end\n    \n    subgraph Write[\"Write Path\"]\n        OT --> UW[UpdateOps]\n        UW --> DI[DeltaIndex]\n        DI --> SEQ[Sequence Assignment]\n        SEQ --> VS[VisibleSeq Update]\n    end\n    \n    subgraph Cognitive[\"Cognitive Layer\"]\n        
VS --> MAT[Materializer]\n        MAT --> ST[State Update]\n        ST --> GP[Graph Propagation]\n    end\n```\n\n资料来源：[crates/yantrikdb-core/src/engine/materializer.rs]()\n资料来源：[crates/yantrikdb-core/src/cognition/extractor.rs]()\n\n## Configuration\n\n### ThinkConfig Parameters\n\nThe cognition loop configuration affects compaction behavior:\n\n| Parameter | Description | Impact |\n|-----------|-------------|--------|\n| `importance_threshold` | Minimum importance for processing | Filters low-value nodes |\n| `decay_threshold` | Decay rate trigger | Affects when entries move to cold |\n| `max_triggers` | Maximum triggers per cycle | Limits resource usage |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs]()\n\n### Memory Query Options\n\nThe Python bindings expose configuration for recall operations:\n\n```rust\n#[pyo3(signature = (\n    query=None, embedding=None, top_k=10, memory_type=None, namespace=None,\n    time_window=None, expand_entities=false, include_consolidated=false,\n    skip_reinforce=false, domain=None, source=None\n))]\n```\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `query` | `Option<&str>` | `None` | Text query for semantic search |\n| `embedding` | `Option<Vec<f32>>` | `None` | Pre-computed embedding vector |\n| `top_k` | `usize` | `10` | Number of results to return |\n| `memory_type` | `Option<&str>` | `None` | Filter by memory type |\n| `namespace` | `Option<&str>` | `None` | Target namespace |\n| `include_consolidated` | `bool` | `false` | Include cold tier results |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs]()\n\n## Performance Characteristics\n\n### Write Path Guarantees\n\n| Metric | Guarantee |\n|--------|-----------|\n| Write Latency | O(1) for DeltaIndex append |\n| Contention | Lock-free sequence assignment |\n| Durability | SQL SAVEPOINT + DeltaIndex |\n| Visibility | Guaranteed via visible_seq |\n\n### Compaction Guarantees\n\n| Metric | Guarantee 
|\n|--------|-----------|\n| Lock Duration | O(1) for seal and install |\n| Hot Path Impact | Zero locks during rebuild |\n| Reader Impact | ArcSwap provides instant switch |\n| Memory | Clone-on-write for cold tier |\n\n### Read Path Guarantees\n\n| Metric | Guarantee |\n|--------|-----------|\n| Read Latency | Lock-free via ArcSwap cold tier |\n| Consistency | Read-your-writes via visible_seq |\n| Namespace Isolation | Per-namespace sequence tracking |\n\n## Error Handling\n\n### Conflict Resolution\n\nThe system tracks conflicts between memories for resolution:\n\n```rust\npub struct Conflict {\n    pub conflict_id: String,\n    pub conflict_type: String,        // identity_fact, preference, temporal\n    pub priority: String,              // critical, high, medium, low\n    pub memory_a: String,\n    pub memory_b: String,\n    pub entity: Option<String>,\n    pub detected_at: f64,\n    pub resolution_note: Option<String>,\n}\n```\n\n| Conflict Type | Default Priority |\n|--------------|------------------|\n| IdentityFact | critical |\n| Preference | high |\n| Temporal | high |\n| Consolidation | medium |\n| Minor | low |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs]()\n\n## Related Documentation\n\n- [Concurrency Rules](https://github.com/yantrikos/yantrikdb/blob/main/CONCURRENCY.md) - Detailed concurrency invariants\n- [Decoupled Write Path RFC](https://github.com/yantrikos/yantrikdb/blob/main/docs/decoupled_write_path_rfc.md) - Design rationale\n- [DeltaIndex Implementation](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/vector/delta_index.rs) - Source code\n- [Materializer](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/materializer.rs) - Write coordination\n\n---\n\n<a id='page-storage-engine'></a>\n\n## Storage Engine\n\n### 相关页面\n\n相关主题：[Five-Index Architecture](#page-five-index-architecture), [Decoupled Write Path (LSM 
Architecture)](#page-decoupled-write-path)\n\n<details>\n<summary>Relevant Source Files</summary>\n\nThe following source files were used to generate this documentation:\n\n- [crates/yantrikdb-core/src/engine/lifecycle.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/lifecycle.rs)\n- [crates/yantrikdb-core/src/engine/stats.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/stats.rs)\n- [crates/yantrikdb-python/src/py_engine/memory.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-python/src/py_engine/memory.rs)\n- [crates/yantrikdb-core/src/cognition/state.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n- [crates/yantrikdb-core/src/base/types.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/base/types.rs)\n- [crates/yantrikdb-core/src/distributed/conflict.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/distributed/conflict.rs)\n</details>\n\n# Storage Engine\n\n## Overview\n\nThe YantrikDB Storage Engine is the core persistence layer responsible for storing, retrieving, and managing memory data in the SQLite database. 
It handles encrypted text storage, metadata management, storage tier organization, and integrates with the materialization subsystem for asynchronous processing of memory operations.\n\nThe storage engine operates as part of the broader engine module and maintains close integration with:\n\n- The **Record System** for writing memories\n- The **Recall System** for querying memories\n- The **Materialization Pipeline** for asynchronous operation processing\n- The **Encryption Layer** for secure text storage\n\n## Core Data Models\n\n### Memory Structure\n\nThe central data structure managed by the storage engine is the `Memory` struct, which encapsulates all attributes of a stored memory:\n\n```rust\npub struct Memory {\n    pub rid: String,                      // Unique record identifier\n    pub memory_type: String,             // episodic, semantic, procedural, etc.\n    pub text: String,                    // Decrypted memory content\n    pub created_at: f64,                 // Creation timestamp\n    pub importance: f64,                 // Importance score [0.0, 1.0]\n    pub valence: f64,                    // Emotional valence [-1.0, 1.0]\n    pub half_life: f64,                  // Decay half-life in seconds\n    pub last_access: f64,                // Last access timestamp\n    pub access_count: u32,               // Number of times accessed\n    pub consolidation_status: String,     // Current consolidation state\n    pub storage_tier: String,             // hot, warm, cold, frozen\n    pub consolidated_into: Option<String>,// RID of consolidated memory\n    pub metadata: serde_json::Value,     // Encrypted JSON metadata\n    pub namespace: String,               // Logical namespace partition\n    pub certainty: f64,                  // Belief certainty [0.0, 1.0]\n    pub domain: String,                   // Domain classification\n    pub source: String,                   // Provenance source type\n    pub emotional_state: Option<String>, // Associated 
emotional context\n    pub session_id: Option<String>,      // Session identifier\n    pub due_at: Option<f64>,             // Due timestamp for tasks\n    pub temporal_kind: Option<String>,   // Temporal classification\n}\n```\n\n资料来源：[engine/lifecycle.rs:200-225]()\n\n### Storage Tiers\n\nYantrikDB implements a tiered storage architecture to optimize memory access patterns:\n\n| Tier | Purpose | Access Pattern |\n|------|---------|----------------|\n| `hot` | Frequently accessed memories | In-memory cache priority |\n| `warm` | Regular operational memories | Standard retrieval |\n| `cold` | Archival memories | Lazy loading |\n| `frozen` | Long-term storage | Minimal access |\n\n资料来源：[engine/lifecycle.rs:214]()\n\n### Consolidation Status\n\nMemories maintain a consolidation status indicating their state in the memory consolidation lifecycle:\n\n| Status | Description |\n|--------|-------------|\n| `observed` | Raw observation, no consolidation |\n| `inferred` | Pattern-based inference |\n| `told` | Explicitly stated by user |\n| `experimented` | Confirmed via experiment |\n| `extracted` | Extracted from external documents |\n| `consolidated` | Merged from multiple sources |\n| `system_default` | System-provided default |\n\nEach status carries a reliability prior that affects belief revision:\n\n```rust\npub fn reliability_prior(self) -> f64 {\n    match self {\n        Self::Told => 0.95,          // User explicitly stated\n        Self::Observed => 0.90,      // Directly observed\n        Self::Experimented => 0.85,  // Controlled experiment\n        Self::Extracted => 0.75,     // External documents\n        Self::Inferred => 0.60,      // Pattern inference\n        Self::Consolidated => 0.80,  // Multi-source merge\n        Self::SystemDefault => 0.50, // Defaults\n    }\n}\n```\n\n资料来源：[cognition/state.rs:180-192]()\n\n## Storage Architecture\n\n### High-Level Architecture\n\n```mermaid\ngraph TD\n    subgraph \"Python API Layer\"\n        
PYM[py_engine/memory.rs]\n    end\n    \n    subgraph \"Engine Core\"\n        REC[Record System]\n        RCL[Recall System]\n        MAT[Materialization Pipeline]\n    end\n    \n    subgraph \"Storage Layer\"\n        ENG[Engine Instance]\n        SQL[(SQLite Database)]\n        CRE[Encryption Layer]\n    end\n    \n    PYM --> REC\n    PYM --> RCL\n    REC --> ENG\n    REC --> MAT\n    MAT --> ENG\n    ENG --> SQL\n    ENG --> CRE\n    CRE --> SQL\n```\n\n### Encryption Integration\n\nText fields are encrypted before storage and decrypted on retrieval to ensure data privacy:\n\n```rust\nlet text = self.decrypt_text(&row.2)?;\nlet meta_str = self.decrypt_text(&row.12)?;\nlet metadata: serde_json::Value = serde_json::from_str(&meta_str)\n    .unwrap_or(serde_json::Value::Object(Default::default()));\n```\n\n资料来源：[engine/lifecycle.rs:210-214]()\n\n## Record Operations\n\n### Recording a Memory\n\nThe storage engine provides the `record()` method for storing new memories with automatic embedding:\n\n```rust\ndb.record(\n    text,           // Memory content\n    memory_type,    // episodic, semantic, etc.\n    importance,     // Importance score\n    valence,        // Emotional valence\n    half_life,      // Decay half-life\n    &meta,          // JSON metadata\n    &emb,           // Embedding vector\n    namespace,      // Logical partition\n    certainty,      // Belief certainty\n    domain,         // Domain classification\n    source,         // Provenance source\n    emotional_state, // Emotional context\n)\n```\n\n资料来源：[py_engine/memory.rs:45-60]()\n\n### Python Bindings\n\nThe Python API exposes record functionality through `py_engine/memory.rs`:\n\n```python\n# Record a memory with auto-embedding\ndb.record(\n    text=\"Meeting with John at 3pm\",\n    memory_type=\"episodic\",\n    namespace=\"work\",\n    importance=0.8,\n    valence=0.5,\n)\n```\n\nThe `record()` method accepts these parameters:\n\n| Parameter | Type | Required | Description 
|\n|-----------|------|----------|-------------|\n| `text` | `str` | Yes | Memory content |\n| `embedding` | `Vec<f32>` | No | Pre-computed embedding (auto-generated if None) |\n| `memory_type` | `str` | No | Type classification |\n| `namespace` | `str` | No | Logical partition |\n| `importance` | `float` | No | Importance score (0.0-1.0) |\n| `valence` | `float` | No | Emotional valence (-1.0 to 1.0) |\n| `half_life` | `float` | No | Decay half-life in seconds |\n| `metadata` | `dict` | No | Additional JSON metadata |\n| `certainty` | `float` | No | Belief certainty |\n| `domain` | `str` | No | Domain classification |\n| `source` | `str` | No | Provenance source |\n| `emotional_state` | `str` | No | Emotional context |\n\n资料来源：[py_engine/memory.rs:30-60]()\n\n## Recall Operations\n\n### Querying Memories\n\nThe recall system retrieves memories based on embedding similarity and filters:\n\n```rust\ndb.recall(\n    &emb,                    // Query embedding\n    top_k,                   // Number of results\n    time_window,             // Optional time filter\n    memory_type,             // Type filter\n    include_consolidated,    // Include consolidated memories\n    expand_entities,         // Expand entity references\n    query,                   // Optional text query\n    skip_reinforce,          // Skip reinforcement learning\n    namespace,               // Namespace filter\n    domain,                  // Domain filter\n    source,                  // Source filter\n)\n```\n\n资料来源：[py_engine/memory.rs:105-120]()\n\n### Recall Query Builder\n\nThe `RecallQuery` struct provides a fluent interface for building recall queries:\n\n```rust\nlet mut q = yantrikdb_core::RecallQuery::new(emb).top_k(top_k);\nif let Some(mt) = memory_type {\n    q = q.memory_type(mt);\n}\nif let Some(ns) = namespace {\n    q = q.namespace(ns);\n}\nif let Some(tw) = time_window {\n    q = q.time_window(tw.0, tw.1);\n}\n```\n\n资料来源：[py_engine/memory.rs:145-155]()\n\n### Recall 
Parameters\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `query` | `Option<&str>` | `None` | Text query |\n| `query_embedding` | `Option<Vec<f32>>` | `None` | Pre-computed embedding |\n| `top_k` | `usize` | `10` | Number of results |\n| `time_window` | `Option<(f64, f64)>` | `None` | Time range filter |\n| `memory_type` | `Option<&str>` | `None` | Memory type filter |\n| `include_consolidated` | `bool` | `false` | Include consolidated |\n| `expand_entities` | `bool` | `true` | Expand entity references |\n| `skip_reinforce` | `bool` | `false` | Skip reinforcement |\n| `namespace` | `Option<&str>` | `None` | Namespace filter |\n| `domain` | `Option<&str>` | `None` | Domain filter |\n| `source` | `Option<&str>` | `None` | Source filter |\n\n资料来源：[py_engine/memory.rs:65-80]()\n\n## Materialization Pipeline\n\n### Overview\n\nThe materialization pipeline handles asynchronous processing of memory operations to ensure durability and consistency. It operates in phases:\n\n```mermaid\ngraph LR\n    A[Write Ops] --> B[Phase 3: Record/Rollback]\n    B --> C[Phase 4.1: Materialize Pending]\n    C --> D[Phase 4.2: Apply Updates]\n    D --> E[Phase 4.3: Post-Record Materialization]\n    E --> F[Applied]\n```\n\n### Phase 3: Record Materialization\n\nHandles synchronous recording of operations with rollback capability:\n\n```rust\n\"record\" | \"forget\" | \"relate\" | \"correct\" | \"consolidate\" => {\n    tracing::trace!(\n        target: \"yantrikdb::ingest::materialize\",\n        op_id = %op_id,\n        op_type = %op_type,\n        \"phase 3 stub: marking pending op as applied without inline materialization\"\n    );\n    if self.mark_op_applied(op_id)? 
{\n        applied += 1;\n    }\n}\n```\n\n资料来源：[engine/stats.rs:95-105]()\n\n### Phase 4.1-4.2: Update Operations\n\nFor update operations like task completion and status changes:\n\n```rust\n\"create_task\" | \"update_task_status\" | \"create_goal\" | \"update_goal\" \n| \"record_belief\" | \"relate_belief\" | \"update_preference\" \n| \"record_need\" | \"record_emotion\" => {\n    // Attempt materialization\n    match materialize_fn(payload) {\n        Ok(()) => {\n            if self.mark_op_applied(op_id)? {\n                applied += 1;\n            }\n        }\n        Err(e) => {\n            tracing::warn!(\n                target: \"yantrikdb::ingest::materialize\",\n                op_id = %op_id,\n                error = %e,\n                \"post-record-with-rid materialization failed; leaving pending for retry\"\n            );\n        }\n    }\n}\n```\n\n资料来源：[engine/stats.rs:70-90]()\n\n### Phase 4.3: Post-Record Materialization\n\nHandles entity and relation extraction that runs on the materializer thread to avoid blocking the foreground caller:\n\n> Mirrors the post-INSERT entity/relation extraction loop that used to live on the foreground `record()` path. 
Now runs on the materializer thread so the foreground caller is not blocked on the unbounded loop count.\n\n资料来源：[engine/stats.rs:130-135]()\n\n## Conflict Detection\n\nThe storage engine integrates with the conflict detection system for distributed scenarios:\n\n```rust\n// Phase 2: Evaluate each candidate pair with policy awareness\nfor (src, rel_type, dst1, dst2, vf1, vt1, vf2, vt2, namespace) in &candidates {\n    if conflicts.len() >= max_conflicts {\n        break;\n    }\n\n    // RFC 006 Phase 3: check relation policy before flagging\n    let policy: Option<(bool, bool, String)> = {\n        let conn = db.conn();\n        conn.query_row(\n            \"SELECT overlap_allowed, temporal_required, missing_time_severity \\\n             FROM relation_policies \\\n             WHERE relation_type = ?1 AND (namespace = ?2 OR namespace = '*') \\\n             ORDER BY CASE WHEN namespace = ?2 THEN 0 ELSE 1 END \\\n             LIMIT 1\",\n            params![rel_type, namespace],\n            |row| { ... 
}\n        )\n    };\n}\n```\n\n资料来源：[distributed/conflict.rs:85-105]()\n\n### Conflict Types\n\n| Type | Priority | Description |\n|------|----------|-------------|\n| `identity_fact` | Critical | Contradiction in core facts |\n| `preference` | High | Preference conflict |\n| `temporal` | High | Time-based conflict |\n| `consolidation` | Medium | Consolidation conflict |\n| `minor` | Low | Minor inconsistency |\n\n资料来源：[base/types.rs:50-60]()\n\n## Retrieval by ID\n\nThe `get_memory_by_rid()` method retrieves a specific memory by its record ID:\n\n```rust\npub fn get_memory_by_rid(&self, rid: &str) -> Result<Option<Memory>> {\n    let result = conn.query_row(\n        \"SELECT rid, memory_type, text, created_at, importance, valence,\n                half_life, last_access, access_count, consolidation_status,\n                storage_tier, consolidated_into, metadata, namespace,\n                certainty, domain, source, emotional_state, session_id,\n                due_at, temporal_kind\n         FROM memories WHERE rid = ?1\",\n        params![rid],\n        |row| Ok((...))  // 21 columns mapped\n    )?;\n    \n    // Decrypt and deserialize\n    let text = self.decrypt_text(&row.2)?;\n    let meta_str = self.decrypt_text(&row.12)?;\n    let metadata: serde_json::Value = serde_json::from_str(&meta_str)?;\n    \n    Ok(Some(Memory { ... 
}))\n}\n```\n\n资料来源：[engine/lifecycle.rs:195-225]()\n\n## Query Interface\n\nThe storage engine provides a flexible query interface combining text and embedding search:\n\n```python\n# Query memories with combined text and embedding search\nresults = db.query(\n    query=\"team meeting\",\n    embedding=None,  # Auto-generate from query\n    top_k=10,\n    memory_type=\"episodic\",\n    namespace=\"work\",\n    time_window=(start_ts, end_ts),\n    expand_entities=True,\n    include_consolidated=False,\n)\n```\n\n资料来源：[py_engine/memory.rs:85-100]()\n\n### Query vs Recall\n\n| Aspect | `query()` | `recall()` |\n|--------|-----------|------------|\n| Purpose | Combined text + embedding search | Pure embedding similarity |\n| Use Case | Exploratory queries | Memory association |\n| Parameters | Query text or embedding | Primarily embedding |\n| Filters | Full filter suite | Full filter suite |\n\n## Error Handling\n\nThe storage engine uses Rust's `Result` type for error handling with the following patterns:\n\n```rust\n.ok_or_else(|| PyRuntimeError::new_err(\"YantrikDB is closed\"))\n```\n\nErrors are propagated through the Python bindings using the `map_err` function which converts Rust errors to Python exceptions.\n\n资料来源：[py_engine/memory.rs:40]()\n\n## Performance Considerations\n\n### Memory Retrieval Optimization\n\n1. **Encryption on-demand**: Text fields are only decrypted when accessed\n2. **Lazy metadata parsing**: JSON metadata is parsed only when needed\n3. **Storage tiering**: Frequently accessed memories can be promoted to hot tier\n4. 
**Consolidation filtering**: `include_consolidated=false` skips consolidation lookups\n\n### Asynchronous Materialization\n\nPost-record operations run on a background materializer thread to prevent foreground blocking:\n\n> Now runs on the materializer thread so the foreground caller is not blocked on the unbounded loop count.\n\n资料来源：[engine/stats.rs:135-138]()\n\n## Related Systems\n\n| System | Integration Point | Purpose |\n|--------|-------------------|---------|\n| Record System | `record()` | Memory creation |\n| Recall System | `recall()`, `query()` | Memory retrieval |\n| Materialization | `materialize_ops()` | Async operation processing |\n| Conflict Detection | `relation_policies` table | Distributed consistency |\n| Encryption | `decrypt_text()` | Data security |\n\n## API Reference Summary\n\n### Core Methods\n\n| Method | File | Purpose |\n|--------|------|---------|\n| `record()` | py_engine/memory.rs | Store new memory |\n| `recall()` | py_engine/memory.rs | Retrieve by embedding |\n| `query()` | py_engine/memory.rs | Combined search |\n| `get_memory_by_rid()` | engine/lifecycle.rs | Lookup by ID |\n| `materialize_ops()` | engine/stats.rs | Process pending ops |\n\n### Data Structures\n\n| Struct | File | Purpose |\n|--------|------|---------|\n| `Memory` | engine/lifecycle.rs | Core memory representation |\n| `RecallQuery` | py_engine/memory.rs | Query builder |\n| `Conflict` | base/types.rs | Conflict representation |\n\n---\n\n<a id='page-core-api'></a>\n\n## Core API Reference\n\n### 相关页面\n\n相关主题：[Overview](#page-overview), [Cognition Layer](#page-cognition-layer), [Conflict Detection and Resolution](#page-conflict-resolution)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [crates/yantrikdb-core/src/engine/mod.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/mod.rs)\n- 
[crates/yantrikdb-core/src/engine/record.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/record.rs)\n- [crates/yantrikdb-core/src/engine/recall.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/recall.rs)\n- [crates/yantrikdb-core/src/engine/graph_ops.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/graph_ops.rs)\n- [crates/yantrikdb-core/src/base/scoring.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/base/scoring.rs)\n</details>\n\n# Core API Reference\n\nThis page documents the core engine API of yantrikdb, covering the fundamental operations for memory storage, retrieval, graph relationships, and cognitive processing.\n\n## Overview\n\nThe Core API provides the foundational primitives for building personal memory systems. It consists of four primary subsystems:\n\n| Subsystem | Purpose |\n|-----------|---------|\n| **Record** | Store memories with importance, valence, and metadata |\n| **Recall** | Semantic search and retrieval using embeddings |\n| **Graph** | Relationship management between memory nodes |\n| **Cognitive** | Higher-order reasoning (think loop, conflict detection) |\n\n资料来源：[crates/yantrikdb-core/src/engine/mod.rs]()\n\n### Architecture Overview\n\n```mermaid\ngraph TD\n    A[User Input] --> B[Record API]\n    A --> C[Recall API]\n    A --> D[Graph API]\n    B --> E[(SQLite Storage)]\n    C --> E\n    D --> E\n    E --> F[Cognitive Engine]\n    F --> G[Think Loop]\n    G --> H[Consolidation]\n    G --> I[Conflict Detection]\n    G --> J[Pattern Mining]\n```\n\n## Memory Data Model\n\n### Core Memory Structure\n\nThe fundamental unit of storage in yantrikdb is the `Memory` struct, defined in `lifecycle.rs`:\n\n| Field | Type | Description |\n|-------|------|-------------|\n| `rid` | `String` | Unique resource identifier |\n| `memory_type` | `String` | Type classification (episodic, semantic, procedural) 
|\n| `text` | `String` | The actual memory content (encrypted at rest) |\n| `created_at` | `f64` | Unix timestamp of creation |\n| `importance` | `f64` | Significance score [0.0, 1.0] |\n| `valence` | `f64` | Emotional valence [-1.0, 1.0] |\n\nThe remaining `Memory` fields are documented in the [Storage Engine](#page-storage-engine) section.\n\n---\n\n<a id='page-cognition-layer'></a>\n\n## Cognition Layer\n\n### 相关页面\n\n相关主题：[Core API Reference](#page-core-api), [Conflict Detection and Resolution](#page-conflict-resolution)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [crates/yantrikdb-core/src/cognition/state.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n- [crates/yantrikdb-core/src/cognition/query_dsl.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/query_dsl.rs)\n- [crates/yantrikdb-core/src/engine/query_dsl.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/query_dsl.rs)\n- [crates/yantrikdb-core/src/cognition/narrative.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/narrative.rs)\n- [crates/yantrikdb-core/src/cognition/receptivity.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/receptivity.rs)\n- [crates/yantrikdb-core/src/cognition/surfacing.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/surfacing.rs)\n- [crates/yantrikdb-core/src/cognition/extractor.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/extractor.rs)\n- [crates/yantrikdb-core/src/base/types.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/base/types.rs)\n</details>\n\n# Cognition Layer\n\nThe Cognition Layer is the reasoning and knowledge management subsystem of yantrikdb. 
It provides cognitive operations for belief revision, goal planning, intent recognition, pattern detection, and proactive user assistance. The layer orchestrates a pipeline of cognitive operators that process user interactions, external observations, and system events to maintain a dynamic model of user needs, goals, and preferences.\n\n## Architecture Overview\n\nThe Cognition Layer operates as a staged pipeline that transforms raw observations into structured cognitive entities (beliefs, goals, tasks, routines, intents) and surfaces actionable insights to the user at appropriate moments.\n\n```mermaid\ngraph TD\n    subgraph Input\n        Obs[User Observation] --> Extr[Extractor]\n        Ev[Evidence Input] --> Extr\n    end\n    \n    subgraph \"Cognitive Pipeline\"\n        Extr --> Ops[Operator Pipeline]\n        Ops --> Attend[Attend]\n        Ops --> Recall[Recall]\n        Ops --> Believe[Believe]\n        Ops --> Compare[Compare]\n        Ops --> Plan[Plan]\n        Ops --> Project[Project]\n        Ops --> Anticipate[Anticipate]\n        Ops --> Assess[Assess]\n        Ops --> Coherence[Coherence Check]\n    end\n    \n    subgraph \"Working Memory\"\n        Attend --> WS[Working Set]\n        Recall --> WS\n    end\n    \n    subgraph \"Long-term Store\"\n        WS <--> KG[Knowledge Graph]\n        KG --> Beliefs[Beliefs]\n        KG --> Goals[Goals]\n        KG --> Routines[Routines]\n    end\n    \n    subgraph \"Output\"\n        Coherence --> Surf[Surfacing]\n        Surf --> Suggest[Proactive Suggestion]\n        Surf --> SurfaceMode[Surface Modes]\n    end\n```\n\n## Node Types\n\nThe Cognition Layer manages a graph of cognitive nodes, each representing a distinct aspect of user state and knowledge.\n\n### NodeKind Classification\n\n| Kind | Description | Persistence | Typical Confidence | Typical Activation |\n|------|-------------|-------------|-------------------|-------------------|\n| Entity | Real-world objects, people, concepts | Yes | 
0.70 | 0.70 |\n| Episode | Past experiences and events | Yes | 0.70 | 0.60 |\n| Belief | User's mental models and facts | Yes | 0.70 | 0.70 |\n| Goal | Desired outcomes | Yes | 0.85 | 0.75 |\n| Task | Concrete action items | Yes | 0.90 | 0.80 |\n| IntentHypothesis | Inferred user wants (transient) | No | 0.60 | 0.50 |\n| Routine | Recurring behavioral patterns | Yes | 0.70 | 0.50 |\n| Need | User requirements (Maslow-based) | Yes | 0.60 | 0.70 |\n| Opportunity | Time-bounded chances for action | Yes | 0.40 | 0.60 |\n| Risk | Potential problems | Yes | 0.40 | 0.70 |\n| Constraint | Boundaries and rules | Yes | 0.90 | 0.80 |\n| Preference | User choices and inclinations | Yes | 0.60 | 0.50 |\n| ConversationThread | Dialogue context (transient) | No | 0.90 | 0.80 |\n| ActionSchema | Reusable action templates | Yes | 0.70 | 0.40 |\n\n资料来源：[state.rs:350-370](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n### Cognitive Attributes\n\nEvery cognitive node carries a universal attribute set defining its dynamic state:\n\n| Attribute | Range | Description |\n|-----------|-------|-------------|\n| confidence | [0.0, 1.0] | Trust level in the node's accuracy |\n| activation | [0.0, 1.0] | Current spreading activation energy |\n| salience | [0.0, 1.0] | Prominence in user's attention |\n| persistence | [0.0, 1.0] | How long this node stays relevant |\n| valence | [-1.0, 1.0] | Emotional tone (negative to positive) |\n| urgency | [0.0, 1.0] | Time-critical nature |\n| novelty | [0.0, 1.0] | How surprising/unexpected (decays with repetition) |\n| volatility | [0.0, 1.0] | Rate of attribute change |\n| evidence_count | u32 | Number of supporting observations |\n| provenance | ProvenanceType | Source reliability of this node |\n\n资料来源：[state.rs:280-330](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n## Provenance System\n\nThe provenance system tracks the source and reliability of 
cognitive nodes, enabling appropriate trust calibration during reasoning.\n\n### Provenance Types\n\n| Type | Reliability Prior | Description |\n|------|------------------|-------------|\n| Told | 0.95 | User explicitly stated |\n| Observed | 0.90 | Directly observed behavior |\n| Experimented | 0.85 | Confirmed via controlled experiment |\n| Consolidated | 0.80 | Merged from multiple sources |\n| Extracted | 0.75 | From external documents |\n| Inferred | 0.60 | Pattern-based inference |\n| SystemDefault | 0.50 | Default values (weakest) |\n\n资料来源：[state.rs:220-240](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n## Edge Types and Activation Spreading\n\nRelationships between cognitive nodes are represented as typed edges with associated activation transfer coefficients that govern spreading activation dynamics.\n\n| Edge Type | Transfer | Description |\n|-----------|----------|-------------|\n| Causes | 0.8 | Strong causal relationship |\n| Supports | 0.7 | Confirms or strengthens target |\n| Triggers | 0.7 | Initiates target activation |\n| AdvancesGoal | 0.6 | Progresses toward goal |\n| Requires | 0.5 | Prerequisite relationship |\n| Predicts | 0.4 | Anticipatory relationship |\n| SubtaskOf | 0.4 | Decomposition hierarchy |\n| AssociatedWith | 0.3 | Weak contextual link |\n| SimilarTo | 0.3 | Analogy relationship |\n| InstanceOf | 0.3 | Classification relationship |\n| PartOf | 0.3 | Compositional relationship |\n| Prefers | 0.3 | Preference indicator |\n| PrecedesTemporally | 0.2 | Temporal ordering |\n| Contradicts | -0.4 | Mutual exclusion |\n| Prevents | -0.6 | Active blocking |\n| BlocksGoal | -0.7 | Prevents goal achievement |\n| Avoids | -0.5 | Negative preference |\n| Constrains | -0.5 | Imposes limitation |\n\n资料来源：[state.rs:150-180](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n## Cognitive Operators\n\nThe reasoning pipeline executes a sequence of 
cognitive operators in priority order. Each operator performs a specific reasoning function.\n\n### Operator Priorities\n\n| Operator | Priority | Rationale |\n|----------|----------|-----------|\n| Attend | 10 | Foundation — always run |\n| Recall | 9 | Critical for context |\n| Believe | 8 | Evidence integration |\n| Compare | 7 | Action selection |\n| Constrain | 7 | Safety — always run if comparing |\n| Plan | 6 | Means-ends reasoning |\n| Project | 5 | Forward simulation |\n| Anticipate | 4 | Proactive — nice to have |\n| Assess | 3 | Meta — can skip under pressure |\n| CoherenceCheck | 2 | Maintenance — skip if budget tight |\n\n资料来源：[query_dsl.rs:40-55](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/query_dsl.rs)\n\n### Attend Operator\n\nThe Attend operator focuses attention on seed nodes and propagates activation through the knowledge graph.\n\n```rust\npub struct AttendOp {\n    pub seeds: Vec<NodeId>,      // Starting nodes for activation\n    pub max_hops: u32,           // Maximum propagation depth\n    pub decay: f64,              // Activation decay per hop\n}\n```\n\n资料来源：[query_dsl.rs:65-70](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/query_dsl.rs)\n\n**Execution behavior:**\n- Seeds receive a +0.3 activation boost (capped at 1.0)\n- Activation spreads through edges with configurable decay\n- Returns count of activated nodes and top-activated node list\n\n资料来源：[engine/query_dsl.rs:180-210](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/query_dsl.rs)\n\n### Recall Operator\n\nThe Recall operator retrieves relevant memories from long-term storage into the working set.\n\n```rust\npub struct RecallOp {\n    pub top_k: usize,           // Maximum results\n    pub query: Option<String>,  // Text query\n    pub domain: Option<String>, // Filter by domain\n}\n```\n\n### Believe Operator\n\nThe Believe operator integrates new evidence into the 
belief system using Bayesian revision.\n\n```rust\npub struct BelieveOp {\n    pub evidence: EvidenceInput,  // New observation to integrate\n}\n\npub struct EvidenceInput {\n    pub target: Option<NodeId>,   // Target belief or create new\n    pub observation: String,       // The evidence\n    pub direction: f64,           // +1 = confirming, -1 = contradicting\n}\n```\n\n### Compare Operator\n\nCompares candidate actions or beliefs against constraints and preferences to select optimal choices.\n\n### Plan Operator\n\nExecutes means-ends reasoning to generate action sequences that advance specified goals.\n\n### Project Operator\n\nPerforms forward simulation to predict outcomes of potential action sequences.\n\n### Anticipate Operator\n\nIdentifies opportunities and risks based on current state and detected patterns.\n\n### Assess Operator\n\nEvaluates overall system health, belief consistency, and goal progress.\n\n### Coherence Check\n\nValidates logical consistency across beliefs and detects conflicting information.\n\n## User State Modeling\n\n### Activity Type\n\nThe system tracks the user's current activity level to calibrate interruption costs and suggestion timing.\n\n| Activity | Interruption Cost | Description |\n|----------|------------------|-------------|\n| Idle | 0.10 | No active task |\n| JustReturned | 0.30 | Recently resumed work |\n| Browsing | 0.35 | Passive consumption |\n| Communicating | 0.45 | In conversation |\n| TaskSwitching | 0.55 | Mid-task context switch |\n| FocusedWork | 0.75 | Concentration mode |\n| DeepFocus | 0.95 | Immersive concentration |\n\n资料来源：[receptivity.rs:25-50](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/receptivity.rs)\n\n### Need Categories\n\nUser needs are classified according to a needs-based taxonomy:\n\n| Category | Description |\n|----------|-------------|\n| Informational | Knowledge and learning needs |\n| Social | Connection and relationship needs |\n| Emotional | 
Wellbeing and mood management |\n| Organizational | Structure and order needs |\n| Creative | Expression and innovation |\n| Health | Physical wellbeing |\n| Financial | Economic security |\n| Professional | Career and productivity |\n\n资料来源：[state.rs:10-25](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n## Action Types\n\nCognitive agents can perform actions of different kinds, each with associated base costs:\n\n| Action | Base Cost | Description |\n|--------|-----------|-------------|\n| Abstain | 0.00 | Do nothing |\n| Inform | 0.05 | Passive information delivery |\n| Organize | 0.10 | Structure and categorization |\n| Suggest | 0.15 | Propose without commitment |\n| Communicate | 0.20 | Direct user interaction |\n| Schedule | 0.25 | Time management |\n| Warn | 0.30 | Alert about risks |\n| Execute | 0.40 | Take automated action |\n\n资料来源：[state.rs:100-130](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n## Task Lifecycle\n\nTasks move through a defined status workflow:\n\n```mermaid\ngraph LR\n    P[Pending] --> IP[InProgress]\n    IP --> C[Completed]\n    IP --> B[Blocked]\n    P --> CAN[Cancelled]\n    B --> IP\n    CAN --> P\n```\n\n### Task Status\n\n| Status | String Value | Description |\n|--------|--------------|-------------|\n| Pending | pending | Not yet started |\n| InProgress | in_progress | Currently being worked |\n| Completed | completed | Successfully finished |\n| Cancelled | cancelled | Abandoned without completion |\n| Blocked | blocked | Waiting on prerequisites |\n\n资料来源：[state.rs:200-230](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n## Surfacing System\n\nThe surfacing system determines when and how to present proactive suggestions to the user.\n\n### Surface Modes\n\n| Mode | Description |\n|------|-------------|\n| Immediate | Show right now |\n| Soon | Show within current context |\n| 
Queued | Add to notification queue |\n| Background | Process but don't interrupt |\n\n### Suppression Reasons\n\nSuggestions may be suppressed for various reasons:\n\n| Reason | Description |\n|--------|-------------|\n| LowReceptivity | User is busy |\n| ItemSuppressionRule | User preference to hide |\n| QuietHours | Outside allowed hours |\n| RateLimited | Too frequent |\n| AntiNag | Already dismissed |\n| MaxSurfaces | Budget exhausted |\n| TooSoon | Recently surfaced |\n| NotificationModeBlock | DND enabled |\n\n资料来源：[surfacing.rs:15-30](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/surfacing.rs)\n\n### ProactiveSuggestion Structure\n\n```rust\npub struct ProactiveSuggestion {\n    pub agenda_id: AgendaId,       // Source agenda item\n    pub description: String,       // Human-readable text\n    pub kind: AgendaKind,          // Type of open loop\n    pub mode: SurfaceMode,         // How prominently to show\n    pub reason: SurfaceReason,     // Why being surfaced\n    pub confidence: f64,           // Relevance score [0,1]\n    pub urgency: f64,              // Time sensitivity [0,1]\n}\n```\n\n## Conflict Detection\n\nThe system detects and manages conflicts between memories and beliefs.\n\n### Conflict Types\n\n| Type | Default Priority | Description |\n|------|-----------------|-------------|\n| IdentityFact | critical | Core identity contradiction |\n| Preference | high | Preference inconsistency |\n| Temporal | high | Time-based conflict |\n| Consolidation | medium | Merge conflict |\n| Minor | low | Minor inconsistency |\n\n资料来源：[types.rs:180-210](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/base/types.rs)\n\n### Conflict Resolution\n\nConflicts are resolved through a policy-aware process that checks namespace-specific policies before flagging inconsistencies. 
Resolution strategies include:\n\n- **Timestamp-based**: Newer observation wins\n- **Source-based**: Higher provenance reliability wins\n- **Evidence count**: More supporting observations wins\n- **Manual resolution**: User intervention required for critical conflicts\n\n## Natural Language Understanding\n\nThe extractor component converts free-text observations into structured cognitive operations:\n\n| Template | Resulting Operation |\n|----------|-------------------|\n| CreateTask | Creates new task with priority |\n| CreateGoal | Creates goal with priority |\n| SetPreference | Records preference in domain |\n| CreateNeed | Records need with category |\n| CreateRoutine | Records behavioral pattern |\n| EmotionalMarker | Logs emotional state |\n| CreateRelationship | Records person relationship |\n| Correction | Updates belief with correction |\n| TaskCompleted | Updates task status |\n\n资料来源：[extractor.rs:150-180](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/extractor.rs)\n\n## Narrative Arc Tracking\n\nThe system maintains narrative structures to track ongoing storylines in the user's life:\n\n### Arc Status\n\n| Status | Description |\n|--------|-------------|\n| Emerging | Recently detected, accumulating episodes |\n| Active | Continuously developing |\n| Paused | No recent activity, may resume |\n| Resolved | Goal achieved or concluded |\n| Abandoned | Intentionally stopped |\n\n### Chapter Types\n\nNarrative arcs are structured into chapters:\n\n| Type | Description |\n|------|-------------|\n| Setup | Initial context setting |\n| Rising | Building tension or progress |\n| Climax | Peak moment |\n| Falling | Winding down |\n| Resolution | Final conclusion |\n| Interlude | Pause or side-thread |\n\n资料来源：[narrative.rs:50-80](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/narrative.rs)\n\n## Priority Levels\n\nCognitive entities use a priority tier system for urgency-based 
processing:\n\n| Priority | Activation Threshold | Use Case |\n|----------|---------------------|----------|\n| Critical | 1.00 | Safety, immediate health |\n| High | 0.75 | Important deadlines |\n| Medium | 0.50 | Normal tasks |\n| Low | 0.25 | Nice-to-have items |\n\n资料来源：[state.rs:240-260](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n---\n\n<a id='page-conflict-resolution'></a>\n\n## Conflict Detection and Resolution\n\n### 相关页面\n\n相关主题：[Cognition Layer](#page-cognition-layer), [Core API Reference](#page-core-api)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [crates/yantrikdb-core/src/cognition/state.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n- [crates/yantrikdb-core/src/cognition/query_dsl.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/query_dsl.rs)\n- [crates/yantrikdb-core/src/base/types.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/base/types.rs)\n- [crates/yantrikdb-core/src/cognition/coherence.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/coherence.rs)\n- [crates/yantrikdb-core/src/cognition/surfacing.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/surfacing.rs)\n</details>\n\n# Conflict Detection and Resolution\n\nThe Conflict Detection and Resolution system is a core cognitive subsystem within yantrikdb that identifies, categorizes, and resolves contradictions between beliefs, memories, and other cognitive nodes stored in the knowledge graph. This system ensures the internal consistency of the user's cognitive model by detecting conflicts, prioritizing them based on severity, and applying appropriate resolution strategies.\n\n## Overview\n\nThe conflict resolution system operates as part of the broader cognitive engine, integrated with the coherence checking pipeline. 
When contradictions are detected between nodes in the belief network, the system evaluates the evidence supporting each conflicting belief and automatically or semi-automatically resolves the conflict.\n\n资料来源：[crates/yantrikdb-core/src/cognition/coherence.rs]()\n\n## Conflict Types\n\nConflicts in yantrikdb are categorized into five distinct types, each with different priority levels and default handling strategies.\n\n| Conflict Type | Priority | Description |\n|---------------|----------|-------------|\n| `IdentityFact` | Critical | Conflicts about fundamental identity or factual information |\n| `Preference` | High | Contradicting user preferences or stated likes/dislikes |\n| `Temporal` | High | Time-related contradictions (scheduling, deadlines) |\n| `Consolidation` | Medium | Conflicts arising from memory consolidation processes |\n| `Minor` | Low | Minor inconsistencies that don't affect core beliefs |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs]()\n\n```rust\npub enum ConflictType {\n    IdentityFact,\n    Preference,\n    Temporal,\n    Consolidation,\n    Minor,\n}\n```\n\nEach conflict type has an associated default priority that determines how urgently it should be addressed in the surfacing queue.\n\n## Conflict Data Model\n\nThe `Conflict` struct captures all metadata about a detected conflict.\n\n### Conflict Structure\n\n| Field | Type | Description |\n|-------|------|-------------|\n| `conflict_id` | String | Unique identifier for the conflict |\n| `conflict_type` | String | One of the five conflict types |\n| `priority` | String | Priority level (critical, high, medium, low) |\n| `status` | String | Current resolution status |\n| `memory_a` | String | Reference ID of first conflicting memory |\n| `memory_b` | String | Reference ID of second conflicting memory |\n| `entity` | Option\\<String\\> | Associated entity if applicable |\n| `rel_type` | Option\\<String\\> | Relationship type between memories |\n| `detected_at` | f64 | Unix timestamp 
when conflict was detected |\n| `detected_by` | String | Component or operator that detected the conflict |\n| `detection_reason` | String | Explanation of why this is a conflict |\n| `resolved_at` | Option\\<f64\\> | Timestamp when resolution was applied |\n| `resolved_by` | Option\\<String\\> | Resolution strategy or component |\n| `strategy` | Option\\<String\\> | Resolution strategy used |\n| `winner_rid` | Option\\<String\\> | Reference ID of winning memory |\n| `resolution_note` | Option\\<String\\> | Human-readable explanation of resolution |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs]()\n\n## Conflict Resolution Strategies\n\nWhen a conflict is resolved, the system generates a `ConflictResolutionResult` containing details of the resolution outcome.\n\n| Field | Type | Description |\n|-------|------|-------------|\n| `conflict_id` | String | The resolved conflict's identifier |\n| `strategy` | String | Strategy applied (e.g., \"evidence_based\", \"user_choice\") |\n| `winner_rid` | String | Reference ID of the winning memory node |\n| `loser_tombstoned` | bool | Whether the losing memory was soft-deleted |\n| `new_memory_rid` | Option\\<String\\> | ID of newly created merged memory, if applicable |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs]()\n\n### Resolution Logic\n\nThe coherence checking system applies evidence-based resolution when two beliefs contradict each other. 
The algorithm compares the evidence count between the two conflicting nodes:\n\n```rust\nlet (loser, winner_label) = match (node_a, node_b) {\n    (Some(a), Some(b)) => {\n        // Prefer keeping the one with more evidence.\n        if a.attrs.evidence_count >= b.attrs.evidence_count {\n            (b.id, a.label.clone())\n        } else {\n            (a.id, b.label.clone())\n        }\n    }\n    (None, Some(_)) => (contradiction.belief_a, \"unknown\".to_string()),\n    (Some(_), None) => (contradiction.belief_b, \"unknown\".to_string()),\n    (None, None) => (contradiction.belief_a, \"unknown\".to_string()),\n};\n```\n\nThe losing belief is demoted (tombstoned), and the explanation reflects which belief had higher evidence support. This approach ensures that beliefs with more corroborating evidence are preserved in the knowledge graph.\n\n资料来源：[crates/yantrikdb-core/src/cognition/coherence.rs]()\n\n## Conflict Surfacing\n\nNot all conflicts require immediate user attention. The surfacing system uses the `SurfaceReason` enum to determine when and how conflicts should be presented to the user.\n\n### Surfacing Reasons Related to Conflicts\n\n| Reason | Base Confidence | Description |\n|--------|-----------------|-------------|\n| `ConflictNeedsResolution` | 0.7 | Active conflict that requires user input |\n| `AnomalyDetected` | 0.65 | Statistical anomaly suggesting hidden conflict |\n| `UrgencyThreshold` | 0.5 | Generic urgency-based surfacing trigger |\n\n资料来源：[crates/yantrikdb-core/src/cognition/surfacing.rs]()\n\n### Surfacing Modes\n\nWhen a conflict is surfaced, the system selects an appropriate presentation mode based on urgency and priority:\n\n| Mode | Disruption Cost | Use Case |\n|------|----------------|----------|\n| `Whisper` | 0.05 | Low-priority informational notes |\n| `Nudge` | 0.25 | Moderate importance, user-initiated check |\n| `Alert` | 0.60 | High-priority conflicts requiring attention |\n| `Preempt` | 0.95 | Critical identity conflicts, 
immediate attention |\n\n资料来源：[crates/yantrikdb-core/src/cognition/surfacing.rs]()\n\n## Cognitive Edge Kinds and Conflict Detection\n\nThe belief network uses typed edges to represent relationships between cognitive nodes. Certain edge types are directly relevant to conflict detection.\n\n### Epistemic Edges\n\nEdges that participate in belief revision and conflict detection:\n\n| Edge Kind | Activation Transfer | Role |\n|-----------|---------------------|------|\n| `Supports` | 0.7 | Positive evidence for a belief |\n| `Contradicts` | -0.4 | Direct opposition between beliefs |\n\nThe system identifies conflicts when a `Contradicts` edge exists between two belief nodes. These edges have negative activation transfer, meaning they inhibit the target node's activation level.\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs]()\n\n### Edge Classification Methods\n\n```rust\n/// Whether this edge participates in belief revision.\npub fn is_epistemic(self) -> bool {\n    matches!(self, Self::Supports | Self::Contradicts)\n}\n\n/// Whether this edge type is inhibitory (suppresses target activation).\npub fn is_inhibitory(self) -> bool {\n    self.activation_transfer() < 0.0\n}\n```\n\n## Coherence Checking Pipeline\n\nThe coherence checking system is responsible for detecting conflicts as part of the cognitive processing pipeline.\n\n### Operator Priority in Cognitive Loop\n\n| Operator | Priority | Role |\n|----------|----------|------|\n| `Attend` | 10 | Foundation - always run |\n| `Recall` | 9 | Critical for context |\n| `Believe` | 8 | Evidence integration |\n| `Compare` | 7 | Action selection |\n| `Constrain` | 7 | Safety - always run if comparing |\n| `Plan` | 6 | Means-ends reasoning |\n| `Project` | 5 | Forward simulation |\n| `Anticipate` | 4 | Proactive - nice to have |\n| `Assess` | 3 | Meta - can skip under pressure |\n| `CoherenceCheck` | 2 | Maintenance - skip if budget tight |\n\nThe `CoherenceCheck` operator has the lowest priority, meaning it may 
be skipped when computational budget is constrained. This design ensures that core cognitive functions (attention, recall, belief integration) always execute first.\n\n资料来源：[crates/yantrikdb-core/src/cognition/query_dsl.rs]()\n\n### Fragmentation Detection\n\nThe coherence system also monitors attention fragmentation—how evenly distributed activation is across working set nodes:\n\n```rust\nfn compute_fragmentation(ws: &WorkingSet) -> f64 {\n    if ws.len() <= 1 {\n        return 0.0;\n    }\n    \n    let activations: Vec<f64> = ws.iter().map(|n| n.attrs.activation).collect();\n    let total: f64 = activations.iter().sum();\n    \n    if total <= 0.0 {\n        return 0.0;\n    }\n    \n    // Normalized entropy (Shannon entropy / max entropy)\n    let n = activations.len() as f64;\n    // ...\n}\n```\n\nHigh fragmentation (many nodes with similar activation) can indicate unresolved conflicts competing for attention.\n\n资料来源：[crates/yantrikdb-core/src/cognition/coherence.rs]()\n\n## Python API\n\nThe Python bindings expose conflict resolution through the `PyConflictEngine` interface.\n\n### Key Methods\n\n| Method | Parameters | Return | Description |\n|--------|------------|--------|-------------|\n| `list_conflicts` | `namespace`, `limit` | Vec\\<Dict\\> | List conflicts in namespace |\n| `get_conflict` | `conflict_id` | Option\\<Dict\\> | Retrieve specific conflict |\n| `resolve_conflict` | `conflict_id`, `strategy`, `winner_rid`, `new_text`, `resolution_note` | Dict | Apply resolution strategy |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/cognition.rs]()\n\n### Resolution Example\n\n```python\nresult = db.resolve_conflict(\n    conflict_id=\"conflict_123\",\n    strategy=\"evidence_based\",\n    winner_rid=\"memory_456\",\n    new_text=None,\n    resolution_note=\"Preferred belief with higher evidence count\"\n)\n```\n\n## System Architecture\n\n```mermaid\ngraph TD\n    subgraph \"Cognitive Engine\"\n        A[CognitiveNode Storage] --> 
B[CoherenceChecker]\n        B --> C[ConflictDetector]\n        C --> D[Conflict Queue]\n        D --> E[SurfaceReason Evaluator]\n        E --> F[ProactiveSuggestion Generator]\n    end\n    \n    subgraph \"Conflict Types\"\n        G[IdentityFact]\n        H[Preference]\n        I[Temporal]\n        J[Consolidation]\n        K[Minor]\n    end\n    \n    subgraph \"Resolution Outcomes\"\n        L[Winner Selected]\n        M[Loser Tombstoned]\n        N[New Merged Memory]\n        O[User Notified]\n    end\n    \n    C --> G\n    C --> H\n    C --> I\n    C --> J\n    C --> K\n    \n    F --> L\n    F --> M\n    F --> N\n    F --> O\n```\n\n## Provenance and Reliability\n\nConflicts are detected based on the provenance source of each memory node. Different provenance types have different reliability priors:\n\n| Provenance | Reliability Prior | Description |\n|------------|------------------|-------------|\n| `Told` | 0.95 | User explicitly stated - highest trust |\n| `Observed` | 0.90 | Directly observed behavior |\n| `Experimented` | 0.85 | Confirmed via controlled experiment |\n| `Consolidated` | 0.80 | Merged from multiple sources |\n| `Extracted` | 0.75 | From external documents |\n| `Inferred` | 0.60 | Pattern-based inference |\n| `SystemDefault` | 0.50 | Default values - weakest |\n\nWhen conflicts involve memories with different provenance sources, the system considers reliability priors in its resolution strategy.\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs]()\n\n## Summary\n\nThe Conflict Detection and Resolution system in yantrikdb provides a robust mechanism for maintaining cognitive consistency:\n\n1. **Detection**: Conflicts are identified through the `Contradicts` edge type in the belief network and during coherence checking operations.\n\n2. **Classification**: Conflicts are categorized into five types with associated priority levels, enabling appropriate handling based on severity.\n\n3. 
**Resolution**: Evidence-based resolution selects the belief with higher evidence count as the winner, with the losing belief being tombstoned.\n\n4. **Surfacing**: High-priority conflicts trigger the surfacing system to proactively notify the user through appropriate channels (whisper, nudge, alert, or preempt).\n\n5. **Integration**: The system integrates with the broader cognitive engine, respecting operator priorities and computational budgets.\n\n---\n\n<a id='page-mcp-server'></a>\n\n## MCP Server Integration\n\n### 相关页面\n\n相关主题：[Python Bindings](#page-python-bindings)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [src/yantrikdb/mcp/server.py](https://github.com/yantrikos/yantrikdb/blob/main/src/yantrikdb/mcp/server.py)\n- [src/yantrikdb/mcp/tools.py](https://github.com/yantrikos/yantrikdb/blob/main/src/yantrikdb/mcp/tools.py)\n- [src/yantrikdb/mcp/resources.py](https://github.com/yantrikos/yantrikdb/blob/main/src/yantrikdb/mcp/resources.py)\n- [MCP_REDESIGN.md](https://github.com/yantrikos/yantrikdb/blob/main/MCP_REDESIGN.md)\n</details>\n\n# MCP Server Integration\n\nThe MCP (Model Context Protocol) Server Integration provides a standardized interface for AI agents to interact with YantrikDB's persistent cognitive memory. This integration enables AI assistants—including Claude Code, Cursor, Windsurf, and any MCP-compatible client—to automatically remember decisions, recall relevant context, and detect contradictions without explicit user prompting.\n\n## Overview\n\nThe MCP server is a Python-based component built on top of the FastMCP framework that exposes YantrikDB's core capabilities through the Model Context Protocol. 
This allows AI agents to maintain persistent memory across sessions, automatically consolidating experiences over time.\n\n资料来源：[MCP_REDESIGN.md:1-20]()\n\n### Core Objectives\n\nThe MCP integration was designed to achieve the following success criteria:\n\n| Criterion | Description |\n|-----------|-------------|\n| Zero-configuration setup | `pip install yantrikdb[mcp]` with 3 lines in mcp.json |\n| Automatic memory recall | Agent recalls relevant context at conversation start |\n| Automatic memory storage | Agent remembers decisions, preferences, corrections |\n| Conflict detection | Agent surfaces contradictions naturally |\n| Cross-platform compatibility | Works with Claude Code, Cursor, Windsurf, and any MCP client |\n| Fast first-run | Database initialization completes in under 30 seconds |\n| Session persistence | Memory persists across sessions with gradual consolidation |\n\n资料来源：[MCP_REDESIGN.md:50-58]()\n\n## Architecture\n\n### Component Structure\n\nThe MCP server is organized into three primary modules within `src/yantrikdb/mcp/`:\n\n```mermaid\ngraph TD\n    A[MCP Client<br/>Claude Code, Cursor] --> B[server.py<br/>FastMCP Lifespan]\n    B --> C[tools.py<br/>10 Tool Definitions]\n    B --> D[resources.py<br/>MCP Resources]\n    C --> E[YantrikDB Core<br/>Rust Engine]\n    D --> E\n    E --> F[SQLite Database<br/>memory.db]\n```\n\n资料来源：[MCP_REDESIGN.md:25-35]()\n\n### File Structure\n\n| File | Purpose |\n|------|---------|\n| `server.py` | FastMCP server initialization, lifespan context, YantrikDB + embedder initialization |\n| `tools.py` | 10 tool definitions: remember, recall, relate, entities, beliefs, conflicts, patterns, consolidate, forget, stats |\n| `resources.py` | MCP resource handlers for dynamic data access |\n| `__init__.py` | Main entry point for the MCP command |\n\n资料来源：[MCP_REDESIGN.md:20-30]()\n\n## Available Tools\n\nThe MCP server exposes 10 core tools that AI agents can invoke. 
Each tool is designed with rich descriptions that guide auto-pilot behavior.\n\n资料来源：[MCP_REDESIGN.md:31-35]()\n\n### Tool Reference\n\n| Tool | Purpose | Key Parameters |\n|------|---------|----------------|\n| `remember` | Store a memory with embedding | `text`, `memory_type`, `importance`, `domain`, `namespace` |\n| `recall` | Retrieve semantically similar memories | `query`, `top_k`, `memory_type`, `domain`, `time_window` |\n| `relate` | Create a relationship between two entities | `src`, `dst`, `rel_type` |\n| `entities` | Query entity graph | `query`, `entity_type`, `top_k` |\n| `beliefs` | Access the belief graph | `query`, `include_inferred` |\n| `conflicts` | List detected memory conflicts | `status`, `priority`, `limit` |\n| `patterns` | Discover recurring patterns | `domain`, `min_confidence`, `limit` |\n| `consolidate` | Trigger memory consolidation | `aggressive` |\n| `forget` | Remove specific memories | `rid` or `query` |\n| `stats` | Get memory statistics | - |\n\n资料来源：[MCP_REDESIGN.md:32-35]()\n\n### Tool Description Quality\n\nThe tool descriptions are designed to be comprehensive, telling agents not just what each tool does but *when* to call it. 
This guidance supports auto-pilot behavior, similar to the instruction blocks in Claude Code's configuration.\n\n资料来源：[MCP_REDESIGN.md:40-42]()\n\n## Configuration\n\n### Environment Variables\n\nThe MCP server accepts configuration through environment variables:\n\n| Variable | Default | Description |\n|----------|---------|-------------|\n| `YANTRIKDB_DB_PATH` | `memory.db` | Path to the SQLite database file |\n| `YANTRIKDB_EMBEDDING_MODEL` | `potion-base-2M` | Embedding model to use |\n| `YANTRIKDB_EMBEDDING_DIM` | 64 | Embedding dimension |\n\n资料来源：[MCP_REDESIGN.md:36-38]()\n\n### Installation\n\n```bash\npip install yantrikdb[mcp]\n```\n\nAfter installation, the server can be started with:\n\n```bash\nyantrikdb-mcp\n```\n\n### MCP Client Configuration\n\nAdd the following to your MCP client configuration (e.g., `mcp.json`):\n\n```json\n{\n  \"mcpServers\": {\n    \"yantrikdb\": {\n      \"command\": \"yantrikdb-mcp\",\n      \"env\": {\n        \"YANTRIKDB_DB_PATH\": \"/path/to/memory.db\"\n      }\n    }\n  }\n}\n```\n\n## Advanced Capabilities\n\nBeyond the basic remember/recall flow, the MCP server exposes advanced YantrikDB capabilities that are available from the Rust engine but not yet fully utilized by the agent workflow.\n\n资料来源：[MCP_REDESIGN.md:60-70]()\n\n### RecallQuery Builder Options\n\nThe underlying Rust engine supports rich query building:\n\n| Parameter | Type | Description |\n|-----------|------|-------------|\n| `top_k` | `usize` | Number of results to return |\n| `memory_type` | `string` | Filter by episodic, semantic, procedural, declarative |\n| `namespace` | `string` | Logical data partitioning |\n| `time_window` | `(f64, f64)` | Unix timestamp range filter |\n| `domain` | `string` | Subject area filter |\n| `source` | `string` | Memory origin filter |\n| `expand_entities` | `bool` | Include related entity details |\n\n资料来源：[crates/yantrikdb-core/src/cognition/query_dsl.rs:1-20]()\n\n### Conflict Resolution\n\nThe engine supports 
multiple conflict resolution strategies:\n\n| Strategy | Description |\n|----------|-------------|\n| `keep_a` | Preserve the first memory |\n| `keep_b` | Preserve the second memory |\n| `merge` | Combine both memories with temporal ordering |\n| `ask_user` | Defer resolution to user input |\n\n资料来源：[MCP_REDESIGN.md:62-65]()\n\n### Pattern Mining\n\nPattern mining can be configured with:\n\n- Custom confidence thresholds\n- Domain-specific pattern detection\n- Temporal pattern analysis\n- Entity relationship patterns\n\n资料来源：[MCP_REDESIGN.md:63-65]()\n\n### Personality Profile Extraction\n\nThe engine can extract personality profiles from memory interactions, enabling more personalized agent behavior over time.\n\n资料来源：[MCP_REDESIGN.md:64-66]()\n\n### Spaced Repetition Reinforcement\n\nMemory access automatically triggers spaced repetition reinforcement, strengthening frequently accessed memories and allowing less-used ones to decay naturally.\n\n资料来源：[MCP_REDESIGN.md:65-67]()\n\n### Batch Operations\n\nThe Python bindings support batch record operations for efficiency:\n\n```python\ndb = yantrikdb.YantrikDB.with_default(\"memory.db\")\ndb.record_batch([\n    {\"text\": \"Memory 1\", \"importance\": 0.8},\n    {\"text\": \"Memory 2\", \"importance\": 0.6},\n])\n```\n\n### Replication and Sync\n\nYantrikDB supports CRDT-based replication for multi-device synchronization:\n\n```python\nops = db.extract_ops_since(since_hlc=hlc, since_op_id=op_id)\ndb.apply_ops(ops)\n```\n\n资料来源：[crates/yantrikdb-python/src/py_engine/sync.rs:1-30]()\n\n## Memory Types\n\nThe system supports four primary memory types, each serving distinct cognitive purposes:\n\n| Memory Type | Purpose | Typical Use Case |\n|-------------|---------|------------------|\n| `episodic` | Temporal experiences and events | \"Yesterday I talked about project X\" |\n| `semantic` | Factual knowledge and concepts | \"The user prefers dark mode\" |\n| `procedural` | How-to knowledge and skills | \"How to run the 
test suite\" |\n| `declarative` | Explicitly stated facts | \"The deadline is March 30\" |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:1-30]()\n\n## Entity and Belief Management\n\n### Entity Graph\n\nThe entity graph maintains relationships between extracted entities:\n\n```mermaid\ngraph LR\n    A[Alice] -->|leads| B[Engineering]\n    B -->|part_of| C[Company]\n    A -->|works_with| D[Bob]\n```\n\n### Belief System\n\nBeliefs have provenance types indicating their source reliability:\n\n| Provenance | Reliability Prior | Description |\n|------------|-------------------|-------------|\n| `told` | 0.95 | User explicitly stated |\n| `observed` | 0.90 | Directly observed behavior |\n| `experimented` | 0.85 | Confirmed via controlled experiment |\n| `extracted` | 0.75 | From external documents |\n| `inferred` | 0.60 | Pattern-based inference |\n| `consolidated` | 0.80 | Merged from multiple sources |\n| `system_default` | 0.50 | Default values |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:1-50]()\n\n## Workflow Examples\n\n### Basic Memory Storage and Retrieval\n\n```python\n# Using Python library directly\nimport yantrikdb\n\ndb = yantrikdb.YantrikDB.with_default(\"memory.db\")\n\n# Store a memory\ndb.record(\"Alice is the engineering lead\", importance=0.8, domain=\"people\")\n\n# Retrieve relevant memories\nresults = db.recall(\"who leads the team?\", top_k=3)\n\n# Create a relationship\ndb.relate(\"Alice\", \"Engineering\", \"leads\")\n```\n\n### Triggering Cognitive Processing\n\n```python\n# Run the think() cognition loop\ndb.think()  # consolidate, detect conflicts, mine patterns\n```\n\nThis single call triggers:\n- Memory consolidation\n- Conflict detection and resolution\n- Pattern mining\n\n资料来源：[README.md:1-40]()\n\n## Cognitive Triggers\n\nThe system supports multiple trigger types for proactive memory maintenance:\n\n| Trigger | Default Cooldown | Default Expiry | Purpose 
|\n|---------|------------------|----------------|---------|\n| `decay_review` | 3 days | 7 days | Memory decay review |\n| `consolidation_ready` | 1 day | 3 days | Consolidation queue processing |\n| `conflict_escalation` | 2 days | 14 days | Unresolved conflict handling |\n| `temporal_drift` | 14 days | 7 days | Temporal anomaly detection |\n| `redundancy` | 1 day | 7 days | Duplicate memory cleanup |\n| `relationship_insight` | 7 days | 7 days | Entity relationship discovery |\n| `valence_trend` | 7 days | 7 days | Emotional pattern tracking |\n| `entity_anomaly` | 7 days | 7 days | Unusual entity behavior |\n| `pattern_discovered` | 7 days | 7 days | New pattern identification |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs:1-50]()\n\n## Future Enhancements\n\nThe MCP_REDESIGN.md outlines planned improvements:\n\n1. **Rich tool descriptions** — More detailed examples for auto-pilot behavior\n2. **Server instructions** — System prompt injection for agent guidance\n3. **Better error messages** — More informative feedback for debugging\n4. **Streaming responses** — For long-running operations\n5. 
**Progress indicators** — Real-time feedback during consolidation\n\n资料来源：[MCP_REDESIGN.md:45-55]()\n\n## See Also\n\n- [YantrikDB Core Engine](../core/engine.md)\n- [Python API Reference](../python/api.md)\n- [Conflict Resolution](../core/conflict-resolution.md)\n- [Consolidation System](../core/consolidation.md)\n\n---\n\n<a id='page-python-bindings'></a>\n\n## Python Bindings\n\n### 相关页面\n\n相关主题：[MCP Server Integration](#page-mcp-server), [Installation](#page-installation), [Core API Reference](#page-core-api)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [crates/yantrikdb-python/src/py_engine/memory.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-python/src/py_engine/memory.rs)\n- [crates/yantrikdb-python/src/py_engine/session_temporal.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-python/src/py_engine/session_temporal.rs)\n- [crates/yantrikdb-python/src/py_types.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-python/src/py_types.rs)\n- [crates/yantrikdb-core/src/engine/recall.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/recall.rs)\n- [crates/yantrikdb-core/src/cognition/state.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n</details>\n\n# Python Bindings\n\n## Overview\n\nThe Python bindings provide a native Python interface to yantrikdb, enabling Python developers to interact with the memory database using familiar Python idioms. 
Built on top of the Rust core using [pyo3](https://pyo3.rs/), the bindings expose the full functionality of yantrikdb while maintaining Pythonic conventions for parameter ordering and default values.\n\nThe `PyYantrikDB` class serves as the primary entry point, offering methods for recording memories, querying with semantic search, managing relationships between entities, and triggering cognitive consolidation processes.\n\n## Architecture\n\n```mermaid\ngraph TD\n    A[Python Application] --> B[PyYantrikDB]\n    B --> C[pyo3 Bridge Layer]\n    C --> D[yantrikdb-core]\n    D --> E[SQLite Storage]\n    D --> F[Vector Index]\n    \n    G[py_types.rs] --> C\n    G --> H[Type Conversions]\n    H --> I[PyObject ↔ Rust Structs]\n    \n    J[Default Embedder] --> B\n    J --> K[potion-base-2M<br/>dim=64]\n```\n\nThe binding layer consists of three main components:\n\n| Component | File | Purpose |\n|-----------|------|---------|\n| PyYantrikDB | `py_engine/mod.rs` | Main Python class exposing all methods |\n| Type Conversions | `py_types.rs` | Bidirectional conversion between Rust and Python types |\n| Engine Bridge | `py_engine/*.rs` | Method implementations delegating to core |\n\n## Core API Methods\n\n### Recording Memories\n\nThe `record()` method stores new memories in the database with semantic embeddings. 
It accepts text input and generates embeddings automatically using the bundled embedder, or accepts pre-computed embeddings for efficiency.\n\n```python\ndb.record(\n    text=\"Alice is the engineering lead\",\n    memory_type=\"episodic\",\n    importance=0.8,\n    valence=0.0,\n    half_life=604800.0,\n    certainty=0.8,\n    domain=\"people\",\n    source=\"user\",\n    namespace=\"default\",\n    emotional_state=None\n)\n```\n\n**Parameters:**\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `text` | `str` | Required | The memory content to store |\n| `memory_type` | `str` | `\"episodic\"` | Memory classification (episodic, semantic, etc.) |\n| `importance` | `float` | `0.5` | Significance score [0.0, 1.0] |\n| `valence` | `float` | `0.0` | Emotional valence [-1.0, 1.0] |\n| `half_life` | `float` | `604800.0` | Decay period in seconds (7 days default) |\n| `certainty` | `float` | `0.8` | Confidence in the memory's accuracy |\n| `domain` | `str` | `\"general\"` | Knowledge domain category |\n| `source` | `str` | `\"user\"` | Origin of the memory |\n| `namespace` | `str` | `\"default\"` | Logical partition for data isolation |\n| `emotional_state` | `str` | `None` | Emotional context at recording time |\n| `embedding` | `List[float]` | `None` | Pre-computed vector (auto-generated if omitted) |\n| `metadata` | `dict` | `None` | Arbitrary key-value metadata |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:13-30]()\n\n### Querying and Recall\n\nThe `recall()` method performs semantic search over stored memories, returning results ranked by relevance. 
It supports both text queries and pre-computed embedding vectors.\n\n```python\nresults = db.recall(\n    query=\"who leads the team?\",\n    top_k=10,\n    memory_type=None,\n    namespace=None,\n    time_window=None,\n    include_consolidated=False,\n    expand_entities=True,\n    skip_reinforce=False,\n    domain=None,\n    source=None\n)\n```\n\n**Parameters:**\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `query` | `str` | `None` | Natural language search query |\n| `query_embedding` | `List[float]` | `None` | Pre-computed embedding vector |\n| `top_k` | `int` | `10` | Maximum results to return |\n| `time_window` | `Tuple[float, float]` | `None` | Filter by Unix timestamp range |\n| `memory_type` | `str` | `None` | Filter by memory type |\n| `namespace` | `str` | `None` | Filter by namespace |\n| `domain` | `str` | `None` | Filter by domain |\n| `source` | `str` | `None` | Filter by source |\n| `include_consolidated` | `bool` | `False` | Include consolidated memories |\n| `expand_entities` | `bool` | `True` | Expand entity references |\n| `skip_reinforce` | `bool` | `False` | Skip reinforcement learning update |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:60-85]()\n\nThe `recall_text()` method provides a simplified interface for text-based queries with optional filtering:\n\n```python\nresults = db.recall_text(\n    query=\"who leads the team?\",\n    top_k=10,\n    namespace=None,\n    domain=None,\n    source=None\n)\n```\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:125-145]()\n\n### Procedural Memory\n\nProcedural memory stores task-related information and supports reinforcement learning for effectiveness tracking.\n\n```python\n# Record a procedural memory\nrid = db.record_procedural(\n    text=\"How to deploy to production\",\n    domain=\"devops\",\n    task_context=\"deployment workflow\",\n    effectiveness=0.5,\n    namespace=\"default\"\n)\n\n# Reinforce based on 
outcome\ndb.reinforce_procedural(rid, outcome=0.9)\n```\n\n**Parameters for `record_procedural`:**\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `text` | `str` | Required | Procedure description |\n| `embedding` | `List[float]` | `None` | Pre-computed vector |\n| `domain` | `str` | `\"general\"` | Task domain |\n| `task_context` | `str` | `\"\"` | Contextual information |\n| `effectiveness` | `float` | `0.5` | Initial effectiveness score |\n| `namespace` | `str` | `\"default\"` | Namespace partition |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/session_temporal.rs:45-60]()\n\n### Memory Correction\n\nThe `correct()` method allows updating existing memories with corrections, maintaining an audit trail of original content.\n\n```python\nresult = db.correct(\n    rid=\"existing-memory-rid\",\n    new_text=\"Updated information\",\n    new_importance=0.9,\n    new_valence=0.2,\n    embedding=None,\n    correction_note=\"Corrected factual error\"\n)\n```\n\n**Return Value:**\n\n| Field | Type | Description |\n|-------|------|-------------|\n| `original_rid` | `str` | ID of the original memory |\n| `corrected_rid` | `str` | ID of the new corrected memory |\n| `original_tombstoned` | `bool` | Whether original was soft-deleted |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:100-115]()\n\n### Memory Decay\n\nThe `decay()` method triggers decay calculations across all memories based on access patterns and half-life values.\n\n```python\ndecayed = db.decay(threshold=0.01)\n```\n\n**Parameters:**\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `threshold` | `float` | `0.01` | Minimum importance to retain |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:88-95]()\n\n## Default Embedding Model\n\nThe Python bindings include a bundled embedder (`potion-base-2M`) that provides 64-dimensional embeddings out of the box. 
This eliminates dependencies on external services like sentence-transformers or ONNX runtime.\n\n```mermaid\ngraph LR\n    A[Input Text] --> B[pyo3 embed_text]\n    B --> C[potion-base-2M]\n    C --> D[64-dim Vector]\n    D --> E[Storage/Recall]\n```\n\nThe embedder is invoked automatically when `embedding` parameters are omitted:\n\n```python\n# Auto-embedding\ndb.record(\"Alice is the engineering lead\")\n\n# Manual embedding\nvector = [0.1, 0.2, ...]  # 64 floats\ndb.record(\"Alice is the engineering lead\", embedding=vector)\n```\n\n资料来源：[README.md](https://github.com/yantrikos/yantrikdb/blob/main/README.md)\n\n## Initialization and Configuration\n\n### Creating a Database Instance\n\n```python\nimport yantrikdb\n\n# Default instance with bundled embedder\ndb = yantrikdb.YantrikDB.with_default(\"memory.db\")\n\n# Work with the database\ndb.record(\"Memory content\", importance=0.8)\nresults = db.recall(\"Query text\")\n\n# Always close when done\ndb.close()\n```\n\n资料来源：[README.md](https://github.com/yantrikos/yantrikdb/blob/main/README.md)\n\n## Type Conversions\n\nThe `py_types.rs` module handles bidirectional conversion between Rust structs and Python objects:\n\n| Rust Type | Python Type | Conversion Function |\n|-----------|-------------|---------------------|\n| `yantrikdb_core::Memory` | `dict` | `memory_to_dict()` |\n| `yantrikdb_core::RecallResult` | `dict` | `recall_result_to_dict()` |\n| `serde_json::Value` | `PyObject` | `json_to_py()` |\n| `Bound<PyDict>` | `serde_json::Value` | `py_to_json()` |\n\n资料来源：[crates/yantrikdb-python/src/py_types.rs:6-40]()\n\n### Memory to Dictionary\n\nThe `memory_to_dict()` function converts a core Memory struct to a Python dictionary matching the Python engine's expected output format:\n\n```rust\npub fn memory_to_dict(py: Python<'_>, mem: &yantrikdb_core::Memory) -> PyResult<PyObject> {\n    let dict = PyDict::new(py);\n    dict.set_item(\"rid\", &mem.rid)?;\n    dict.set_item(\"type\", &mem.memory_type)?;\n    
dict.set_item(\"text\", &mem.text)?;\n    dict.set_item(\"created_at\", mem.created_at)?;\n    dict.set_item(\"importance\", mem.importance)?;\n    // ... additional fields\n    Ok(dict.into())\n}\n```\n\n资料来源：[crates/yantrikdb-python/src/py_types.rs:8-25]()\n\n## Return Value Structure\n\n### Recall Results\n\nQuery results are returned as Python dictionaries with the following structure:\n\n```python\n{\n    \"rid\": \"memory-unique-id\",\n    \"type\": \"episodic\",\n    \"text\": \"Memory content\",\n    \"score\": 0.95,           # Relevance score\n    \"created_at\": 1234567890.0,\n    \"importance\": 0.8,\n    \"valence\": 0.0,\n    \"half_life\": 604800.0,\n    \"last_access\": 1234567890.0,\n    \"access_count\": 5,\n    \"consolidation_status\": \"stable\",\n    \"storage_tier\": \"hot\",\n    \"namespace\": \"default\",\n    \"certainty\": 0.8,\n    \"domain\": \"people\",\n    \"source\": \"user\",\n    \"emotional_state\": None,\n    \"metadata\": {}\n}\n```\n\n## Advanced Query Options\n\n### Recall with Sequence Verification\n\nFor applications requiring strong consistency guarantees, `recall_with_seq()` ensures query results reflect all prior writes:\n\n```python\n# After a write operation\ndb.record(\"New memory\", namespace=\"work\")\n\n# Ensure subsequent recall sees the write\nresults = db.recall_with_seq(\n    query_embedding=embedding,\n    top_k=10,\n    min_seq=prior_sequence,\n    namespace=\"work\",\n    timeout=timedelta(seconds=5)\n)\n```\n\n资料来源：[crates/yantrikdb-core/src/engine/recall.rs:50-80]()\n\n### Time-Window Filtering\n\nResults can be filtered to a specific time range using Unix timestamps:\n\n```python\nimport time\n\nnow = time.time()\nweek_ago = now - 604800  # 7 days\n\nresults = db.recall(\n    query=\"meetings\",\n    time_window=(week_ago, now)\n)\n```\n\n## Memory Types\n\nyantrikdb supports multiple memory types for different kinds of information:\n\n| Type | Description |\n|------|-------------|\n| `entity` | Factual 
knowledge about entities |\n| `episode` |事件性记忆 |\n| `belief` | User beliefs and opinions |\n| `goal` | Goals and objectives |\n| `task` | Tasks and action items |\n| `intent_hypothesis` | Hypothesized user intents |\n| `routine` | Recurring behavioral patterns |\n| `need` | User needs and requirements |\n| `opportunity` | Time-bounded opportunities |\n| `risk` | Potential problems |\n| `preference` | User preferences |\n| `conversation_thread` | Conversational context |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:120-145]()\n\n## Relationship Management\n\nBeyond storing individual memories, yantrikdb supports graph-like relationships between entities:\n\n```python\n# Define relationships\ndb.relate(\"Alice\", \"Engineering\", \"leads\")\ndb.relate(\"Alice\", \"Bob\", \"manages\")\n\n# Query relationships\nedges = db.get_edges(\"Alice\")\n```\n\nThe system supports relationship types including:\n\n| Type | Description |\n|------|-------------|\n| `supports` | Supporting evidence |\n| `contradicts` | Contradicting information |\n| `causes` | Causal relationship |\n| `predicts` | Predictive relationship |\n| `requires` | Prerequisite relationship |\n| `associated_with` | General association |\n| `similar_to` | Similarity connection |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:200-230]()\n\n## Cognitive Processing\n\n### Think Operation\n\nThe `think()` method triggers the cognitive processing pipeline:\n\n```python\ndb.think()  # Consolidate, detect conflicts, mine patterns\n```\n\nThis operation:\n1. Consolidates related memories\n2. Detects conflicts between beliefs\n3. Mines patterns from episodic data\n4. 
Updates procedural memory effectiveness\n\n## Error Handling\n\nThe Python bindings map Rust errors to appropriate Python exceptions:\n\n| Rust Error | Python Exception |\n|------------|------------------|\n| `RuntimeError` | `RuntimeError` |\n| `ValueError` | `ValueError` |\n| Storage errors | `RuntimeError` |\n\n```python\ntry:\n    db.record(\"Memory\")\nexcept RuntimeError as e:\n    print(f\"Database error: {e}\")\nexcept ValueError as e:\n    print(f\"Invalid input: {e}\")\n```\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:20-25]()\n\n## Best Practices\n\n1. **Always close the database** when done to ensure proper cleanup:\n   ```python\n   db = yantrikdb.YantrikDB(\"memory.db\")\n   try:\n       # operations\n   finally:\n       db.close()\n   ```\n\n2. **Use context managers** when possible for automatic cleanup\n\n3. **Batch operations** when recording multiple related memories\n\n4. **Choose appropriate namespaces** to partition data logically\n\n5. **Set importance values** appropriately to control memory retention and retrieval priority\n\n---\n\n---\n\n## Doramagic 踩坑日志\n\n项目：yantrikos/yantrikdb\n\n摘要：发现 24 个潜在踩坑项，其中 0 个为 high/blocking；最高优先级：安装坑 - 来源证据：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication。\n\n## 1. 安装坑 · 来源证据：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication\n- 对用户的影响：可能阻塞安装或首次运行。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_4ab95be6a3ac4fb192053e8c3829f762 | https://github.com/yantrikos/yantrikdb/issues/9 | 来源讨论提到 node 相关条件，需在安装/试用前复核。\n\n## 2. 
安装坑 · 来源证据：Bug: `namespace` parameter ignored in batch `remember` calls — memories always stored under `default`\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：Bug: `namespace` parameter ignored in batch `remember` calls — memories always stored under `default`\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_c37cd96e9c8d476880caca4f7314118e | https://github.com/yantrikos/yantrikdb/issues/2 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 3. 安装坑 · 来源证据：Migration v14→v15 fails: ALTER TABLE on edges view\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：Migration v14→v15 fails: ALTER TABLE on edges view\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_bb378d100e9d472892b1d5e42e640cad | https://github.com/yantrikos/yantrikdb/issues/10 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 4. 安装坑 · 来源证据：[bug] Tombstoned memories still appear in similarity-scan recall results\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：[bug] Tombstoned memories still appear in similarity-scan recall results\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_aa3d426055a44483b47ffd3b9f3fdb6a | https://github.com/yantrikos/yantrikdb/issues/8 | 来源类型 github_issue 暴露的待验证使用条件。\n\n## 5. 
安装坑 · 来源证据：[bug] YANTRIKDB_ENCRYPTION_KEY_HEX env var ignored — encryption silently disabled\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：[bug] YANTRIKDB_ENCRYPTION_KEY_HEX env var ignored — encryption silently disabled\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_17652fc680ba4b64bee5018b2d1514e4 | https://github.com/yantrikos/yantrikdb/issues/6 | 来源讨论提到 docker 相关条件，需在安装/试用前复核。\n\n## 6. 安装坑 · 来源证据：v0.7.10 — Fix has_embedder() for Python-side embedders (plugin#4)\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.10 — Fix has_embedder() for Python-side embedders (plugin#4)\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_daa2ca5265524c83bb21727be2a980a1 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.10 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 7. 安装坑 · 来源证据：v0.7.11 — pyo3 0.28.3 + python3.14 Support\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.11 — pyo3 0.28.3 + python3.14 Support\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_91b7975fce7d49b6b87ef05b914e80b2 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.11 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 8. 安装坑 · 来源证据：v0.7.4 — Python Bindings: with_default + record_text/recall_text\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.4 — Python Bindings: with_default + record_text/recall_text\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_54938994017d4b5899ad9cef4e6a2723 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.4 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 9. 
安装坑 · 来源证据：v0.7.5 — Python UX: TypeError Guard + embedder-download in Default Wheel\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.5 — Python UX: TypeError Guard + embedder-download in Default Wheel\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_be61ad4afd5b4f669a6f727d727474c4 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.5 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 10. 配置坑 · 可能修改宿主 AI 配置\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：项目面向 Claude/Cursor/Codex/Gemini/OpenCode 等宿主，或安装命令涉及用户配置目录。\n- 对用户的影响：安装可能改变本机 AI 工具行为，用户需要知道写入位置和回滚方法。\n- 建议检查：列出会写入的配置文件、目录和卸载/回滚步骤。\n- 防护动作：涉及宿主配置目录时必须给回滚路径，不能只给安装命令。\n- 证据：capability.host_targets | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | host_targets=mcp_host, claude, claude_code\n\n## 11. 配置坑 · 来源证据：v0.7.7 — recall_text Keyword-Only Filter Args\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个配置相关的待验证问题：v0.7.7 — recall_text Keyword-Only Filter Args\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_45587e0ca02f4e95ac36c364d3a88519 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.7 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 12. 能力坑 · 能力判断依赖假设\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：README/documentation is current enough for a first validation pass.\n- 对用户的影响：假设不成立时，用户拿不到承诺的能力。\n- 建议检查：将假设转成下游验证清单。\n- 防护动作：假设必须转成验证项；没有验证结果前不能写成事实。\n- 证据：capability.assumptions | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | README/documentation is current enough for a first validation pass.\n\n## 13. 
运行坑 · 来源证据：think() runs consolidation before conflict detection — contradictions get merged\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个运行相关的待验证问题：think() runs consolidation before conflict detection — contradictions get merged\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_6908447fb6a6482f89b1a85e714de42a | https://github.com/yantrikos/yantrikdb/issues/1 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 14. 维护坑 · 维护活跃度未知\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：未记录 last_activity_observed。\n- 对用户的影响：新项目、停更项目和活跃项目会被混在一起，推荐信任度下降。\n- 建议检查：补 GitHub 最近 commit、release、issue/PR 响应信号。\n- 防护动作：维护活跃度未知时，推荐强度不能标为高信任。\n- 证据：evidence.maintainer_signals | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | last_activity_observed missing\n\n## 15. 安全/权限坑 · 下游验证发现风险项\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：no_demo\n- 对用户的影响：下游已经要求复核，不能在页面中弱化。\n- 建议检查：进入安全/权限治理复核队列。\n- 防护动作：下游风险存在时必须保持 review/recommendation 降级。\n- 证据：downstream_validation.risk_items | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | no_demo; severity=medium\n\n## 16. 安全/权限坑 · 存在安全注意事项\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：No sandbox install has been executed yet; downstream must verify before user use.\n- 对用户的影响：用户安装前需要知道权限边界和敏感操作。\n- 建议检查：转成明确权限清单和安全审查提示。\n- 防护动作：安全注意事项必须面向用户前置展示。\n- 证据：risks.safety_notes | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | No sandbox install has been executed yet; downstream must verify before user use.\n\n## 17. 安全/权限坑 · 存在评分风险\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：no_demo\n- 对用户的影响：风险会影响是否适合普通用户安装。\n- 建议检查：把风险写入边界卡，并确认是否需要人工复核。\n- 防护动作：评分风险必须进入边界卡，不能只作为内部分数。\n- 证据：risks.scoring_risks | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | no_demo; severity=medium\n\n## 18. 
安全/权限坑 · 来源证据：[bug] POST /v1/admin/snapshot unusable in single-node mode — requires cluster master token that doesn't exist\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：[bug] POST /v1/admin/snapshot unusable in single-node mode — requires cluster master token that doesn't exist\n- 对用户的影响：可能影响授权、密钥配置或安全边界。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_80497be2ab644e66be4fec1a966b4c10 | https://github.com/yantrikos/yantrikdb/issues/7 | 来源讨论提到 node 相关条件，需在安装/试用前复核。\n\n## 19. 安全/权限坑 · 来源证据：[bug] at-rest encryption `key_hex` in TOML has no effect on disk (v0.5.0)\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：[bug] at-rest encryption `key_hex` in TOML has no effect on disk (v0.5.0)\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_ca7c8f7ee1384f9d97652734d01b8d67 | https://github.com/yantrikos/yantrikdb/issues/3 | 来源讨论提到 docker 相关条件，需在安装/试用前复核。\n\n## 20. 安全/权限坑 · 来源证据：v0.7.6 — Drop sentence-transformers + numpy from Default Deps\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：v0.7.6 — Drop sentence-transformers + numpy from Default Deps\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_40bcf8933f1b4ec7a559a746497c3bae | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.6 | 来源讨论提到 windows 相关条件，需在安装/试用前复核。\n\n## 21. 
安全/权限坑 · 来源证据：v0.7.8 — Extended Idempotent Migration Runner (closes #10)\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：v0.7.8 — Extended Idempotent Migration Runner (closes #10)\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_e5a77701b7ac401a863105d996cb585c | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.8 | 来源类型 github_release 暴露的待验证使用条件。\n\n## 22. 安全/权限坑 · 来源证据：v0.7.9 — Bundle potion-multilingual-128M (101 Languages) in embedder-download Registry\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：v0.7.9 — Bundle potion-multilingual-128M (101 Languages) in embedder-download Registry\n- 对用户的影响：可能影响授权、密钥配置或安全边界。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_7a590e518c884b5b9a2bbdc995c372fd | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.9 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 23. 维护坑 · issue/PR 响应质量未知\n\n- 严重度：low\n- 证据强度：source_linked\n- 发现：issue_or_pr_quality=unknown。\n- 对用户的影响：用户无法判断遇到问题后是否有人维护。\n- 建议检查：抽样最近 issue/PR，判断是否长期无人处理。\n- 防护动作：issue/PR 响应未知时，必须提示维护风险。\n- 证据：evidence.maintainer_signals | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | issue_or_pr_quality=unknown\n\n## 24. 维护坑 · 发布节奏不明确\n\n- 严重度：low\n- 证据强度：source_linked\n- 发现：release_recency=unknown。\n- 对用户的影响：安装命令和文档可能落后于代码，用户踩坑概率升高。\n- 建议检查：确认最近 release/tag 和 README 安装命令是否一致。\n- 防护动作：发布节奏未知或过期时，安装说明必须标注可能漂移。\n- 证据：evidence.maintainer_signals | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | release_recency=unknown\n\n<!-- canonical_name: yantrikos/yantrikdb; human_manual_source: deepwiki_human_wiki -->\n",
      "markdown_key": "yantrikdb",
      "pages": "draft",
      "source_refs": [
        {
          "evidence_id": "github_repo:1164482810",
          "kind": "repo",
          "supports_claim_ids": [
            "claim_identity",
            "claim_distribution",
            "claim_capability"
          ],
          "url": "https://github.com/yantrikos/yantrikdb"
        },
        {
          "evidence_id": "art_5d8de236598d4951b74656487f7d85ac",
          "kind": "docs",
          "supports_claim_ids": [
            "claim_identity",
            "claim_distribution",
            "claim_capability"
          ],
          "url": "https://github.com/yantrikos/yantrikdb#readme"
        }
      ],
      "summary": "DeepWiki/Human Wiki 完整输出，末尾追加 Discovery Agent 踩坑日志。",
      "title": "yantrikdb 说明书",
      "toc": [
        "https://github.com/yantrikos/yantrikdb 项目说明书",
        "目录",
        "Overview",
        "Architecture Overview",
        "Cognitive Node Model",
        "Cognitive Edge Model",
        "Cognitive Operators",
        "Need Categories",
        "Doramagic 踩坑日志"
      ]
    }
  },
  "quality_gate": {
    "blocking_gaps": [],
    "category_confidence": "medium",
    "compile_status": "ready_for_review",
    "five_assets_present": true,
    "install_sandbox_verified": true,
    "missing_evidence": [],
    "next_action": "publish to Doramagic.ai project surfaces",
    "prompt_preview_boundary_ok": true,
    "publish_status": "publishable",
    "quick_start_verified": true,
    "repo_clone_verified": true,
    "repo_commit": "c4625f695aaf185eb2a5021be91af421ed95bd26",
    "repo_inspection_error": null,
    "repo_inspection_files": [
      "pyproject.toml",
      "README.md",
      "uv.lock",
      "docs/phase_4_3_design.md",
      "docs/wedge_empirical_baseline_2026-05-06.md",
      "docs/wedge_lock_scope_audit_2026-05-06.md",
      "docs/wedge_concurrency_sweep_2026-05-07.md",
      "docs/decoupled_write_path_rfc.md",
      "docs/whitepaper/aidb_whitepaper.md",
      "docs/showcase/wirecard.md",
      "src/yantrikdb/cli.py",
      "src/yantrikdb/consolidate.py",
      "src/yantrikdb/api.py",
      "src/yantrikdb/triggers.py",
      "src/yantrikdb/__init__.py",
      "src/yantrikdb/mcp/tools.py",
      "src/yantrikdb/mcp/resources.py",
      "src/yantrikdb/mcp/server.py",
      "src/yantrikdb/mcp/__init__.py",
      "src/yantrikdb/adapters/crewai.py",
      "src/yantrikdb/adapters/__init__.py",
      "src/yantrikdb/adapters/langchain.py",
      "src/yantrikdb/adapters/openai_agents.py",
      "src/yantrikdb/eval/harness.py",
      "src/yantrikdb/eval/persona_marcus.py",
      "src/yantrikdb/eval/persona_aisha.py",
      "src/yantrikdb/eval/life_simulation.py",
      "src/yantrikdb/eval/synthetic.py",
      "src/yantrikdb/eval/__init__.py",
      "src/yantrikdb/agent/companion.py",
      "src/yantrikdb/agent/background.py",
      "src/yantrikdb/agent/tools.py",
      "src/yantrikdb/agent/embedder.py",
      "src/yantrikdb/agent/voice.py",
      "src/yantrikdb/agent/learning.py",
      "src/yantrikdb/agent/llm.py",
      "src/yantrikdb/agent/context.py",
      "src/yantrikdb/agent/service.py",
      "src/yantrikdb/agent/urges.py",
      "src/yantrikdb/agent/__init__.py"
    ],
    "repo_inspection_verified": true,
    "review_reasons": [],
    "tag_count_ok": true,
    "unsupported_claims": []
  },
  "schema_version": "0.1",
  "user_assets": {
    "ai_context_pack": {
      "asset_id": "ai_context_pack",
      "filename": "AI_CONTEXT_PACK.md",
      "markdown": "# yantrikdb - Doramagic AI Context Pack\n\n> 定位：安装前体验与判断资产。它帮助宿主 AI 有一个好的开始，但不代表已经安装、执行或验证目标项目。\n\n## 充分原则\n\n- **充分原则，不是压缩原则**：AI Context Pack 应该充分到让宿主 AI 在开工前理解项目价值、能力边界、使用入口、风险和证据来源；它可以分层组织，但不以最短摘要为目标。\n- **压缩策略**：只压缩噪声和重复内容，不压缩会影响判断和开工质量的上下文。\n\n## 给宿主 AI 的使用方式\n\n你正在读取 Doramagic 为 yantrikdb 编译的 AI Context Pack。请把它当作开工前上下文：帮助用户理解适合谁、能做什么、如何开始、哪些必须安装后验证、风险在哪里。不要声称你已经安装、运行或执行了目标项目。\n\n## Claim 消费规则\n\n- **事实来源**：Repo Evidence + Claim/Evidence Graph；Human Wiki 只提供显著性、术语和叙事结构。\n- **事实最低状态**：`supported`\n- `supported`：可以作为项目事实使用，但回答中必须引用 claim_id 和证据路径。\n- `weak`：只能作为低置信度线索，必须要求用户继续核实。\n- `inferred`：只能用于风险提示或待确认问题，不能包装成项目事实。\n- `unverified`：不得作为事实使用，应明确说证据不足。\n- `contradicted`：必须展示冲突来源，不得替用户强行选择一个版本。\n\n## 它最适合谁\n\n- **AI 研究者或研究型 Agent 构建者**：README 明确围绕研究、实验或论文工作流展开。 证据：`README.md` Claim：`clm_0002` supported 0.86\n- **正在使用 Claude/Codex/Cursor/Gemini 等宿主 AI 的开发者**：README 或插件配置提到多个宿主 AI。 证据：`README.md` Claim：`clm_0003` supported 0.86\n\n## 它能做什么\n\n- **命令行启动或安装流程**（需要安装后验证）：项目文档中存在可执行命令，真实使用需要在本地或宿主环境中运行这些命令。 证据：`README.md` Claim：`clm_0001` supported 0.86\n\n## 怎么开始\n\n- `pip install yantrikdb-mcp` 证据：`README.md` Claim：`clm_0004` supported 0.86\n- `pip install yantrikdb` 证据：`README.md` Claim：`clm_0004` supported 0.86, `clm_0005` supported 0.86\n\n## 继续前判断卡\n\n- **当前建议**：先做权限沙盒试用\n- **为什么**：项目存在安装命令、宿主配置或本地写入线索，不建议直接进入主力环境，应先在隔离环境试装。\n\n### 30 秒判断\n\n- **现在怎么做**：先做权限沙盒试用\n- **最小安全下一步**：先跑 Prompt Preview；若仍要安装，只在隔离环境试装\n- **先别相信**：工具权限边界不能在安装前相信。\n- **继续会触碰**：命令执行、本地环境或项目文件、宿主 AI 上下文\n\n### 现在可以相信\n\n- **适合人群线索：AI 研究者或研究型 Agent 构建者**（supported）：有 supported claim 或项目证据支撑，但仍不等于真实安装效果。 证据：`README.md` Claim：`clm_0002` supported 0.86\n- **适合人群线索：正在使用 Claude/Codex/Cursor/Gemini 等宿主 AI 的开发者**（supported）：有 supported claim 或项目证据支撑，但仍不等于真实安装效果。 证据：`README.md` Claim：`clm_0003` supported 0.86\n- **能力存在：命令行启动或安装流程**（supported）：可以相信项目包含这类能力线索；是否适合你的具体任务仍要试用或安装后验证。 证据：`README.md` Claim：`clm_0001` supported 0.86\n- **存在 Quick Start / 
安装命令线索**（supported）：可以相信项目文档出现过启动或安装入口；不要因此直接在主力环境运行。 证据：`README.md` Claim：`clm_0004` supported 0.86\n\n### 现在还不能相信\n\n- **工具权限边界不能在安装前相信。**（unverified）：MCP/tool 类项目通常会触碰文件、网络、浏览器或外部 API，必须真实检查权限和日志。\n- **真实输出质量不能在安装前相信。**（unverified）：Prompt Preview 只能展示引导方式，不能证明真实项目中的结果质量。\n- **宿主 AI 版本兼容性不能在安装前相信。**（unverified）：Claude、Cursor、Codex、Gemini 等宿主加载规则和版本差异必须在真实环境验证。\n- **不会污染现有宿主 AI 行为，不能直接相信。**（inferred）：Skill、plugin、AGENTS/CLAUDE/GEMINI 指令可能改变宿主 AI 的默认行为。\n- **可安全回滚不能默认相信。**（unverified）：除非项目明确提供卸载和恢复说明，否则必须先在隔离环境验证。\n- **真实安装后是否与用户当前宿主 AI 版本兼容？**（unverified）：兼容性只能通过实际宿主环境验证。\n- **项目输出质量是否满足用户具体任务？**（unverified）：安装前预览只能展示流程和边界，不能替代真实评测。\n- **安装命令是否需要网络、权限或全局写入？**（unverified）：这影响企业环境和个人环境的安装风险。 证据：`README.md`\n\n### 继续会触碰什么\n\n- **命令执行**：包管理器、网络下载、本地插件目录、项目配置或用户主目录。 原因：运行第一条命令就可能产生环境改动；必须先判断是否值得跑。 证据：`README.md`\n- **本地环境或项目文件**：安装结果、插件缓存、项目配置或本地依赖目录。 原因：安装前无法证明写入范围和回滚方式，需要隔离验证。 证据：`README.md`\n- **宿主 AI 上下文**：AI Context Pack、Prompt Preview、Skill 路由、风险规则和项目事实。 原因：导入上下文会影响宿主 AI 后续判断，必须避免把未验证项包装成事实。\n\n### 最小安全下一步\n\n- **先跑 Prompt Preview**：用安装前交互式试用判断工作方式是否匹配，不需要授权或改环境。（适用：任何项目都适用，尤其是输出质量未知时。）\n- **只在隔离目录或测试账号试装**：避免安装命令污染主力宿主 AI、真实项目或用户主目录。（适用：存在命令执行、插件配置或本地写入线索时。）\n- **安装后只验证一个最小任务**：先验证加载、兼容、输出质量和回滚，再决定是否深用。（适用：准备从试用进入真实工作流时。）\n\n### 退出方式\n\n- **保留安装前状态**：记录原始宿主配置和项目状态，后续才能判断是否可恢复。\n- **记录安装命令和写入路径**：没有明确卸载说明时，至少要知道哪些目录或配置需要手动清理。\n- **如果没有回滚路径，不进入主力环境**：不可回滚是继续前阻断项，不应靠信任或运气继续。\n\n## 哪些只能预览\n\n- 解释项目适合谁和能做什么\n- 基于项目文档演示典型对话流程\n- 帮助用户判断是否值得安装或继续研究\n\n## 哪些必须安装后验证\n\n- 真实安装 Skill、插件或 CLI\n- 执行脚本、修改本地文件或访问外部服务\n- 验证真实输出质量、性能和兼容性\n\n## 边界与风险判断卡\n\n- **把安装前预览误认为真实运行**：用户可能高估项目已经完成的配置、权限和兼容性验证。 处理方式：明确区分 prompt_preview_can_do 与 runtime_required。 Claim：`clm_0006` inferred 0.45\n- **命令执行会修改本地环境**：安装命令可能写入用户主目录、宿主插件目录或项目配置。 处理方式：先在隔离环境或测试账号中运行。 证据：`README.md` Claim：`clm_0007` supported 0.86\n- **待确认**：真实安装后是否与用户当前宿主 AI 版本兼容？原因：兼容性只能通过实际宿主环境验证。\n- **待确认**：项目输出质量是否满足用户具体任务？原因：安装前预览只能展示流程和边界，不能替代真实评测。\n- **待确认**：安装命令是否需要网络、权限或全局写入？原因：这影响企业环境和个人环境的安装风险。\n\n## 开工前工作上下文\n\n### 
加载顺序\n\n- 先读取 how_to_use.host_ai_instruction，建立安装前判断资产的边界。\n- 读取 claim_graph_summary，确认事实来自 Claim/Evidence Graph，而不是 Human Wiki 叙事。\n- 再读取 intended_users、capabilities 和 quick_start_candidates，判断用户是否匹配。\n- 需要执行具体任务时，优先查 role_skill_index，再查 evidence_index。\n- 遇到真实安装、文件修改、网络访问、性能或兼容性问题时，转入 risk_card 和 boundaries.runtime_required。\n\n### 任务路由\n\n- **命令行启动或安装流程**：先说明这是安装后验证能力，再给出安装前检查清单。 边界：必须真实安装或运行后验证。 证据：`README.md` Claim：`clm_0001` supported 0.86\n\n### 上下文规模\n\n- 文件总数：240\n- 重要文件覆盖：25/240\n- 证据索引条目：24\n- 角色 / Skill 条目：11\n\n### 证据不足时的处理\n\n- **missing_evidence**：说明证据不足，要求用户提供目标文件、README 段落或安装后验证记录；不要补全事实。\n- **out_of_scope_request**：说明该任务超出当前 AI Context Pack 证据范围，并建议用户先查看 Human Manual 或真实安装后验证。\n- **runtime_request**：给出安装前检查清单和命令来源，但不要替用户执行命令或声称已执行。\n- **source_conflict**：同时展示冲突来源，标记为待核实，不要强行选择一个版本。\n\n## Prompt Recipes\n\n### 适配判断\n\n- 目标：判断这个项目是否适合用户当前任务。\n- 预期输出：适配结论、关键理由、证据引用、安装前可预览内容、必须安装后验证内容、下一步建议。\n\n```text\n请基于 yantrikdb 的 AI Context Pack，先问我 3 个必要问题，然后判断它是否适合我的任务。回答必须包含：适合谁、能做什么、不能做什么、是否值得安装、证据来自哪里。所有项目事实必须引用 evidence_refs、source_paths 或 claim_id。\n```\n\n### 安装前体验\n\n- 目标：让用户在安装前感受核心工作流，同时避免把预览包装成真实能力或营销承诺。\n- 预期输出：一段带边界标签的体验剧本、安装后验证清单和谨慎建议；不含真实运行承诺或强营销表述。\n\n```text\n请把 yantrikdb 当作安装前体验资产，而不是已安装工具或真实运行环境。\n\n请严格输出四段：\n1. 先问我 3 个必要问题。\n2. 给出一段“体验剧本”：用 [安装前可预览]、[必须安装后验证]、[证据不足] 三种标签展示它可能如何引导工作流。\n3. 给出安装后验证清单：列出哪些能力只有真实安装、真实宿主加载、真实项目运行后才能确认。\n4. 
给出谨慎建议：只能说“值得继续研究/试装”“先补充信息后再判断”或“不建议继续”，不得替项目背书。\n\n硬性边界：\n- 不要声称已经安装、运行、执行测试、修改文件或产生真实结果。\n- 不要写“自动适配”“确保通过”“完美适配”“强烈建议安装”等承诺性表达。\n- 如果描述安装后的工作方式，必须使用“如果安装成功且宿主正确加载 Skill，它可能会……”这种条件句。\n- 体验剧本只能写成“示例台词/假设流程”：使用“可能会询问/可能会建议/可能会展示”，不要写“已写入、已生成、已通过、正在运行、正在生成”。\n- Prompt Preview 不负责给安装命令；如用户准备试装，只能提示先阅读 Quick Start 和 Risk Card，并在隔离环境验证。\n- 所有项目事实必须来自 supported claim、evidence_refs 或 source_paths；inferred/unverified 只能作风险或待确认项。\n\n```\n\n### 角色 / Skill 选择\n\n- 目标：从项目里的角色或 Skill 中挑选最匹配的资产。\n- 预期输出：候选角色或 Skill 列表，每项包含适用场景、证据路径、风险边界和是否需要安装后验证。\n\n```text\n请读取 role_skill_index，根据我的目标任务推荐 3-5 个最相关的角色或 Skill。每个推荐都要说明适用场景、可能输出、风险边界和 evidence_refs。\n```\n\n### 风险预检\n\n- 目标：安装或引入前识别环境、权限、规则冲突和质量风险。\n- 预期输出：环境、权限、依赖、许可、宿主冲突、质量风险和未知项的检查清单。\n\n```text\n请基于 risk_card、boundaries 和 quick_start_candidates，给我一份安装前风险预检清单。不要替我执行命令，只说明我应该检查什么、为什么检查、失败会有什么影响。\n```\n\n### 宿主 AI 开工指令\n\n- 目标：把项目上下文转成一次对话开始前的宿主 AI 指令。\n- 预期输出：一段边界明确、证据引用明确、适合复制给宿主 AI 的开工前指令。\n\n```text\n请基于 yantrikdb 的 AI Context Pack，生成一段我可以粘贴给宿主 AI 的开工前指令。这段指令必须遵守 not_runtime=true，不能声称项目已经安装、运行或产生真实结果。\n```\n\n\n## 角色 / Skill 索引\n\n- 共索引 11 个角色 / Skill / 项目文档条目。\n\n- **YantrikDB — A Cognitive Memory Engine for Persistent AI Systems**（project_doc）：YantrikDB — A Cognitive Memory Engine for Persistent AI Systems 激活提示：当用户需要理解项目结构、安装方式或边界时参考。 证据：`README.md`\n- **RFC: Decoupled Write Path engine v0.7.0**（project_doc）：RFC: Decoupled Write Path engine v0.7.0 激活提示：当用户需要理解项目结构、安装方式或边界时参考。 证据：`docs/decoupled_write_path_rfc.md`\n- **Phase 4.3 — SQL writes off foreground design memo**（project_doc）：Phase 4.3 — SQL writes off foreground design memo 激活提示：当用户需要理解项目结构、安装方式或边界时参考。 证据：`docs/phase_4_3_design.md`\n- **Wedge — Concurrency Scaling Sweep**（project_doc）：Date: 2026-05-07 Engine version: v0.6.5 @ 36ba7da clean main, no Patch A Harness: crates/yantrikdb-core/examples/wedge repro.rs Params: dim=384, warmup=2000 records, duration=20s, readers=4, writers ∈ {1, 4, 8, 16, 32} Goal: characterize the wedge knee — at what writer concurrency 
does the engine start to bleed? 激活提示：当用户需要理解项目结构、安装方式或边界时参考。 证据：`docs/wedge_concurrency_sweep_2026-05-07.md`\n- **Wedge — Empirical Baseline**（project_doc）：Captured by: yantrikdb-core claude-opus-4-7 Date: 2026-05-06 Harness: crates/yantrikdb-core/examples/wedge repro.rs Engine version: v0.6.5 @ 36ba7da Goal: empirically confirm the lock-scope audit's mechanism hypothesis audit doc docs/wedge lock scope audit 2026-05-06.md and establish baseline numbers for measuring fixes against. 激活提示：当用户需要理解项目结构、安装方式或边界时参考。 证据：`docs/wedge_empirical_baseline_2026-05-06.md`\n- **Lock-Scope Audit — Engine Hot Paths**（project_doc）：Lock-Scope Audit — Engine Hot Paths 激活提示：当用户需要理解项目结构、安装方式或边界时参考。 证据：`docs/wedge_lock_scope_audit_2026-05-06.md`\n- **What a claim-graph sees that a vector DB doesn't**（project_doc）：What a claim-graph sees that a vector DB doesn't 激活提示：当用户需要理解项目结构、安装方式或边界时参考。 证据：`docs/showcase/wirecard.md`\n- **1. Introduction**（project_doc）：The emergence of large language models has created AI systems capable of sophisticated reasoning, yet fundamentally amnesic. Each conversation begins from zero. Every user preference must be re-stated. No continuity of relationship develops over time. This is not merely an inconvenience---it represents a structural barrier to AI systems that genuinely know their users. 激活提示：当用户需要理解项目结构、安装方式或边界时参考。 证据：`docs/whitepaper/aidb_whitepaper.md`\n- **YantrikDB Concurrency Invariants**（project_doc）：This document records the load-bearing concurrency invariants of the yantrikdb engine. Several inline // See CONCURRENCY.md comments in the code resolve to this file. Violating any of these silently regresses the wedge fix or correctness; review carefully before changing the affected code paths. 激活提示：当用户需要理解项目结构、安装方式或边界时参考。 证据：`CONCURRENCY.md`\n- **Contributors**（project_doc）：YantrikDB is primarily developed by Pranab Sarkar https://github.com/spranab with substantive contributions from the wider community. 
This file records contributions that didn't render correctly in commit metadata or warrant standalone acknowledgment. 激活提示：当用户需要理解项目结构、安装方式或边界时参考。 证据：`CONTRIBUTORS.md`\n- **YantrikDB MCP Server Redesign — Session Brief**（project_doc）：YantrikDB MCP Server Redesign — Session Brief 激活提示：当用户需要理解项目结构、安装方式或边界时参考。 证据：`MCP_REDESIGN.md`\n\n## 证据索引\n\n- 共索引 24 条证据。\n\n- **YantrikDB — A Cognitive Memory Engine for Persistent AI Systems**（documentation）：YantrikDB — A Cognitive Memory Engine for Persistent AI Systems 证据：`README.md`\n- **License**（source_file）：GNU AFFERO GENERAL PUBLIC LICENSE Version 3, 19 November 2007 证据：`LICENSE`\n- **License**（source_file）：GNU AFFERO GENERAL PUBLIC LICENSE Version 3, 19 November 2007 证据：`crates/yantrikdb-python/LICENSE`\n- **RFC: Decoupled Write Path engine v0.7.0**（documentation）：RFC: Decoupled Write Path engine v0.7.0 证据：`docs/decoupled_write_path_rfc.md`\n- **Phase 4.3 — SQL writes off foreground design memo**（documentation）：Phase 4.3 — SQL writes off foreground design memo 证据：`docs/phase_4_3_design.md`\n- **Wedge — Concurrency Scaling Sweep**（documentation）：Date: 2026-05-07 Engine version: v0.6.5 @ 36ba7da clean main, no Patch A Harness: crates/yantrikdb-core/examples/wedge repro.rs Params: dim=384, warmup=2000 records, duration=20s, readers=4, writers ∈ {1, 4, 8, 16, 32} Goal: characterize the wedge knee — at what writer concurrency does the engine start to bleed? 证据：`docs/wedge_concurrency_sweep_2026-05-07.md`\n- **Wedge — Empirical Baseline**（documentation）：Captured by: yantrikdb-core claude-opus-4-7 Date: 2026-05-06 Harness: crates/yantrikdb-core/examples/wedge repro.rs Engine version: v0.6.5 @ 36ba7da Goal: empirically confirm the lock-scope audit's mechanism hypothesis audit doc docs/wedge lock scope audit 2026-05-06.md and establish baseline numbers for measuring fixes against. 
证据：`docs/wedge_empirical_baseline_2026-05-06.md`\n- **Lock-Scope Audit — Engine Hot Paths**（documentation）：Lock-Scope Audit — Engine Hot Paths 证据：`docs/wedge_lock_scope_audit_2026-05-06.md`\n- **What a claim-graph sees that a vector DB doesn't**（documentation）：What a claim-graph sees that a vector DB doesn't 证据：`docs/showcase/wirecard.md`\n- **1. Introduction**（documentation）：The emergence of large language models has created AI systems capable of sophisticated reasoning, yet fundamentally amnesic. Each conversation begins from zero. Every user preference must be re-stated. No continuity of relationship develops over time. This is not merely an inconvenience---it represents a structural barrier to AI systems that genuinely know their users. 证据：`docs/whitepaper/aidb_whitepaper.md`\n- **YantrikDB Concurrency Invariants**（documentation）：This document records the load-bearing concurrency invariants of the yantrikdb engine. Several inline // See CONCURRENCY.md comments in the code resolve to this file. Violating any of these silently regresses the wedge fix or correctness; review carefully before changing the affected code paths. 证据：`CONCURRENCY.md`\n- **Contributors**（documentation）：YantrikDB is primarily developed by Pranab Sarkar https://github.com/spranab with substantive contributions from the wider community. This file records contributions that didn't render correctly in commit metadata or warrant standalone acknowledgment. 
证据：`CONTRIBUTORS.md`\n- **YantrikDB MCP Server Redesign — Session Brief**（documentation）：YantrikDB MCP Server Redesign — Session Brief 证据：`MCP_REDESIGN.md`\n- **Config**（structured_config）：{\"model type\": \"model2vec\", \"architectures\": \"StaticModel\" ,\"tokenizer name\": \"baai/bge-base-en-v1.5\", \"apply pca\": 64, \"apply zipf\": true, \"hidden dim\": 64, \"seq length\": 1000000, \"normalize\": true} 证据：`crates/yantrikdb-core/assets/potion-base-2M/config.json`\n- **Modules**（structured_config）：{ \"idx\": 0, \"name\": \"0\", \"path\": \".\", \"type\": \"sentence transformers.models.StaticEmbedding\" }, { \"idx\": 1, \"name\": \"1\", \"path\": \"1 Normalize\", \"type\": \"sentence transformers.models.Normalize\" } 证据：`crates/yantrikdb-core/assets/potion-base-2M/modules.json`\n- **Tokenizer**（structured_config）：{ \"version\": \"1.0\", \"truncation\": null, \"padding\": null, \"added tokens\": { \"id\": 0, \"content\": \" PAD \", \"single word\": false, \"lstrip\": false, \"rstrip\": false, \"normalized\": false, \"special\": true }, { \"id\": 1, \"content\": \" UNK \", \"single word\": false, \"lstrip\": false, \"rstrip\": false, \"normalized\": false, \"special\": true }, { \"id\": 2, \"content\": \" CLS \", \"single word\": false, \"lstrip\": false, \"rstrip\": false, \"normalized\": false, \"special\": true }, { \"id\": 3, \"content\": \" SEP \", \"single word\": false, \"lstrip\": false, \"rstrip\": false, \"normalized\": false, \"special\": true }, { \"id\": 4, \"content\": \" MASK \", \"single word\": false, \"lstrip\": false, \"rstrip\": false, \"normalized\": false, \"special\"… 证据：`crates/yantrikdb-core/assets/potion-base-2M/tokenizer.json`\n- **Scheduled Tasks**（source_file）：{\"sessionId\":\"48c5baf6-0baa-4c37-93d2-a8f22722b261\",\"pid\":92712,\"acquiredAt\":1778091707761} 证据：`.claude/scheduled_tasks.lock`\n- **MCP / Saga**（source_file）：MCP / Saga .mcp.json .tracker.db .tracker.db-shm .tracker.db-wal .env 证据：`.gitignore`\n- **Compiled Rust extension — 
pylint can't introspect a .pyd/.so file to find**（source_file）：MASTER Compiled Rust extension — pylint can't introspect a .pyd/.so file to find names, so E0611 no-name-in-module fires false-positive for every import from this module. Adding it to the allow-list tells pylint to trust at face value rather than parse-and-verify. extension-pkg-allow-list=yantrikdb. yantrikdb rust 证据：`.pylintrc`\n- **Optimize dependencies candle, tokenizers even in dev builds.**（source_file）：workspace resolver = \"2\" members = \"crates/yantrikdb-core\", \"crates/yantrikdb-python\", exclude = \"crates/yantrikdb-wasm\", 证据：`Cargo.toml`\n- **Bench Rust Vs Python**（source_file）：\"\"\"Benchmarks for AIDB Rust engine core operations. 证据：`benchmarks/bench_rust_vs_python.py`\n- **Explicitly declare the LICENSE file for PEP 639 compliance. Without**（source_file）：build-system requires = \"maturin =1.5\" build-backend = \"maturin\" 证据：`pyproject.toml`\n- **── 1. Test corpus: yantrikdb-shaped memories ──**（source_file）：\"\"\" Empirical quality eval: potion-base-2M vs all-MiniLM-L6-v2 vs Slice A hash-trick baseline, on yantrikdb-shaped memory texts. 证据：`scratch/eval_potion_2m.py`\n- **!/usr/bin/env python3**（source_file）：!/usr/bin/env python3 \"\"\"Run the AIDB evaluation harness with real embeddings. 
证据：`scripts/run_eval.py`\n\n## 宿主 AI 必须遵守的规则\n\n- **把本资产当作开工前上下文，而不是运行环境。**：AI Context Pack 只包含证据化项目理解，不包含目标项目的可执行状态。 证据：`README.md`, `LICENSE`, `crates/yantrikdb-python/LICENSE`\n- **回答用户时区分可预览内容与必须安装后才能验证的内容。**：安装前体验的消费者价值来自降低误装和误判，而不是伪装成真实运行。 证据：`README.md`, `LICENSE`, `crates/yantrikdb-python/LICENSE`\n\n## 用户开工前应该回答的问题\n\n- 你准备在哪个宿主 AI 或本地环境中使用它？\n- 你只是想先体验工作流，还是准备真实安装？\n- 你最在意的是安装成本、输出质量、还是和现有规则的冲突？\n\n## 验收标准\n\n- 所有能力声明都能回指到 evidence_refs 中的文件路径。\n- AI_CONTEXT_PACK.md 没有把预览包装成真实运行。\n- 用户能在 3 分钟内看懂适合谁、能做什么、如何开始和风险边界。\n\n---\n\n## Doramagic Context Augmentation\n\n下面内容用于强化 Repomix/AI Context Pack 主体。Human Manual 只提供阅读骨架；踩坑日志会被转成宿主 AI 必须遵守的工作约束。\n\n## Human Manual 骨架\n\n使用规则：这里只是项目阅读路线和显著性信号，不是事实权威。具体事实仍必须回到 repo evidence / Claim Graph。\n\n宿主 AI 硬性规则：\n- 不得把页标题、章节顺序、摘要或 importance 当作项目事实证据。\n- 解释 Human Manual 骨架时，必须明确说它只是阅读路线/显著性信号。\n- 能力、安装、兼容性、运行状态和风险判断必须引用 repo evidence、source path 或 Claim Graph。\n\n- **Overview**：importance `high`\n  - source_paths: README.md, crates/yantrikdb-core/src/lib.rs\n- **Installation**：importance `high`\n  - source_paths: pyproject.toml, crates/yantrikdb-python/Cargo.toml, crates/yantrikdb-python/pyproject.toml, src/yantrikdb/__init__.py\n- **Five-Index Architecture**：importance `high`\n  - source_paths: crates/yantrikdb-core/src/vector/hnsw.rs, crates/yantrikdb-core/src/vector/delta_index.rs, crates/yantrikdb-core/src/knowledge/graph.rs, crates/yantrikdb-core/src/knowledge/graph_index.rs, crates/yantrikdb-core/src/engine/storage.rs\n- **Decoupled Write Path (LSM Architecture)**：importance `high`\n  - source_paths: docs/decoupled_write_path_rfc.md, CONCURRENCY.md, crates/yantrikdb-core/src/vector/delta_index.rs, crates/yantrikdb-core/src/engine/materializer.rs\n- **Storage Engine**：importance `medium`\n  - source_paths: crates/yantrikdb-core/src/engine/storage.rs, crates/yantrikdb-core/src/engine/record.rs, crates/yantrikdb-core/src/engine/recall.rs, crates/yantrikdb-core/src/base/schema.rs\n- **Core API Reference**：importance 
`high`\n  - source_paths: crates/yantrikdb-core/src/engine/mod.rs, crates/yantrikdb-core/src/engine/record.rs, crates/yantrikdb-core/src/engine/recall.rs, crates/yantrikdb-core/src/engine/graph_ops.rs, crates/yantrikdb-core/src/base/scoring.rs\n- **Cognition Layer**：importance `high`\n  - source_paths: crates/yantrikdb-core/src/cognition/mod.rs, crates/yantrikdb-core/src/cognition/consolidate.rs, crates/yantrikdb-core/src/cognition/patterns.rs, crates/yantrikdb-core/src/cognition/belief.rs, crates/yantrikdb-core/src/cognition/schema_induction.rs\n- **Conflict Detection and Resolution**：importance `medium`\n  - source_paths: crates/yantrikdb-core/src/cognition/contradiction.rs, crates/yantrikdb-core/src/cognition/belief_network.rs, crates/yantrikdb-core/src/engine/conflict.rs, docs/showcase/wirecard.md\n\n## Repo Inspection Evidence / 源码检查证据\n\n- repo_clone_verified: true\n- repo_inspection_verified: true\n- repo_commit: `c4625f695aaf185eb2a5021be91af421ed95bd26`\n- inspected_files: `pyproject.toml`, `README.md`, `uv.lock`, `docs/phase_4_3_design.md`, `docs/wedge_empirical_baseline_2026-05-06.md`, `docs/wedge_lock_scope_audit_2026-05-06.md`, `docs/wedge_concurrency_sweep_2026-05-07.md`, `docs/decoupled_write_path_rfc.md`, `docs/whitepaper/aidb_whitepaper.md`, `docs/showcase/wirecard.md`, `src/yantrikdb/cli.py`, `src/yantrikdb/consolidate.py`, `src/yantrikdb/api.py`, `src/yantrikdb/triggers.py`, `src/yantrikdb/__init__.py`, `src/yantrikdb/mcp/tools.py`, `src/yantrikdb/mcp/resources.py`, `src/yantrikdb/mcp/server.py`, `src/yantrikdb/mcp/__init__.py`, `src/yantrikdb/adapters/crewai.py`\n\n宿主 AI 硬性规则：\n- 没有 repo_clone_verified=true 时，不得声称已经读过源码。\n- 没有 repo_inspection_verified=true 时，不得把 README/docs/package 文件判断写成事实。\n- 没有 quick_start_verified=true 时，不得声称 Quick Start 已跑通。\n\n## Doramagic Pitfall Constraints / 踩坑约束\n\n这些规则来自 Doramagic 发现、验证或编译过程中的项目专属坑点。宿主 AI 必须把它们当作工作约束，而不是普通说明文字。\n\n### Constraint 1: 来源证据：API addition: deterministic mutation primitives (record_with_rid 
+ friends) for cluster-mode replication\n\n- Trigger: GitHub 社区证据显示该项目存在一个安装相关的待验证问题：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication\n- Host AI rule: 来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- Why it matters: 可能阻塞安装或首次运行。\n- Evidence: community_evidence:github | cevd_4ab95be6a3ac4fb192053e8c3829f762 | https://github.com/yantrikos/yantrikdb/issues/9 | 来源讨论提到 node 相关条件，需在安装/试用前复核。\n- Hard boundary: 不要把这个坑点包装成已解决、已验证或可忽略，除非后续验证证据明确证明它已经关闭。\n\n### Constraint 2: 来源证据：Bug: `namespace` parameter ignored in batch `remember` calls — memories always stored under `default`\n\n- Trigger: GitHub 社区证据显示该项目存在一个安装相关的待验证问题：Bug: `namespace` parameter ignored in batch `remember` calls — memories always stored under `default`\n- Host AI rule: 来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- Why it matters: 可能增加新用户试用和生产接入成本。\n- Evidence: community_evidence:github | cevd_c37cd96e9c8d476880caca4f7314118e | https://github.com/yantrikos/yantrikdb/issues/2 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n- Hard boundary: 不要把这个坑点包装成已解决、已验证或可忽略，除非后续验证证据明确证明它已经关闭。\n\n### Constraint 3: 来源证据：Migration v14→v15 fails: ALTER TABLE on edges view\n\n- Trigger: GitHub 社区证据显示该项目存在一个安装相关的待验证问题：Migration v14→v15 fails: ALTER TABLE on edges view\n- Host AI rule: 来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- Why it matters: 可能影响升级、迁移或版本选择。\n- Evidence: community_evidence:github | cevd_bb378d100e9d472892b1d5e42e640cad | https://github.com/yantrikos/yantrikdb/issues/10 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n- Hard boundary: 不要把这个坑点包装成已解决、已验证或可忽略，除非后续验证证据明确证明它已经关闭。\n\n### Constraint 4: 来源证据：[bug] Tombstoned memories still appear in similarity-scan recall results\n\n- Trigger: GitHub 社区证据显示该项目存在一个安装相关的待验证问题：[bug] Tombstoned memories still appear in similarity-scan recall results\n- Host AI rule: 来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- Why it matters: 可能增加新用户试用和生产接入成本。\n- Evidence: community_evidence:github | cevd_aa3d426055a44483b47ffd3b9f3fdb6a | https://github.com/yantrikos/yantrikdb/issues/8 | 来源类型 
github_issue 暴露的待验证使用条件。\n- Hard boundary: 不要把这个坑点包装成已解决、已验证或可忽略，除非后续验证证据明确证明它已经关闭。\n\n### Constraint 5: 来源证据：[bug] YANTRIKDB_ENCRYPTION_KEY_HEX env var ignored — encryption silently disabled\n\n- Trigger: GitHub 社区证据显示该项目存在一个安装相关的待验证问题：[bug] YANTRIKDB_ENCRYPTION_KEY_HEX env var ignored — encryption silently disabled\n- Host AI rule: 来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- Why it matters: 可能增加新用户试用和生产接入成本。\n- Evidence: community_evidence:github | cevd_17652fc680ba4b64bee5018b2d1514e4 | https://github.com/yantrikos/yantrikdb/issues/6 | 来源讨论提到 docker 相关条件，需在安装/试用前复核。\n- Hard boundary: 不要把这个坑点包装成已解决、已验证或可忽略，除非后续验证证据明确证明它已经关闭。\n\n### Constraint 6: 来源证据：v0.7.10 — Fix has_embedder() for Python-side embedders (plugin#4)\n\n- Trigger: GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.10 — Fix has_embedder() for Python-side embedders (plugin#4)\n- Host AI rule: 来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- Why it matters: 可能增加新用户试用和生产接入成本。\n- Evidence: community_evidence:github | cevd_daa2ca5265524c83bb21727be2a980a1 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.10 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n- Hard boundary: 不要把这个坑点包装成已解决、已验证或可忽略，除非后续验证证据明确证明它已经关闭。\n\n### Constraint 7: 来源证据：v0.7.11 — pyo3 0.28.3 + python3.14 Support\n\n- Trigger: GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.11 — pyo3 0.28.3 + python3.14 Support\n- Host AI rule: 来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- Why it matters: 可能影响升级、迁移或版本选择。\n- Evidence: community_evidence:github | cevd_91b7975fce7d49b6b87ef05b914e80b2 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.11 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n- Hard boundary: 不要把这个坑点包装成已解决、已验证或可忽略，除非后续验证证据明确证明它已经关闭。\n\n### Constraint 8: 来源证据：v0.7.4 — Python Bindings: with_default + record_text/recall_text\n\n- Trigger: GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.4 — Python Bindings: with_default + record_text/recall_text\n- Host AI rule: 来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- Why it matters: 可能影响升级、迁移或版本选择。\n- Evidence: community_evidence:github | cevd_54938994017d4b5899ad9cef4e6a2723 | 
https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.4 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n- Hard boundary: 不要把这个坑点包装成已解决、已验证或可忽略，除非后续验证证据明确证明它已经关闭。\n\n### Constraint 9: 来源证据：v0.7.5 — Python UX: TypeError Guard + embedder-download in Default Wheel\n\n- Trigger: GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.5 — Python UX: TypeError Guard + embedder-download in Default Wheel\n- Host AI rule: 来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- Why it matters: 可能增加新用户试用和生产接入成本。\n- Evidence: community_evidence:github | cevd_be61ad4afd5b4f669a6f727d727474c4 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.5 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n- Hard boundary: 不要把这个坑点包装成已解决、已验证或可忽略，除非后续验证证据明确证明它已经关闭。\n\n### Constraint 10: 可能修改宿主 AI 配置\n\n- Trigger: 项目面向 Claude/Cursor/Codex/Gemini/OpenCode 等宿主，或安装命令涉及用户配置目录。\n- Host AI rule: 列出会写入的配置文件、目录和卸载/回滚步骤。\n- Why it matters: 安装可能改变本机 AI 工具行为，用户需要知道写入位置和回滚方法。\n- Evidence: capability.host_targets | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | host_targets=mcp_host, claude, claude_code\n- Hard boundary: 不要把这个坑点包装成已解决、已验证或可忽略，除非后续验证证据明确证明它已经关闭。\n",
      "summary": "给宿主 AI 的上下文和工作边界。",
      "title": "AI Context Pack / 带给我的 AI"
    },
    "boundary_risk_card": {
      "asset_id": "boundary_risk_card",
      "filename": "BOUNDARY_RISK_CARD.md",
      "markdown": "# Boundary & Risk Card / 安装前决策卡\n\n项目：yantrikos/yantrikdb\n\n## Doramagic 试用结论\n\n当前结论：可以进入发布前推荐检查；首次使用仍应从最小权限、临时目录和可回滚配置开始。\n\n## 用户现在可以做\n\n- 可以先阅读 Human Manual，理解项目目的和主要工作流。\n- 可以复制 Prompt Preview 做安装前体验；这只验证交互感，不代表真实运行。\n- 可以把官方 Quick Start 命令放到隔离环境中验证，不要直接进主力环境。\n\n## 现在不要做\n\n- 不要把 Prompt Preview 当成项目实际运行结果。\n- 不要把 metadata-only validation 当成沙箱安装验证。\n- 不要把未验证能力写成“已支持、已跑通、可放心安装”。\n- 不要在首次试用时交出生产数据、私人文件、真实密钥或主力配置目录。\n\n## 安装前检查\n\n- 宿主 AI 是否匹配：mcp_host, claude, claude_code\n- 官方安装入口状态：已发现官方入口\n- 是否在临时目录、临时宿主或容器中验证：必须是\n- 是否能回滚配置改动：必须能\n- 是否需要 API Key、网络访问、读写文件或修改宿主配置：未确认前按高风险处理\n- 是否记录了安装命令、实际输出和失败日志：必须记录\n\n## 当前阻塞项\n\n- 无阻塞项。\n\n## 项目专属踩坑\n\n- 来源证据：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication（medium）：可能阻塞安装或首次运行。 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 来源证据：Bug: `namespace` parameter ignored in batch `remember` calls — memories always stored under `default`（medium）：可能增加新用户试用和生产接入成本。 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 来源证据：Migration v14→v15 fails: ALTER TABLE on edges view（medium）：可能影响升级、迁移或版本选择。 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 来源证据：[bug] Tombstoned memories still appear in similarity-scan recall results（medium）：可能增加新用户试用和生产接入成本。 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 来源证据：[bug] YANTRIKDB_ENCRYPTION_KEY_HEX env var ignored — encryption silently disabled（medium）：可能增加新用户试用和生产接入成本。 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n\n## 风险与权限提示\n\n- no_demo: medium\n\n## 证据缺口\n\n- 暂未发现结构化证据缺口。\n",
      "summary": "安装、权限、验证和推荐前风险。",
      "title": "Boundary & Risk Card / 边界与风险卡"
    },
    "human_manual": {
      "asset_id": "human_manual",
      "filename": "HUMAN_MANUAL.md",
      "markdown": "# https://github.com/yantrikos/yantrikdb 项目说明书\n\n生成时间：2026-05-16 13:00:42 UTC\n\n## 目录\n\n- [Overview](#page-overview)\n- [Installation](#page-installation)\n- [Five-Index Architecture](#page-five-index-architecture)\n- [Decoupled Write Path (LSM Architecture)](#page-decoupled-write-path)\n- [Storage Engine](#page-storage-engine)\n- [Core API Reference](#page-core-api)\n- [Cognition Layer](#page-cognition-layer)\n- [Conflict Detection and Resolution](#page-conflict-resolution)\n- [MCP Server Integration](#page-mcp-server)\n- [Python Bindings](#page-python-bindings)\n\n<a id='page-overview'></a>\n\n## Overview\n\n### 相关页面\n\n相关主题：[Five-Index Architecture](#page-five-index-architecture), [Core API Reference](#page-core-api), [Installation](#page-installation)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [crates/yantrikdb-core/src/cognition/state.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n- [crates/yantrikdb-core/src/cognition/query_dsl.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/query_dsl.rs)\n- [crates/yantrikdb-core/src/base/types.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/base/types.rs)\n- [crates/yantrikdb-core/src/cognition/narrative.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/narrative.rs)\n- [crates/yantrikdb-core/src/cognition/personality_bias.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/personality_bias.rs)\n- [crates/yantrikdb-core/src/cognition/receptivity.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/receptivity.rs)\n- [crates/yantrikdb-core/src/engine/query_dsl.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/query_dsl.rs)\n</details>\n\n# Overview\n\nYantrikDB is a cognitive memory database system designed to 
model, store, and reason about complex human mental states and behaviors. It provides a unified architecture for managing episodic memories, semantic knowledge, and procedural information while supporting advanced cognitive operations such as attention spreading, belief revision, and proactive suggestion surfacing.\n\nThe system bridges traditional database storage with cognitive science principles, enabling applications that require understanding of user intent, emotional states, preferences, and behavioral patterns.\n\n## Architecture Overview\n\nYantrikDB follows a layered architecture that separates storage, cognitive processing, and query execution concerns.\n\n```mermaid\ngraph TD\n    A[Python API Layer] --> B[Query DSL Engine]\n    B --> C[Cognition Module]\n    C --> D[Working Set Cache]\n    D --> E[SQLite Storage]\n    C --> F[Conflict Resolution]\n    C --> G[Proactive Surfacing]\n    C --> H[Narrative Tracking]\n```\n\n### Core Modules\n\n| Module | Location | Purpose |\n|--------|----------|---------|\n| `state.rs` | `cognition/` | Defines cognitive node types, edge kinds, and universal attributes |\n| `query_dsl.rs` | `cognition/` | Specifies cognitive operators (Recall, Believe, Plan, etc.) 
|\n| `narrative.rs` | `cognition/` | Manages narrative arcs and story tracking |\n| `personality_bias.rs` | `cognition/` | Models personality dimensions affecting system behavior |\n| `receptivity.rs` | `cognition/` | Tracks user activity levels and notification preferences |\n| `types.rs` | `base/` | Defines conflict types, trigger mechanisms, and configuration |\n| `engine/query_dsl.rs` | `engine/` | Executes cognitive operators against the database |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:1-50]()\n\n## Cognitive Node Model\n\nEvery entity in YantrikDB is represented as a cognitive node with universal attributes that determine how it participates in reasoning, memory consolidation, and action selection.\n\n### Node Kinds\n\nThe system supports 15 distinct node types representing different mental constructs:\n\n| Kind | Persistence | Description |\n|------|-------------|-------------|\n| `Entity` | Yes | Physical or conceptual objects |\n| `Episode` | Yes | Temporal memory of events |\n| `Belief` | Yes | User-held beliefs about the world |\n| `Goal` | Yes | Desired end states |\n| `Task` | Yes | Actionable items with status tracking |\n| `IntentHypothesis` | No | Transient intent guesses |\n| `Routine` | Yes | Recurring behavioral patterns |\n| `Need` | Yes | User needs (8 categories) |\n| `Opportunity` | Yes | Time-bounded chances for action |\n| `Risk` | Yes | Potential problems |\n| `Constraint` | Yes | Safety or preference constraints |\n| `Preference` | Yes | User preferences |\n| `ConversationThread` | No | Transient conversation state |\n| `ActionSchema` | Yes | Reusable action templates |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:150-180]()\n\n### Universal Cognitive Attributes\n\nEvery node carries 11 dimensions that govern its lifecycle and behavior:\n\n```mermaid\ngraph LR\n    A[Cognitive Node] --> B[confidence 0-1]\n    A --> C[activation 0-1]\n    A --> D[salience 0-1]\n    A --> E[persistence 0-1]\n    A --> F[valence -1 
to 1]\n    A --> G[urgency 0-1]\n    A --> H[novelty 0-1]\n    A --> I[volatility 0-1]\n    A --> J[provenance]\n    A --> K[evidence_count]\n```\n\nThe default values for each node kind vary based on their expected characteristics:\n\n| NodeKind | confidence | salience | persistence |\n|----------|------------|----------|-------------|\n| Entity | 0.90 | 0.80 | 0.95 |\n| Episode | 0.70 | 0.70 | 0.30 |\n| Belief | 0.60 | 0.70 | 0.60 |\n| Goal | 0.80 | 0.90 | 0.80 |\n| Task | 0.90 | 0.80 | 0.40 |\n| Need | 0.60 | 0.70 | 0.40 |\n| Opportunity | 0.40 | 0.60 | 0.20 |\n| Risk | 0.40 | 0.70 | 0.60 |\n| Constraint | 0.90 | 0.80 | 0.95 |\n| Preference | 0.60 | 0.50 | 0.85 |\n| ActionSchema | 0.70 | 0.40 | 0.90 |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:280-320]()\n\n## Cognitive Edge Model\n\nNodes connect through typed edges that encode semantic relationships and govern activation spreading.\n\n### Edge Kinds\n\nThere are 18 edge types in the cognitive graph:\n\n| Edge Kind | Transfer Factor | Type | Description |\n|-----------|-----------------|------|-------------|\n| `supports` | 0.7 | Positive | Evidence backing a belief |\n| `contradicts` | -0.5 | Inhibitory | Evidence opposing a belief |\n| `causes` | 0.8 | Positive | Causal relationship |\n| `predicts` | 0.4 | Positive | Future outcome prediction |\n| `prevents` | -0.6 | Inhibitory | Blocks an outcome |\n| `advances_goal` | 0.6 | Positive | Progress toward goal |\n| `blocks_goal` | -0.5 | Inhibitory | Impedes goal progress |\n| `subtask_of` | 0.4 | Positive | Decomposition relationship |\n| `requires` | 0.5 | Positive | Prerequisite relationship |\n| `associated_with` | 0.3 | Moderate | General correlation |\n| `instance_of` | 0.3 | Moderate | Categorization |\n| `part_of` | 0.3 | Moderate | Compositional |\n| `similar_to` | 0.3 | Moderate | Analogy |\n| `precedes_temporally` | 0.2 | Moderate | Temporal ordering |\n| `triggers` | 0.7 | Positive | Event initiation |\n| `prefers` | 0.3 | Moderate | 
Preference relationship |\n| `avoids` | -0.3 | Inhibitory | Avoidance pattern |\n| `constrains` | -0.2 | Inhibitory | Limitation relationship |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:120-160]()\n\n### Edge Behavior Methods\n\nEach edge kind provides metadata through dedicated methods:\n\n- `activation_transfer()` - Returns the spreading activation factor (-1.0 to 1.0)\n- `is_inhibitory()` - Boolean indicating suppression behavior\n- `is_epistemic()` - Whether edge participates in belief revision\n- `is_causal()` - Whether edge represents causal relationships\n\n```rust\npub fn is_inhibitory(self) -> bool {\n    self.activation_transfer() < 0.0\n}\n\npub fn is_epistemic(self) -> bool {\n    matches!(self, Self::Supports | Self::Contradicts)\n}\n\npub fn is_causal(self) -> bool {\n    matches!(self, Self::Causes | Self::Predicts | Self::Prevents)\n}\n```\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:180-200]()\n\n## Cognitive Operators\n\nThe query DSL defines 10 operators that compose the cognitive processing pipeline:\n\n| Operator | Priority | Purpose |\n|----------|----------|---------|\n| `Attend` | 10 | Foundation — always runs first |\n| `Recall` | 9 | Critical for context retrieval |\n| `Believe` | 8 | Evidence integration |\n| `Compare` | 7 | Action selection |\n| `Constrain` | 7 | Safety validation |\n| `Plan` | 6 | Means-ends reasoning |\n| `Project` | 5 | Forward simulation |\n| `Anticipate` | 4 | Proactive reasoning |\n| `Assess` | 3 | Meta-cognitive evaluation |\n| `CoherenceCheck` | 2 | Maintenance under budget pressure |\n\n资料来源：[crates/yantrikdb-core/src/cognition/query_dsl.rs:30-45]()\n\n### Operator Parameters\n\nEach operator accepts typed parameters:\n\n```rust\npub struct AttendOp {\n    pub seeds: Vec<NodeId>,\n    pub max_hops: u32,\n    pub decay: f64,\n}\n\npub struct RecallOp {\n    pub top_k: usize,\n    pub query: Option<String>,\n    pub domain: Option<String>,\n}\n\npub struct BelieveOp {\n    pub evidence: 
EvidenceInput,\n}\n\npub struct EvidenceInput {\n    pub target: Option<NodeId>,\n    pub observation: String,\n    pub direction: i32,  // positive = confirming, negative = contradicting\n}\n```\n\n资料来源：[crates/yantrikdb-core/src/cognition/query_dsl.rs:55-80]()\n\n### Execution Flow\n\n```mermaid\ngraph TD\n    A[Cognitive Query] --> B{Operator Type}\n    B --> C[Attend]\n    B --> D[Recall]\n    B --> E[Believe]\n    B --> F[Compare]\n    B --> G[Plan]\n    B --> H[Project]\n    B --> I[Anticipate]\n    B --> J[Assess]\n    B --> K[CoherenceCheck]\n    \n    C --> L[Working Set Hydration]\n    L --> M[Activation Boost on Seeds]\n    M --> N[Spreading Activation]\n    N --> O[StepOutput]\n```\n\nThe executor processes operators by first hydrating the working set from SQLite, then executing operator-specific logic:\n\n```rust\nfn execute_attend(&self, op: &AttendOp) -> StepOutput {\n    match self.db.hydrate_working_set(self.attention_config.clone()) {\n        Ok(mut ws) => {\n            let mut activated = 0;\n            let mut top = Vec::new();\n            for &seed in &op.seeds {\n                if let Some(node) = ws.get_mut(seed) {\n                    let new_activation = (node.attrs.activation + 0.3).min(1.0);\n                    node.attrs.activation = new_activation;\n                    top.push((seed, new_activation));\n                    activated += 1;\n                }\n            }\n            for &seed in &op.seeds {\n                activated += ws.activate_and_spread(seed, 0.3);\n            }\n            StepOutput::Attend { nodes_activated: activated, top_activated: top }\n        }\n        Err(e) => StepOutput::Error { message: format!(\"Attend failed: {}\", e) },\n    }\n}\n```\n\n资料来源：[crates/yantrikdb-core/src/engine/query_dsl.rs:100-130]()\n\n## Need Categories\n\nThe system models 8 categories of human needs:\n\n| Category | Description |\n|----------|-------------|\n| `Informational` | Knowledge and understanding needs |\n| 
`Social` | Connection and relationship needs |\n| `Emotional` | Affective and psychological needs |\n| `Organizational` | Structure and planning needs |\n| `Creative` | Self-expression and innovation needs |\n| `Health` | Physical and mental wellness needs |\n| `Financial` | Economic and resource needs |\n| `Professional` | Career and work-related needs |\n\n```rust\npub fn from_str(s: &str) -> Self {\n    match s {\n        \"informational\" => Self::Informational,\n        \"social\" => Self::Social,\n        \"emotional\" => Self::Emotional,\n        \"organizational\" => Self::Organizational,\n        \"creative\" => Self::Creative,\n        \"health\" => Self::Health,\n        \"financial\" => Self::Financial,\n        \"professional\" => Self::Professional,\n        _ => Self::Informational,\n    }\n}\n```\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:10-45]()\n\n## Provenance and Reliability\n\nEvery cognitive node carries provenance metadata indicating its source:\n\n| Provenance | Reliability Prior | Description |\n|------------|-------------------|-------------|\n| `Told` | 0.95 | User explicitly stated |\n| `Observed` | 0.90 | Directly observed behavior |\n| `Experimented` | 0.85 | Confirmed via controlled experiment |\n| `Consolidated` | 0.80 | Merged from multiple sources |\n| `Extracted` | 0.75 | From external documents |\n| `Inferred` | 0.60 | Pattern-based inference |\n| `SystemDefault` | 0.50 | Defaults — weakest trust |\n\n```rust\npub fn reliability_prior(self) -> f64 {\n    match self {\n        Self::Told => 0.95,\n        Self::Observed => 0.90,\n        Self::Experimented => 0.85,\n        Self::Consolidated => 0.80,\n        Self::Extracted => 0.75,\n        Self::Inferred => 0.60,\n        Self::SystemDefault => 0.50,\n    }\n}\n```\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:220-250]()\n\n## Action Kinds and Costs\n\nThe system models 8 types of actions with associated base costs:\n\n| Action | Base Cost | Description 
|\n|--------|-----------|-------------|\n| `Abstain` | 0.0 | Explicitly decide inaction |\n| `Inform` | 0.05 | Provide information |\n| `Organize` | 0.10 | Structure content |\n| `Suggest` | 0.15 | Propose an option |\n| `Communicate` | 0.20 | Send a message |\n| `Schedule` | 0.25 | Create calendar events |\n| `Execute` | 0.30 | Take direct action |\n| `Warn` | 0.30 | Alert about risk |\n\nHigher cost indicates more disruption to the user.\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:250-290]()\n\n## Conflict Resolution\n\nThe system detects and resolves conflicts between memories using policy-aware evaluation:\n\n### Conflict Types\n\n| Type | Default Priority | Description |\n|------|------------------|-------------|\n| `IdentityFact` | critical | Conflicting identity claims |\n| `Preference` | high | Contradicting preferences |\n| `Temporal` | high | Time-based contradictions |\n| `Consolidation` | medium | During memory consolidation |\n| `Minor` | low | Minor inconsistencies |\n\n### Conflict Detection Flow\n\n```mermaid\ngraph TD\n    A[Memory Operations] --> B[Candidate Pair Generation]\n    B --> C{Policy Check}\n    C -->|overlap_allowed| D[Flag as Conflict]\n    C -->|temporal_required| E{Time Validation}\n    E -->|Valid| D\n    E -->|Invalid| F[Apply Missing Time Severity]\n    C -->|No Policy| G[Default Behavior]\n```\n\nThe conflict resolution system queries namespace-specific policies:\n\n```sql\nSELECT overlap_allowed, temporal_required, missing_time_severity\nFROM relation_policies\nWHERE relation_type = ?1 AND (namespace = ?2 OR namespace = '*')\nORDER BY CASE WHEN namespace = ?2 THEN 0 ELSE 1 END\n```\n\n资料来源：[crates/yantrikdb-core/src/distributed/conflict.rs:50-80]()\n\n## Narrative Tracking\n\nYantrikDB tracks narrative arcs to understand ongoing stories and life patterns:\n\n### Arc Types\n\n| Type | Description |\n|------|-------------|\n| `Relationship` | Interpersonal dynamics |\n| `Project` | Goal-oriented endeavors |\n| `Habit` | 
Recurring behaviors |\n| `Discovery` | Learning journeys |\n| `Loss` | Negative life events |\n| `Recovery` | Healing processes |\n\n### Arc Lifecycle\n\n```mermaid\ngraph LR\n    A[Emerging] --> B[Active]\n    B --> C[Paused]\n    C -->|Resume| B\n    C --> D[Resolved]\n    C --> E[Abandoned]\n    A -->|Quality| E\n```\n\n### Chapter Types\n\nWithin arcs, chapters progress through phases:\n\n| Type | Purpose |\n|------|---------|\n| `Setup` | Initial context setting |\n| `Rising` | Building tension or progress |\n| `Climax` | Peak moment |\n| `Falling` | Winding down |\n| `Resolution` | Final conclusion |\n| `Interlude` | Pause between main chapters |\n\n资料来源：[crates/yantrikdb-core/src/cognition/narrative.rs:30-80]()\n\n## Personality Model\n\nThe system models personality across 8 dimensions affecting system behavior:\n\n| Dimension | Description |\n|-----------|-------------|\n| `curiosity` | Drive to explore and learn |\n| `proactivity` | Tendency to initiate action |\n| `caution` | Risk aversion level |\n| `warmth` | Emotional engagement |\n| `efficiency` | Optimization preference |\n| `playfulness` | Humor and levity |\n| `formality` | Communication style |\n| `persistence` | Follow-through tendency |\n\n```rust\npub const DIMENSION_NAMES: [&'static str; 8] = [\n    \"curiosity\", \"proactivity\", \"caution\", \"warmth\",\n    \"efficiency\", \"playfulness\", \"formality\", \"persistence\",\n];\n\npub fn similarity(&self, other: &Self) -> f64 {\n    // Cosine similarity between personality vectors\n    let mut dot = 0.0;\n    let mut mag_a = 0.0;\n    let mut mag_b = 0.0;\n    for i in 0..Self::DIMENSIONS {\n        let a = self.dimension(i);\n        let b = other.dimension(i);\n        dot += a * b;\n        mag_a += a * a;\n        mag_b += b * b;\n    }\n    // Normalized cosine similarity\n    (dot / (mag_a.sqrt() * mag_b.sqrt())).clamp(-1.0, 1.0)\n}\n```\n\n资料来源：[crates/yantrikdb-core/src/cognition/personality_bias.rs:50-90]()\n\n## User 
Receptivity\n\nThe system tracks user activity states to optimize notification timing:\n\n### Activity Levels\n\n| Level | Interruption Cost | Description |\n|-------|-------------------|-------------|\n| `Idle` | 0.15 | No active engagement |\n| `JustReturned` | 0.35 | Recently became active |\n| `Browsing` | 0.45 | Casual content consumption |\n| `Communicating` | 0.50 | In active conversation |\n| `TaskSwitching` | 0.55 | Mid-task context switch |\n| `FocusedWork` | 0.75 | Deep concentration |\n| `DeepFocus` | 0.95 | Critical focus period |\n\n### Notification Modes\n\n| Mode | Behavior |\n|------|----------|\n| `All` | All notifications allowed |\n| `ImportantOnly` | Only important notifications |\n| `DoNotDisturb` | Block all notifications |\n\n资料来源：[crates/yantrikdb-core/src/cognition/receptivity.rs:20-70]()\n\n## Think() Configuration\n\nThe cognitive loop is configured via `ThinkConfig`:\n\n```rust\npub struct ThinkConfig {\n    pub importance_threshold: f64,\n    pub decay_threshold: f64,\n    pub max_triggers: usize,\n}\n```\n\n### Trigger Types\n\n| Type | Cooldown | Expiry |\n|------|----------|--------|\n| `DecayReview` | 3 days | 7 days |\n| `ConsolidationReady` | 1 day | 3 days |\n| `ConflictEscalation` | 2 days | 14 days |\n| `TemporalDrift` | 14 days | 7 days |\n| `Redundancy` | 1 day | 7 days |\n| `RelationshipInsight` | 7 days | 7 days |\n| `ValenceTrend` | 7 days | 7 days |\n| `EntityAnomaly` | 7 days | 7 days |\n| `PatternDiscovered` | 7 days | 7 days |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs:100-150]()\n\n## Python API\n\nThe system exposes a Python interface for memory operations:\n\n```rust\n#[pyo3(signature = (\n    query=None, embedding=None, top_k=10, memory_type=None, namespace=None,\n    time_window=None, expand_entities=false, include_consolidated=false,\n    skip_reinforce=false, domain=None, source=None\n))]\nfn query(\n    &self,\n    py: Python<'_>,\n    query: Option<&str>,\n    embedding: Option<Vec<f32>>,\n    top_k: 
usize,\n    // ... additional parameters\n) -> PyResult<Vec<PyObject>>\n```\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:50-90]()\n\n## Summary\n\nYantrikDB provides a comprehensive cognitive memory architecture featuring:\n\n- **15 cognitive node types** with 11 universal attributes each\n- **18 edge kinds** with configurable activation transfer\n- **10 cognitive operators** for reasoning and context management\n- **8 need categories** for human motivation modeling\n- **7 provenance levels** with reliability priors\n- **8 personality dimensions** for behavioral adaptation\n- **6 activity states** for interruption optimization\n- **Policy-aware conflict resolution** with namespace support\n- **Narrative arc tracking** across 6 lifecycle states\n\nThe system balances persistent storage with working-set caching to support real-time cognitive operations while maintaining long-term memory integrity.\n\n---\n\n<a id='page-installation'></a>\n\n## Installation\n\n### 相关页面\n\n相关主题：[Overview](#page-overview), [Python Bindings](#page-python-bindings), [MCP Server Integration](#page-mcp-server)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [pyproject.toml](https://github.com/yantrikos/yantrikdb/blob/main/pyproject.toml)\n- [crates/yantrikdb-python/Cargo.toml](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-python/Cargo.toml)\n- [crates/yantrikdb-python/pyproject.toml](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-python/pyproject.toml)\n- [src/yantrikdb/__init__.py](https://github.com/yantrikos/yantrikdb/blob/main/src/yantrikdb/__init__.py)\n</details>\n\n# Installation\n\nThis guide covers all supported methods for installing YantrikDB across different platforms and use cases.\n\n## Prerequisites\n\n### System Requirements\n\n| Requirement | Minimum | Recommended |\n|-------------|---------|-------------|\n| Python | 3.9 | 3.11+ |\n| Operating System | Linux, macOS, Windows | Linux (x86_64, aarch64), macOS 
(Intel, Apple Silicon), Windows (x86_64) |\n\n---\n\n<a id='page-five-index-architecture'></a>\n\n## Five-Index Architecture\n\n### 相关页面\n\n相关主题：[Decoupled Write Path (LSM Architecture)](#page-decoupled-write-path), [Storage Engine](#page-storage-engine), [Core API Reference](#page-core-api)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [crates/yantrikdb-core/src/vector/hnsw.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/vector/hnsw.rs)\n- [crates/yantrikdb-core/src/vector/delta_index.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/vector/delta_index.rs)\n- [crates/yantrikdb-core/src/knowledge/graph.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/knowledge/graph.rs)\n- [crates/yantrikdb-core/src/knowledge/graph_index.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/knowledge/graph_index.rs)\n- [crates/yantrikdb-core/src/engine/storage.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/storage.rs)\n- [crates/yantrikdb-core/src/engine/indices.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/indices.rs)\n</details>\n\n# Five-Index Architecture\n\n## Overview\n\nThe Five-Index Architecture is yantrikdb's multi-layered indexing system designed to support diverse query patterns across cognitive memory types. 
Each index layer specializes in a specific access pattern—vector similarity, temporal ordering, graph traversal, full-text search, and structured filtering—enabling the engine to retrieve relevant memories with minimal latency while maintaining consistency across the working set and persistent storage.\n\nThe architecture divides responsibility across five specialized index types:\n\n| Index | Primary Role | Access Pattern |\n|-------|--------------|----------------|\n| **HNSW Index** | Vector similarity search | ANN queries on embeddings |\n| **Delta Index** | Recent writes and updates | In-memory working set |\n| **Graph Index** | Relationship traversal | Multi-hop graph queries |\n| **Storage Index** | Persistent record management | CRUD operations with SQLite |\n| **Recall Index** | Federated cross-index queries | Multi-dimensional recall |\n\n资料来源：[crates/yantrikdb-core/src/engine/indices.rs:1-50]()\n\n## Architecture Diagram\n\n```mermaid\ngraph TB\n    subgraph \"Query Interface\"\n        Q[RecallQuery]\n    end\n    \n    subgraph \"Five Index Layers\"\n        H[HNSW Index<br/>Vector ANN]\n        D[Delta Index<br/>Working Set]\n        G[Graph Index<br/>Relationships]\n        S[Storage Index<br/>Persistent SQLite]\n        R[Recall Index<br/>Federated Router]\n    end\n    \n    subgraph \"Data Sources\"\n        E[Embedding Cache]\n        M[Memory Nodes]\n        R2[Relational Tables]\n    end\n    \n    Q --> R\n    R --> H\n    R --> D\n    R --> G\n    R --> S\n    \n    H --> E\n    D --> M\n    G --> M\n    S --> R2\n```\n\n## HNSW Index Layer\n\n### Purpose and Scope\n\nThe Hierarchical Navigable Small World (HNSW) index provides approximate nearest neighbor (ANN) search over high-dimensional embedding vectors. 
This layer powers semantic similarity queries, enabling the system to retrieve memories based on meaning rather than exact keyword matches.\n\n资料来源：[crates/yantrikdb-core/src/vector/hnsw.rs:1-100]()\n\n### Key Components\n\nThe HNSW implementation in yantrikdb supports the following configuration parameters:\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `m` | `u32` | 16 | Max connections per node |\n| `ef_construction` | `u32` | 200 | Search width during build |\n| `ef_search` | `u32` | 100 | Search width during query |\n| `level_mult` | `f64` | 1/ln(M) | Level generation factor |\n\n### Query Flow\n\n```mermaid\nsequenceDiagram\n    participant Q as Query\n    participant H as HNSW Layer\n    participant E as Embedding Cache\n    participant R as Results\n    \n    Q->>H: RecallQuery with embedding\n    H->>H: Layer 0 scan\n    H->>H: Greedy search up layers\n    H->>E: Fetch top_k candidates\n    E-->>H: Candidate vectors\n    H->>H: Re-rank by distance\n    H-->>R: Ordered results\n```\n\n## Delta Index Layer\n\n### Purpose and Scope\n\nThe Delta Index maintains a working set of recently inserted or updated records before they are flushed to persistent storage. This write buffer enables high-throughput ingestion while preserving query consistency for recent data.\n\n资料来源：[crates/yantrikdb-core/src/vector/delta_index.rs:1-100]()\n\n### Write Path\n\nWhen a new memory is created, the system:\n\n1. Writes to the Delta Index immediately (low latency)\n2. Appending to the HNSW structure if vector is present\n3. 
Delaying SQLite flush until batch threshold\n\n### Consistency Model\n\nThe Delta Index implements a hybrid consistency model:\n\n- **Read-your-writes**: Queries against recent data include Delta entries\n- **Staleness bound**: Configurable flush interval (default: 1 second)\n- **Rollback support**: Unflushed entries can be discarded on abort\n\n```mermaid\ngraph LR\n    A[Write Request] --> B{Delta Index<br/>In-Memory}\n    B --> C{HNSW Update<br/>Immediate}\n    C --> D[Query Path]\n    B -.->|Flush| E[Storage Index<br/>SQLite]\n    E --> D\n```\n\n## Graph Index Layer\n\n### Purpose and Scope\n\nThe Graph Index manages typed relationships between memory nodes, supporting complex multi-hop queries. Each edge type has associated metadata including activation transfer factors and temporal validity windows.\n\n资料来源：[crates/yantrikdb-core/src/knowledge/graph.rs:1-100]()\n资料来源：[crates/yantrikdb-core/src/knowledge/graph_index.rs:1-100]()\n\n### Supported Edge Types\n\n| Edge Type | Activation Transfer | Use Case |\n|-----------|---------------------|----------|\n| `causes` | 0.8 | Causal chains |\n| `supports` | 0.7 | Supporting evidence |\n| `triggers` | 0.7 | Event triggers |\n| `advances_goal` | 0.6 | Goal progress |\n| `requires` | 0.5 | Prerequisites |\n| `subtask_of` | 0.4 | Hierarchical tasks |\n| `predicts` | 0.4 | Predictive relations |\n| `associated_with` | 0.3 | Weak associations |\n| `similar_to` | 0.3 | Analogy detection |\n| `instance_of` | 0.3 | Categorization |\n| `part_of` | 0.3 | Containment |\n| `precedes_temporally` | 0.2 | Temporal ordering |\n| `contradicts` | -0.5 | Conflict detection |\n| `blocks_goal` | -0.6 | Obstacle modeling |\n| `prevents` | -0.7 | Prevention relations |\n| `constrains` | -0.4 | Constraint edges |\n\n### Graph Traversal API\n\n```rust\n// Core graph traversal via RecallQuery\nRecallQuery::new(embedding)\n    .top_k(10)\n    .expand_entities(true)\n    .max_hops(3)\n```\n\n## Storage Index Layer\n\n### Purpose and 
Scope\n\nThe Storage Index provides durable persistence for all memory records using SQLite. This layer handles transaction management, crash recovery, and long-term storage optimization.\n\n资料来源：[crates/yantrikdb-core/src/engine/storage.rs:1-100]()\n\n### Schema Overview\n\n| Table | Primary Key | Indexes |\n|-------|-------------|---------|\n| `memories` | `rid` | `namespace`, `created_at`, `kind` |\n| `edges` | `(src, rel_type, dst)` | `rel_type`, `src`, `dst` |\n| `relation_policies` | `(relation_type, namespace)` | `namespace` |\n\n### Query Parameters\n\n| Parameter | Type | Description |\n|-----------|------|-------------|\n| `memory_type` | `Option<&str>` | Filter by node kind |\n| `namespace` | `Option<&str>` | Filter by namespace |\n| `time_window` | `Option<(f64, f64)>` | Temporal bounds |\n| `domain` | `Option<&str>` | Domain classification |\n| `source` | `Option<&str>` | Provenance filter |\n\n## Recall Index Layer\n\n### Purpose and Scope\n\nThe Recall Index acts as a federated query router that orchestrates multi-index searches. It combines results from HNSW, Delta, Graph, and Storage indices according to query parameters and relevance scoring.\n\n### Query Pipeline\n\n```mermaid\nflowchart TD\n    A[RecallQuery] --> B[Parse Parameters]\n    B --> C{HNSW Index}\n    B --> D{Delta Index}\n    B --> E{Graph Index}\n    B --> F{Storage Index}\n    \n    C --> G[Result Merge]\n    D --> G\n    E --> G\n    F --> G\n    \n    G --> H[Re-rank by Score]\n    H --> I[Top-K Selection]\n    I --> J[Return Ordered Results]\n```\n\n### Query Construction\n\n```rust\nlet q = RecallQuery::new(embedding)\n    .top_k(10)\n    .memory_type(\"episodic\")\n    .namespace(\"work\")\n    .time_window(start_ts, end_ts)\n    .expand_entities(true)\n    .include_consolidated(false);\n```\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:50-80]()\n\n## Index Synchronization\n\n### Write Ordering\n\nAll index updates follow a strict ordering guarantee:\n\n1. 
**Delta Index** receives write first (primary)\n2. **HNSW Index** updated for vector-bearing records\n3. **Graph Index** updated for edge-creating operations\n4. **Storage Index** flush queued for background persistence\n\n### Failure Recovery\n\n| Failure Point | Recovery Action |\n|---------------|-----------------|\n| Delta write fails | Abort entire transaction |\n| HNSW update fails | Mark record inconsistent, retry |\n| Graph update fails | Rollback edge, alert |\n| Storage flush fails | Retain in Delta, retry on restart |\n\n## Performance Characteristics\n\n| Operation | HNSW | Delta | Graph | Storage |\n|-----------|------|-------|-------|---------|\n| Point query | O(log n) | O(1) | O(1) | O(log n) |\n| Range query | N/A | O(n) | O(n) | O(log n + k) |\n| ANN search | O(ef × log n) | N/A | N/A | N/A |\n| Traversal | N/A | N/A | O(m^h) | N/A |\n| Write | O(log n) | O(1) | O(1) | O(log n) |\n\n## Configuration\n\nThe Five-Index system is configured via `ThinkConfig`:\n\n| Parameter | Default | Description |\n|-----------|---------|-------------|\n| `importance_threshold` | 0.5 | Minimum relevance for surfacing |\n| `decay_threshold` | 0.3 | Importance decay trigger |\n| `max_triggers` | 10 | Concurrent trigger limit |\n\n## Summary\n\nThe Five-Index Architecture enables yantrikdb to handle diverse cognitive memory workloads by specializing each index for its access pattern. The HNSW layer provides fast semantic search, Delta absorbs write bursts, Graph manages relationships, Storage ensures durability, and Recall federates queries across all layers. 
This design allows the system to balance latency, throughput, and consistency according to workload characteristics.\n\n---\n\n<a id='page-decoupled-write-path'></a>\n\n## Decoupled Write Path (LSM Architecture)\n\n### 相关页面\n\n相关主题：[Five-Index Architecture](#page-five-index-architecture), [Storage Engine](#page-storage-engine)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [docs/decoupled_write_path_rfc.md](https://github.com/yantrikos/yantrikdb/blob/main/docs/decoupled_write_path_rfc.md)\n- [CONCURRENCY.md](https://github.com/yantrikos/yantrikdb/blob/main/CONCURRENCY.md)\n- [crates/yantrikdb-core/src/vector/delta_index.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/vector/delta_index.rs)\n- [crates/yantrikdb-core/src/engine/materializer.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/materializer.rs)\n</details>\n\n# Decoupled Write Path (LSM Architecture)\n\n## Overview\n\nThe Decoupled Write Path is the core write infrastructure of yantrikdb, implementing a Log-Structured Merge-tree (LSM) architecture that separates write operations from indexing and compaction. This design ensures high write throughput while maintaining read performance through asynchronous background compaction.\n\nThe architecture is built on the principle that write operations must be as fast as possible, deferring expensive vector indexing work to background processes. This decoupled approach prevents write operations from blocking on indexing operations, enabling the system to handle high concurrency workloads without regression.\n\n资料来源：[CONCURRENCY.md]()\n\n## Architecture Components\n\n### DeltaIndex: The Write Buffer\n\nThe `DeltaIndex` is the primary write buffer in the LSM architecture. 
It provides lock-free append operations for new entries and tombstone operations for deletions.\n\n```mermaid\ngraph TD\n    subgraph WritePath[\"Write Path\"]\n        W[Write Request] --> DI[DeltaIndex]\n        DI --> |append| DE[DeltaEntry]\n        DE --> |O1 push| PendingVec[Pending Vec]\n    end\n    \n    subgraph CompactionPath[\"Background Compaction\"]\n        PendingVec --> |seal| CT[Cold Tier]\n        CT --> |clone-rebuild| NH[New HnswIndex]\n        NH --> |install| CurrentCold[Current Cold Tier]\n    end\n    \n    subgraph ReadPath[\"Read Path\"]\n        CurrentCold --> |ArcSwap| RR[Read Replicas]\n    end\n```\n\n**DeltaIndex Write Operations:**\n\n| Operation | Complexity | Lock Type | Description |\n|-----------|-----------|-----------|-------------|\n| `append` | O(1) | `RwLock<Vec<DeltaEntry>>` | Add new entry to pending buffer |\n| `tombstone` | O(1) | `RwLock<Vec<DeltaEntry>>` | Mark entry as deleted |\n| `seal_delta_for_compaction` | O(1) | Brief lock hold | Swap pending entries for compaction |\n| `compact` | O(n) rebuild | No foreground locks | Clone and rebuild cold tier |\n\n资料来源：[crates/yantrikdb-core/src/vector/delta_index.rs]()\n\n### Two-Tier Storage Model\n\nThe storage model consists of two tiers:\n\n1. **Hot Tier (DeltaIndex)**: Contains all recent writes and tombstones not yet compacted\n2. **Cold Tier (HnswIndex)**: Immutable, compacted index containing historical data\n\n```mermaid\ngraph LR\n    subgraph HotTier[\"Hot Tier - DeltaIndex\"]\n        D1[DeltaEntry 1]\n        D2[DeltaEntry 2]\n        D3[DeltaEntry N]\n    end\n    \n    subgraph ColdTier[\"Cold Tier - HnswIndex\"]\n        H1[HnswIndex<br/>immutable]\n        H2[HnswIndex<br/>immutable]\n    end\n    \n    HotTier --> |periodic| ColdTier\n```\n\n**Invariant:** The cold tier MUST use `ArcSwap<HnswIndex>` for lock-free reader access. 
Replacing with `RwLock<HnswIndex>` or `Mutex<HnswIndex>` causes read latency regression.\n\n资料来源：[CONCURRENCY.md]()\n\n## Concurrency Rules\n\nThe write path enforces strict concurrency rules to prevent deadlocks and ensure forward progress under high write load.\n\n### Rule 1: Foreground Writes Must Be O(1)\n\nAll foreground write operations MUST only touch O(1) data structures:\n\n| Allowed Operations | Forbidden Operations |\n|-------------------|---------------------|\n| `DeltaIndex::append` | `HnswIndex::insert` |\n| `DeltaIndex::tombstone` | `HnswIndex::remove` |\n| `assign_seq` (atomic fetch) | `compact()` |\n| `bump_visible_seq` | Any non-O(1) lock acquisition |\n\n资料来源：[CONCURRENCY.md]()\n\n### Rule 2: Background Compaction Isolation\n\nBackground compaction MUST NOT share lock primitives with foreground writes:\n\n```mermaid\nsequenceDiagram\n    participant FW as Foreground Write\n    participant DI as DeltaIndex\n    participant CP as Compactor\n    participant HI as HnswIndex\n    \n    FW->>DI: append(entry)\n    Note over DI: Brief RwLock write<br/>O(1) push\n    \n    CP->>DI: seal_delta_for_compaction()\n    Note over DI: Brief lock for seal\n    \n    CP->>CP: clone cold + sealed entries\n    Note over CP: No locks held here\n    \n    CP->>HI: ArcSwap new index\n    Note over HI: Brief lock for install\n```\n\n**Compactor Responsibilities:**\n\n1. Call `seal_delta_for_compaction()` to get a stable snapshot\n2. Perform HNSW rebuild off the hot path\n3. 
Install new cold tier via `ArcSwap`\n\n资料来源：[CONCURRENCY.md]()\n\n### Rule 3: Visible Sequence Tracking\n\nThe `visible_seq` map tracks the minimum sequence number visible to readers per namespace, enabling read-your-writes (RYW) semantics.\n\n```rust\n// Type: DashMap<String, AtomicU64>\nvisible_seq: DashMap<String, AtomicU64>\n\n// Fast path reads (lock-free)\nget(ns).map(|e| e.load(Acquire))\n\n// Fast path writes\nget(ns).fetch_max(seq, Release)\n```\n\n| Property | Value |\n|----------|-------|\n| Data Structure | `dashmap::DashMap<String, AtomicU64>` |\n| Read Path | Lock-free via `AtomicU64::load(Acquire)` |\n| Write Path | Lock-free via `AtomicU64::fetch_max(Release)` |\n| Scope | Per-namespace |\n\n资料来源：[CONCURRENCY.md]()\n\n## Write Operations\n\n### Standard Write Flow\n\n```mermaid\ngraph TD\n    Start[Write Request] --> Validate{Validate}\n    Validate --> |valid| Seq[assign_seq]\n    Validate --> |invalid| Reject[Reject]\n    \n    Seq --> SQL[SQL SAVEPOINT]\n    SQL --> Delta[DeltaIndex::append]\n    Delta --> Bump[bump_visible_seq]\n    Bump --> Commit[Commit Transaction]\n    Commit --> Done[Return to Client]\n    \n    Reject --> Fail[Return Error]\n```\n\n### Record With RID Pattern\n\nAll write operations follow the `record_with_rid` pattern:\n\n```rust\n// Pattern for all write primitives\nfn write_operation(&self, ...) -> Result<RecordId> {\n    // 1. SQL with SAVEPOINT for rollback\n    let rid = sql_transaction(|| {\n        // 2. Append to DeltaIndex (O(1) push)\n        self.delta_index.append(entry)?;\n        Ok(assigned_rid)\n    })?;\n    \n    // 3. 
Bump visible sequence\n    self.bump_visible_seq(namespace, seq)?;\n    \n    Ok(rid)\n}\n```\n\n资料来源：[CONCURRENCY.md]()\n\n### Sequence Number Assignment\n\nThe `assign_seq` function uses atomic operations for lock-free sequence generation:\n\n```rust\n// Atomic fetch_add or fetch_max\nlet seq = self\n    .seq_counter\n    .fetch_add(1, std::sync::atomic::Ordering::Relaxed);\n```\n\nThis ensures each write receives a unique, monotonically increasing sequence number without contention.\n\n## Compaction Process\n\n### Compaction Lifecycle\n\n```mermaid\ngraph LR\n    subgraph Phase1[\"Phase 1: Seal\"]\n        A[Active Delta] --> B[Seal Delta]\n        B --> C[Frozen Snapshot]\n    end\n    \n    subgraph Phase2[\"Phase 2: Rebuild\"]\n        C --> D[Clone Cold Hnsw]\n        D --> E[Merge Sealed Entries]\n        E --> F[Build New Hnsw]\n    end\n    \n    subgraph Phase3[\"Phase 3: Install\"]\n        F --> G[ArcSwap Install]\n        G --> H[New Current Cold]\n    end\n```\n\n### Compaction Rules\n\n| Rule | Description |\n|------|-------------|\n| Lock Isolation | Compactor holds `delta` RwLock only for seal and install |\n| No Hot Locks | Between seal and install, NO locks shared with foreground |\n| ArcSwap | Cold tier replacement uses atomic pointer swap |\n| Snapshot | `seal_delta_for_compaction()` returns stable Arc snapshot |\n\n资料来源：[CONCURRENCY.md]()\n\n### Compaction Triggers\n\nThe system triggers compaction based on configurable policies:\n\n| Trigger Type | Default Cooldown | Default Expiry |\n|-------------|-----------------|----------------|\n| DecayReview | 3 days | 7 days |\n| ConsolidationReady | 1 day | 3 days |\n| ConflictEscalation | 2 days | 14 days |\n| Redundancy | 1 day | 7 days |\n| PatternDiscovered | 7 days | 7 days |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs]()\n\n## Read-Your-Writes Semantics\n\n### Recall With Sequence\n\nThe `recall_with_seq` method enables clients to wait for their writes to become visible:\n\n```rust\npub 
fn recall_with_seq(\n    &self,\n    query_embedding: &[f32],\n    top_k: usize,\n    min_seq: u64,           // Sequence from write operation\n    namespace: Option<&str>,\n    timeout: Duration,\n) -> Result<Vec<RecallResult>> {\n    let ns = namespace.unwrap_or(\"default\");\n    \n    // Wait for visible_seq to reach min_seq\n    self.wait_for_visible_seq(ns, min_seq, timeout)?;\n    \n    // Safe to recall - all writes up to min_seq are visible\n    self.recall(query_embedding, top_k, ...)\n}\n```\n\n| Parameter | Type | Description |\n|-----------|------|-------------|\n| `min_seq` | `u64` | Minimum sequence number client observed |\n| `namespace` | `Option<&str>` | Target namespace (required for correct RYW) |\n| `timeout` | `Duration` | Maximum wait time |\n\n资料来源：[crates/yantrikdb-core/src/engine/recall.rs]()\n\n### Visible Sequence Wait\n\n```mermaid\nsequenceDiagram\n    participant C as Client\n    participant VS as VisibleSeq Map\n    participant REC as Recall Engine\n    \n    C->>VS: load current seq for namespace\n    Note over VS: AtomicU64 load\n    VS-->>C: current_seq\n    \n    alt current_seq < min_seq\n        C->>C: wait_for_visible_seq()\n        loop until visible or timeout\n            C->>VS: load current seq\n            VS-->>C: current_seq\n        end\n    end\n    \n    C->>REC: recall(...)\n    REC-->>C: Results (guaranteed visible)\n```\n\n## Materializer Integration\n\nThe materializer component coordinates between the write path and the cognitive layer, processing update operations extracted from natural language input.\n\n```mermaid\ngraph TD\n    subgraph Input[\"Input Processing\"]\n        NL[Natural Language] --> EX[Extractor]\n        EX --> OT[Operation Templates]\n    end\n    \n    subgraph Write[\"Write Path\"]\n        OT --> UW[UpdateOps]\n        UW --> DI[DeltaIndex]\n        DI --> SEQ[Sequence Assignment]\n        SEQ --> VS[VisibleSeq Update]\n    end\n    \n    subgraph Cognitive[\"Cognitive Layer\"]\n        
VS --> MAT[Materializer]\n        MAT --> ST[State Update]\n        ST --> GP[Graph Propagation]\n    end\n```\n\n资料来源：[crates/yantrikdb-core/src/engine/materializer.rs]()\n资料来源：[crates/yantrikdb-core/src/cognition/extractor.rs]()\n\n## Configuration\n\n### ThinkConfig Parameters\n\nThe cognition loop configuration affects compaction behavior:\n\n| Parameter | Description | Impact |\n|-----------|-------------|--------|\n| `importance_threshold` | Minimum importance for processing | Filters low-value nodes |\n| `decay_threshold` | Decay rate trigger | Affects when entries move to cold |\n| `max_triggers` | Maximum triggers per cycle | Limits resource usage |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs]()\n\n### Memory Query Options\n\nThe Python bindings expose configuration for recall operations:\n\n```rust\n#[pyo3(signature = (\n    query=None, embedding=None, top_k=10, memory_type=None, namespace=None,\n    time_window=None, expand_entities=false, include_consolidated=false,\n    skip_reinforce=false, domain=None, source=None\n))]\n```\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `query` | `Option<&str>` | `None` | Text query for semantic search |\n| `embedding` | `Option<Vec<f32>>` | `None` | Pre-computed embedding vector |\n| `top_k` | `usize` | `10` | Number of results to return |\n| `memory_type` | `Option<&str>` | `None` | Filter by memory type |\n| `namespace` | `Option<&str>` | `None` | Target namespace |\n| `include_consolidated` | `bool` | `false` | Include cold tier results |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs]()\n\n## Performance Characteristics\n\n### Write Path Guarantees\n\n| Metric | Guarantee |\n|--------|-----------|\n| Write Latency | O(1) for DeltaIndex append |\n| Contention | Lock-free sequence assignment |\n| Durability | SQL SAVEPOINT + DeltaIndex |\n| Visibility | Guaranteed via visible_seq |\n\n### Compaction Guarantees\n\n| Metric | Guarantee 
|\n|--------|-----------|\n| Lock Duration | O(1) for seal and install |\n| Hot Path Impact | Zero locks during rebuild |\n| Reader Impact | ArcSwap provides instant switch |\n| Memory | Clone-on-write for cold tier |\n\n### Read Path Guarantees\n\n| Metric | Guarantee |\n|--------|-----------|\n| Read Latency | Lock-free via ArcSwap cold tier |\n| Consistency | Read-your-writes via visible_seq |\n| Namespace Isolation | Per-namespace sequence tracking |\n\n## Error Handling\n\n### Conflict Resolution\n\nThe system tracks conflicts between memories for resolution:\n\n```rust\npub struct Conflict {\n    pub conflict_id: String,\n    pub conflict_type: String,        // identity_fact, preference, temporal\n    pub priority: String,              // critical, high, medium, low\n    pub memory_a: String,\n    pub memory_b: String,\n    pub entity: Option<String>,\n    pub detected_at: f64,\n    pub resolution_note: Option<String>,\n}\n```\n\n| Conflict Type | Default Priority |\n|--------------|------------------|\n| IdentityFact | critical |\n| Preference | high |\n| Temporal | high |\n| Consolidation | medium |\n| Minor | low |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs]()\n\n## Related Documentation\n\n- [Concurrency Rules](https://github.com/yantrikos/yantrikdb/blob/main/CONCURRENCY.md) - Detailed concurrency invariants\n- [Decoupled Write Path RFC](https://github.com/yantrikos/yantrikdb/blob/main/docs/decoupled_write_path_rfc.md) - Design rationale\n- [DeltaIndex Implementation](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/vector/delta_index.rs) - Source code\n- [Materializer](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/materializer.rs) - Write coordination\n\n---\n\n<a id='page-storage-engine'></a>\n\n## Storage Engine\n\n### 相关页面\n\n相关主题：[Five-Index Architecture](#page-five-index-architecture), [Decoupled Write Path (LSM 
Architecture)](#page-decoupled-write-path)\n\n<details>\n<summary>Relevant Source Files</summary>\n\nThe following source files were used to generate this documentation:\n\n- [crates/yantrikdb-core/src/engine/lifecycle.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/lifecycle.rs)\n- [crates/yantrikdb-core/src/engine/stats.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/stats.rs)\n- [crates/yantrikdb-python/src/py_engine/memory.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-python/src/py_engine/memory.rs)\n- [crates/yantrikdb-core/src/cognition/state.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n- [crates/yantrikdb-core/src/base/types.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/base/types.rs)\n- [crates/yantrikdb-core/src/distributed/conflict.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/distributed/conflict.rs)\n</details>\n\n# Storage Engine\n\n## Overview\n\nThe YantrikDB Storage Engine is the core persistence layer responsible for storing, retrieving, and managing memory data in the SQLite database. 
It handles encrypted text storage, metadata management, storage tier organization, and integrates with the materialization subsystem for asynchronous processing of memory operations.\n\nThe storage engine operates as part of the broader engine module and maintains close integration with:\n\n- The **Record System** for writing memories\n- The **Recall System** for querying memories\n- The **Materialization Pipeline** for asynchronous operation processing\n- The **Encryption Layer** for secure text storage\n\n## Core Data Models\n\n### Memory Structure\n\nThe central data structure managed by the storage engine is the `Memory` struct, which encapsulates all attributes of a stored memory:\n\n```rust\npub struct Memory {\n    pub rid: String,                      // Unique record identifier\n    pub memory_type: String,             // episodic, semantic, procedural, etc.\n    pub text: String,                    // Decrypted memory content\n    pub created_at: f64,                 // Creation timestamp\n    pub importance: f64,                 // Importance score [0.0, 1.0]\n    pub valence: f64,                    // Emotional valence [-1.0, 1.0]\n    pub half_life: f64,                  // Decay half-life in seconds\n    pub last_access: f64,                // Last access timestamp\n    pub access_count: u32,               // Number of times accessed\n    pub consolidation_status: String,     // Current consolidation state\n    pub storage_tier: String,             // hot, warm, cold, frozen\n    pub consolidated_into: Option<String>,// RID of consolidated memory\n    pub metadata: serde_json::Value,     // Encrypted JSON metadata\n    pub namespace: String,               // Logical namespace partition\n    pub certainty: f64,                  // Belief certainty [0.0, 1.0]\n    pub domain: String,                   // Domain classification\n    pub source: String,                   // Provenance source type\n    pub emotional_state: Option<String>, // Associated 
emotional context\n    pub session_id: Option<String>,      // Session identifier\n    pub due_at: Option<f64>,             // Due timestamp for tasks\n    pub temporal_kind: Option<String>,   // Temporal classification\n}\n```\n\n资料来源：[engine/lifecycle.rs:200-225]()\n\n### Storage Tiers\n\nYantrikDB implements a tiered storage architecture to optimize memory access patterns:\n\n| Tier | Purpose | Access Pattern |\n|------|---------|----------------|\n| `hot` | Frequently accessed memories | In-memory cache priority |\n| `warm` | Regular operational memories | Standard retrieval |\n| `cold` | Archival memories | Lazy loading |\n| `frozen` | Long-term storage | Minimal access |\n\n资料来源：[engine/lifecycle.rs:214]()\n\n### Consolidation Status\n\nMemories maintain a consolidation status indicating their state in the memory consolidation lifecycle:\n\n| Status | Description |\n|--------|-------------|\n| `observed` | Raw observation, no consolidation |\n| `inferred` | Pattern-based inference |\n| `told` | Explicitly stated by user |\n| `experimented` | Confirmed via experiment |\n| `extracted` | Extracted from external documents |\n| `consolidated` | Merged from multiple sources |\n| `system_default` | System-provided default |\n\nEach status carries a reliability prior that affects belief revision:\n\n```rust\npub fn reliability_prior(self) -> f64 {\n    match self {\n        Self::Told => 0.95,          // User explicitly stated\n        Self::Observed => 0.90,      // Directly observed\n        Self::Experimented => 0.85,  // Controlled experiment\n        Self::Extracted => 0.75,     // External documents\n        Self::Inferred => 0.60,      // Pattern inference\n        Self::Consolidated => 0.80,  // Multi-source merge\n        Self::SystemDefault => 0.50, // Defaults\n    }\n}\n```\n\n资料来源：[cognition/state.rs:180-192]()\n\n## Storage Architecture\n\n### High-Level Architecture\n\n```mermaid\ngraph TD\n    subgraph \"Python API Layer\"\n        
PYM[py_engine/memory.rs]\n    end\n    \n    subgraph \"Engine Core\"\n        REC[Record System]\n        RCL[Recall System]\n        MAT[Materialization Pipeline]\n    end\n    \n    subgraph \"Storage Layer\"\n        ENG[Engine Instance]\n        SQL[(SQLite Database)]\n        CRE[Encryption Layer]\n    end\n    \n    PYM --> REC\n    PYM --> RCL\n    REC --> ENG\n    REC --> MAT\n    MAT --> ENG\n    ENG --> SQL\n    ENG --> CRE\n    CRE --> SQL\n```\n\n### Encryption Integration\n\nText fields are encrypted before storage and decrypted on retrieval to ensure data privacy:\n\n```rust\nlet text = self.decrypt_text(&row.2)?;\nlet meta_str = self.decrypt_text(&row.12)?;\nlet metadata: serde_json::Value = serde_json::from_str(&meta_str)\n    .unwrap_or(serde_json::Value::Object(Default::default()));\n```\n\n资料来源：[engine/lifecycle.rs:210-214]()\n\n## Record Operations\n\n### Recording a Memory\n\nThe storage engine provides the `record()` method for storing new memories with automatic embedding:\n\n```rust\ndb.record(\n    text,           // Memory content\n    memory_type,    // episodic, semantic, etc.\n    importance,     // Importance score\n    valence,        // Emotional valence\n    half_life,      // Decay half-life\n    &meta,          // JSON metadata\n    &emb,           // Embedding vector\n    namespace,      // Logical partition\n    certainty,      // Belief certainty\n    domain,         // Domain classification\n    source,         // Provenance source\n    emotional_state, // Emotional context\n)\n```\n\n资料来源：[py_engine/memory.rs:45-60]()\n\n### Python Bindings\n\nThe Python API exposes record functionality through `py_engine/memory.rs`:\n\n```python\n# Record a memory with auto-embedding\ndb.record(\n    text=\"Meeting with John at 3pm\",\n    memory_type=\"episodic\",\n    namespace=\"work\",\n    importance=0.8,\n    valence=0.5,\n)\n```\n\nThe `record()` method accepts these parameters:\n\n| Parameter | Type | Required | Description 
|\n|-----------|------|----------|-------------|\n| `text` | `str` | Yes | Memory content |\n| `embedding` | `Vec<f32>` | No | Pre-computed embedding (auto-generated if None) |\n| `memory_type` | `str` | No | Type classification |\n| `namespace` | `str` | No | Logical partition |\n| `importance` | `float` | No | Importance score (0.0-1.0) |\n| `valence` | `float` | No | Emotional valence (-1.0 to 1.0) |\n| `half_life` | `float` | No | Decay half-life in seconds |\n| `metadata` | `dict` | No | Additional JSON metadata |\n| `certainty` | `float` | No | Belief certainty |\n| `domain` | `str` | No | Domain classification |\n| `source` | `str` | No | Provenance source |\n| `emotional_state` | `str` | No | Emotional context |\n\n资料来源：[py_engine/memory.rs:30-60]()\n\n## Recall Operations\n\n### Querying Memories\n\nThe recall system retrieves memories based on embedding similarity and filters:\n\n```rust\ndb.recall(\n    &emb,                    // Query embedding\n    top_k,                   // Number of results\n    time_window,             // Optional time filter\n    memory_type,             // Type filter\n    include_consolidated,    // Include consolidated memories\n    expand_entities,         // Expand entity references\n    query,                   // Optional text query\n    skip_reinforce,          // Skip reinforcement learning\n    namespace,               // Namespace filter\n    domain,                  // Domain filter\n    source,                  // Source filter\n)\n```\n\n资料来源：[py_engine/memory.rs:105-120]()\n\n### Recall Query Builder\n\nThe `RecallQuery` struct provides a fluent interface for building recall queries:\n\n```rust\nlet mut q = yantrikdb_core::RecallQuery::new(emb).top_k(top_k);\nif let Some(mt) = memory_type {\n    q = q.memory_type(mt);\n}\nif let Some(ns) = namespace {\n    q = q.namespace(ns);\n}\nif let Some(tw) = time_window {\n    q = q.time_window(tw.0, tw.1);\n}\n```\n\n资料来源：[py_engine/memory.rs:145-155]()\n\n### Recall 
Parameters\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `query` | `Option<&str>` | `None` | Text query |\n| `query_embedding` | `Option<Vec<f32>>` | `None` | Pre-computed embedding |\n| `top_k` | `usize` | `10` | Number of results |\n| `time_window` | `Option<(f64, f64)>` | `None` | Time range filter |\n| `memory_type` | `Option<&str>` | `None` | Memory type filter |\n| `include_consolidated` | `bool` | `false` | Include consolidated |\n| `expand_entities` | `bool` | `true` | Expand entity references |\n| `skip_reinforce` | `bool` | `false` | Skip reinforcement |\n| `namespace` | `Option<&str>` | `None` | Namespace filter |\n| `domain` | `Option<&str>` | `None` | Domain filter |\n| `source` | `Option<&str>` | `None` | Source filter |\n\n资料来源：[py_engine/memory.rs:65-80]()\n\n## Materialization Pipeline\n\n### Overview\n\nThe materialization pipeline handles asynchronous processing of memory operations to ensure durability and consistency. It operates in phases:\n\n```mermaid\ngraph LR\n    A[Write Ops] --> B[Phase 3: Record/Rollback]\n    B --> C[Phase 4.1: Materialize Pending]\n    C --> D[Phase 4.2: Apply Updates]\n    D --> E[Phase 4.3: Post-Record Materialization]\n    E --> F[Applied]\n```\n\n### Phase 3: Record Materialization\n\nHandles synchronous recording of operations with rollback capability:\n\n```rust\n\"record\" | \"forget\" | \"relate\" | \"correct\" | \"consolidate\" => {\n    tracing::trace!(\n        target: \"yantrikdb::ingest::materialize\",\n        op_id = %op_id,\n        op_type = %op_type,\n        \"phase 3 stub: marking pending op as applied without inline materialization\"\n    );\n    if self.mark_op_applied(op_id)? 
{\n        applied += 1;\n    }\n}\n```\n\n资料来源：[engine/stats.rs:95-105]()\n\n### Phase 4.1-4.2: Update Operations\n\nFor update operations like task completion and status changes:\n\n```rust\n\"create_task\" | \"update_task_status\" | \"create_goal\" | \"update_goal\" \n| \"record_belief\" | \"relate_belief\" | \"update_preference\" \n| \"record_need\" | \"record_emotion\" => {\n    // Attempt materialization\n    match materialize_fn(payload) {\n        Ok(()) => {\n            if self.mark_op_applied(op_id)? {\n                applied += 1;\n            }\n        }\n        Err(e) => {\n            tracing::warn!(\n                target: \"yantrikdb::ingest::materialize\",\n                op_id = %op_id,\n                error = %e,\n                \"post-record-with-rid materialization failed; leaving pending for retry\"\n            );\n        }\n    }\n}\n```\n\n资料来源：[engine/stats.rs:70-90]()\n\n### Phase 4.3: Post-Record Materialization\n\nHandles entity and relation extraction that runs on the materializer thread to avoid blocking the foreground caller:\n\n> Mirrors the post-INSERT entity/relation extraction loop that used to live on the foreground `record()` path. 
Now runs on the materializer thread so the foreground caller is not blocked on the unbounded loop count.\n\n资料来源：[engine/stats.rs:130-135]()\n\n## Conflict Detection\n\nThe storage engine integrates with the conflict detection system for distributed scenarios:\n\n```rust\n// Phase 2: Evaluate each candidate pair with policy awareness\nfor (src, rel_type, dst1, dst2, vf1, vt1, vf2, vt2, namespace) in &candidates {\n    if conflicts.len() >= max_conflicts {\n        break;\n    }\n\n    // RFC 006 Phase 3: check relation policy before flagging\n    let policy: Option<(bool, bool, String)> = {\n        let conn = db.conn();\n        conn.query_row(\n            \"SELECT overlap_allowed, temporal_required, missing_time_severity \\\n             FROM relation_policies \\\n             WHERE relation_type = ?1 AND (namespace = ?2 OR namespace = '*') \\\n             ORDER BY CASE WHEN namespace = ?2 THEN 0 ELSE 1 END \\\n             LIMIT 1\",\n            params![rel_type, namespace],\n            |row| { ... 
}\n        )\n    };\n}\n```\n\n资料来源：[distributed/conflict.rs:85-105]()\n\n### Conflict Types\n\n| Type | Priority | Description |\n|------|----------|-------------|\n| `identity_fact` | Critical | Contradiction in core facts |\n| `preference` | High | Preference conflict |\n| `temporal` | High | Time-based conflict |\n| `consolidation` | Medium | Consolidation conflict |\n| `minor` | Low | Minor inconsistency |\n\n资料来源：[base/types.rs:50-60]()\n\n## Retrieval by ID\n\nThe `get_memory_by_rid()` method retrieves a specific memory by its record ID:\n\n```rust\npub fn get_memory_by_rid(&self, rid: &str) -> Result<Option<Memory>> {\n    let result = conn.query_row(\n        \"SELECT rid, memory_type, text, created_at, importance, valence,\n                half_life, last_access, access_count, consolidation_status,\n                storage_tier, consolidated_into, metadata, namespace,\n                certainty, domain, source, emotional_state, session_id,\n                due_at, temporal_kind\n         FROM memories WHERE rid = ?1\",\n        params![rid],\n        |row| Ok((...))  // 21 columns mapped\n    )?;\n    \n    // Decrypt and deserialize\n    let text = self.decrypt_text(&row.2)?;\n    let meta_str = self.decrypt_text(&row.12)?;\n    let metadata: serde_json::Value = serde_json::from_str(&meta_str)?;\n    \n    Ok(Some(Memory { ... 
}))\n}\n```\n\n资料来源：[engine/lifecycle.rs:195-225]()\n\n## Query Interface\n\nThe storage engine provides a flexible query interface combining text and embedding search:\n\n```python\n# Query memories with combined text and embedding search\nresults = db.query(\n    query=\"team meeting\",\n    embedding=None,  # Auto-generate from query\n    top_k=10,\n    memory_type=\"episodic\",\n    namespace=\"work\",\n    time_window=(start_ts, end_ts),\n    expand_entities=True,\n    include_consolidated=False,\n)\n```\n\n资料来源：[py_engine/memory.rs:85-100]()\n\n### Query vs Recall\n\n| Aspect | `query()` | `recall()` |\n|--------|-----------|------------|\n| Purpose | Combined text + embedding search | Pure embedding similarity |\n| Use Case | Exploratory queries | Memory association |\n| Parameters | Query text or embedding | Primarily embedding |\n| Filters | Full filter suite | Full filter suite |\n\n## Error Handling\n\nThe storage engine uses Rust's `Result` type for error handling with the following patterns:\n\n```rust\n.ok_or_else(|| PyRuntimeError::new_err(\"YantrikDB is closed\"))\n```\n\nErrors are propagated through the Python bindings using the `map_err` function which converts Rust errors to Python exceptions.\n\n资料来源：[py_engine/memory.rs:40]()\n\n## Performance Considerations\n\n### Memory Retrieval Optimization\n\n1. **Encryption on-demand**: Text fields are only decrypted when accessed\n2. **Lazy metadata parsing**: JSON metadata is parsed only when needed\n3. **Storage tiering**: Frequently accessed memories can be promoted to hot tier\n4. 
**Consolidation filtering**: `include_consolidated=false` skips consolidation lookups\n\n### Asynchronous Materialization\n\nPost-record operations run on a background materializer thread to prevent foreground blocking:\n\n> Now runs on the materializer thread so the foreground caller is not blocked on the unbounded loop count (5-15...)\n\n资料来源：[engine/stats.rs:135-138]()\n\n## Related Systems\n\n| System | Integration Point | Purpose |\n|--------|-------------------|---------|\n| Record System | `record()` | Memory creation |\n| Recall System | `recall()`, `query()` | Memory retrieval |\n| Materialization | `materialize_ops()` | Async operation processing |\n| Conflict Detection | `relation_policies` table | Distributed consistency |\n| Encryption | `decrypt_text()` | Data security |\n\n## API Reference Summary\n\n### Core Methods\n\n| Method | File | Purpose |\n|--------|------|---------|\n| `record()` | py_engine/memory.rs | Store new memory |\n| `recall()` | py_engine/memory.rs | Retrieve by embedding |\n| `query()` | py_engine/memory.rs | Combined search |\n| `get_memory_by_rid()` | engine/lifecycle.rs | Lookup by ID |\n| `materialize_ops()` | engine/stats.rs | Process pending ops |\n\n### Data Structures\n\n| Struct | File | Purpose |\n|--------|------|---------|\n| `Memory` | engine/lifecycle.rs | Core memory representation |\n| `RecallQuery` | py_engine/memory.rs | Query builder |\n| `Conflict` | base/types.rs | Conflict representation |\n\n---\n\n<a id='page-core-api'></a>\n\n## Core API Reference\n\n### 相关页面\n\n相关主题：[Overview](#page-overview), [Cognition Layer](#page-cognition-layer), [Conflict Detection and Resolution](#page-conflict-resolution)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [crates/yantrikdb-core/src/engine/mod.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/mod.rs)\n- 
[crates/yantrikdb-core/src/engine/record.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/record.rs)\n- [crates/yantrikdb-core/src/engine/recall.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/recall.rs)\n- [crates/yantrikdb-core/src/engine/graph_ops.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/graph_ops.rs)\n- [crates/yantrikdb-core/src/base/scoring.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/base/scoring.rs)\n</details>\n\n# Core API Reference\n\nThis page documents the core engine API of yantrikdb, covering the fundamental operations for memory storage, retrieval, graph relationships, and cognitive processing.\n\n## Overview\n\nThe Core API provides the foundational primitives for building personal memory systems. It consists of four primary subsystems:\n\n| Subsystem | Purpose |\n|-----------|---------|\n| **Record** | Store memories with importance, valence, and metadata |\n| **Recall** | Semantic search and retrieval using embeddings |\n| **Graph** | Relationship management between memory nodes |\n| **Cognitive** | Higher-order reasoning (think loop, conflict detection) |\n\n资料来源：[crates/yantrikdb-core/src/engine/mod.rs]()\n\n### Architecture Overview\n\n```mermaid\ngraph TD\n    A[User Input] --> B[Record API]\n    A --> C[Recall API]\n    A --> D[Graph API]\n    B --> E[(SQLite Storage)]\n    C --> E\n    D --> E\n    E --> F[Cognitive Engine]\n    F --> G[Think Loop]\n    G --> H[Consolidation]\n    G --> I[Conflict Detection]\n    G --> J[Pattern Mining]\n```\n\n## Memory Data Model\n\n### Core Memory Structure\n\nThe fundamental unit of storage in yantrikdb is the `Memory` struct, defined in `lifecycle.rs`:\n\n| Field | Type | Description |\n|-------|------|-------------|\n| `rid` | `String` | Unique resource identifier |\n| `memory_type` | `String` | Type classification (episodic, semantic, procedural) 
|\n| `text` | `String` | The actual memory content (encrypted at rest) |\n| `created_at` | `f64` | Unix timestamp of creation |\n| `importance` | `f64` | Significance score [0.0, 1.0] |\n| `valence` | `f64` | Emotional valence [-1.0, 1.0] |\n\n---\n\n<a id='page-cognition-layer'></a>\n\n## Cognition Layer\n\n### 相关页面\n\n相关主题：[Core API Reference](#page-core-api), [Conflict Detection and Resolution](#page-conflict-resolution)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [crates/yantrikdb-core/src/cognition/state.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n- [crates/yantrikdb-core/src/cognition/query_dsl.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/query_dsl.rs)\n- [crates/yantrikdb-core/src/engine/query_dsl.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/query_dsl.rs)\n- [crates/yantrikdb-core/src/cognition/narrative.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/narrative.rs)\n- [crates/yantrikdb-core/src/cognition/receptivity.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/receptivity.rs)\n- [crates/yantrikdb-core/src/cognition/surfacing.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/surfacing.rs)\n- [crates/yantrikdb-core/src/cognition/extractor.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/extractor.rs)\n- [crates/yantrikdb-core/src/base/types.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/base/types.rs)\n</details>\n\n# Cognition Layer\n\nThe Cognition Layer is the reasoning and knowledge management subsystem of yantrikdb. 
It provides cognitive operations for belief revision, goal planning, intent recognition, pattern detection, and proactive user assistance. The layer orchestrates a pipeline of cognitive operators that process user interactions, external observations, and system events to maintain a dynamic model of user needs, goals, and preferences.\n\n## Architecture Overview\n\nThe Cognition Layer operates as a staged pipeline that transforms raw observations into structured cognitive entities (beliefs, goals, tasks, routines, intents) and surfaces actionable insights to the user at appropriate moments.\n\n```mermaid\ngraph TD\n    subgraph Input\n        Obs[User Observation] --> Extr[Extractor]\n        Ev[Evidence Input] --> Extr\n    end\n    \n    subgraph \"Cognitive Pipeline\"\n        Extr --> Ops[Operator Pipeline]\n        Ops --> Attend[Attend]\n        Ops --> Recall[Recall]\n        Ops --> Believe[Believe]\n        Ops --> Compare[Compare]\n        Ops --> Plan[Plan]\n        Ops --> Project[Project]\n        Ops --> Anticipate[Anticipate]\n        Ops --> Assess[Assess]\n        Ops --> Coherence[Coherence Check]\n    end\n    \n    subgraph \"Working Memory\"\n        Attend --> WS[Working Set]\n        Recall --> WS\n    end\n    \n    subgraph \"Long-term Store\"\n        WS <--> KG[Knowledge Graph]\n        KG --> Beliefs[Beliefs]\n        KG --> Goals[Goals]\n        KG --> Routines[Routines]\n    end\n    \n    subgraph \"Output\"\n        Coherence --> Surf[Surfacing]\n        Surf --> Suggest[Proactive Suggestion]\n        Surf --> SurfaceMode[Surface Modes]\n    end\n```\n\n## Node Types\n\nThe Cognition Layer manages a graph of cognitive nodes, each representing a distinct aspect of user state and knowledge.\n\n### NodeKind Classification\n\n| Kind | Description | Persistence | Typical Confidence | Typical Activation |\n|------|-------------|-------------|-------------------|-------------------|\n| Entity | Real-world objects, people, concepts | Yes | 
0.70 | 0.70 |\n| Episode | Past experiences and events | Yes | 0.70 | 0.60 |\n| Belief | User's mental models and facts | Yes | 0.70 | 0.70 |\n| Goal | Desired outcomes | Yes | 0.85 | 0.75 |\n| Task | Concrete action items | Yes | 0.90 | 0.80 |\n| IntentHypothesis | Inferred user wants (transient) | No | 0.60 | 0.50 |\n| Routine | Recurring behavioral patterns | Yes | 0.70 | 0.50 |\n| Need | User requirements (Maslow-based) | Yes | 0.60 | 0.70 |\n| Opportunity | Time-bounded chances for action | Yes | 0.40 | 0.60 |\n| Risk | Potential problems | Yes | 0.40 | 0.70 |\n| Constraint | Boundaries and rules | Yes | 0.90 | 0.80 |\n| Preference | User choices and inclinations | Yes | 0.60 | 0.50 |\n| ConversationThread | Dialogue context (transient) | No | 0.90 | 0.80 |\n| ActionSchema | Reusable action templates | Yes | 0.70 | 0.40 |\n\n资料来源：[state.rs:350-370](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n### Cognitive Attributes\n\nEvery cognitive node carries a universal attribute set defining its dynamic state:\n\n| Attribute | Range | Description |\n|-----------|-------|-------------|\n| confidence | [0.0, 1.0] | Trust level in the node's accuracy |\n| activation | [0.0, 1.0] | Current spreading activation energy |\n| salience | [0.0, 1.0] | Prominence in user's attention |\n| persistence | [0.0, 1.0] | How long this node stays relevant |\n| valence | [-1.0, 1.0] | Emotional tone (negative to positive) |\n| urgency | [0.0, 1.0] | Time-critical nature |\n| novelty | [0.0, 1.0] | How surprising/unexpected (decays with repetition) |\n| volatility | [0.0, 1.0] | Rate of attribute change |\n| evidence_count | u32 | Number of supporting observations |\n| provenance | ProvenanceType | Source reliability of this node |\n\n资料来源：[state.rs:280-330](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n## Provenance System\n\nThe provenance system tracks the source and reliability of 
cognitive nodes, enabling appropriate trust calibration during reasoning.\n\n### Provenance Types\n\n| Type | Reliability Prior | Description |\n|------|------------------|-------------|\n| Told | 0.95 | User explicitly stated |\n| Observed | 0.90 | Directly observed behavior |\n| Experimented | 0.85 | Confirmed via controlled experiment |\n| Consolidated | 0.80 | Merged from multiple sources |\n| Extracted | 0.75 | From external documents |\n| Inferred | 0.60 | Pattern-based inference |\n| SystemDefault | 0.50 | Default values (weakest) |\n\n资料来源：[state.rs:220-240](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n## Edge Types and Activation Spreading\n\nRelationships between cognitive nodes are represented as typed edges with associated activation transfer coefficients that govern spreading activation dynamics.\n\n| Edge Type | Transfer | Description |\n|-----------|----------|-------------|\n| Causes | 0.8 | Strong causal relationship |\n| Supports | 0.7 | Confirms or strengthens target |\n| Triggers | 0.7 | Initiates target activation |\n| AdvancesGoal | 0.6 | Progresses toward goal |\n| Requires | 0.5 | Prerequisite relationship |\n| Predicts | 0.4 | Anticipatory relationship |\n| SubtaskOf | 0.4 | Decomposition hierarchy |\n| AssociatedWith | 0.3 | Weak contextual link |\n| SimilarTo | 0.3 | Analogy relationship |\n| InstanceOf | 0.3 | Classification relationship |\n| PartOf | 0.3 | Compositional relationship |\n| Prefers | 0.3 | Preference indicator |\n| PrecedesTemporally | 0.2 | Temporal ordering |\n| Contradicts | -0.4 | Mutual exclusion |\n| Prevents | -0.6 | Active blocking |\n| BlocksGoal | -0.7 | Prevents goal achievement |\n| Avoids | -0.5 | Negative preference |\n| Constrains | -0.5 | Imposes limitation |\n\n资料来源：[state.rs:150-180](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n## Cognitive Operators\n\nThe reasoning pipeline executes a sequence of 
cognitive operators in priority order. Each operator performs a specific reasoning function.\n\n### Operator Priorities\n\n| Operator | Priority | Rationale |\n|----------|----------|-----------|\n| Attend | 10 | Foundation — always run |\n| Recall | 9 | Critical for context |\n| Believe | 8 | Evidence integration |\n| Compare | 7 | Action selection |\n| Constrain | 7 | Safety — always run if comparing |\n| Plan | 6 | Means-ends reasoning |\n| Project | 5 | Forward simulation |\n| Anticipate | 4 | Proactive — nice to have |\n| Assess | 3 | Meta — can skip under pressure |\n| CoherenceCheck | 2 | Maintenance — skip if budget tight |\n\n资料来源：[query_dsl.rs:40-55](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/query_dsl.rs)\n\n### Attend Operator\n\nThe Attend operator focuses attention on seed nodes and propagates activation through the knowledge graph.\n\n```rust\npub struct AttendOp {\n    pub seeds: Vec<NodeId>,      // Starting nodes for activation\n    pub max_hops: u32,           // Maximum propagation depth\n    pub decay: f64,              // Activation decay per hop\n}\n```\n\n资料来源：[query_dsl.rs:65-70](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/query_dsl.rs)\n\n**Execution behavior:**\n- Seeds receive a +0.3 activation boost (capped at 1.0)\n- Activation spreads through edges with configurable decay\n- Returns count of activated nodes and top-activated node list\n\n资料来源：[engine/query_dsl.rs:180-210](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/query_dsl.rs)\n\n### Recall Operator\n\nThe Recall operator retrieves relevant memories from long-term storage into the working set.\n\n```rust\npub struct RecallOp {\n    pub top_k: usize,           // Maximum results\n    pub query: Option<String>,  // Text query\n    pub domain: Option<String>, // Filter by domain\n}\n```\n\n### Believe Operator\n\nThe Believe operator integrates new evidence into the 
belief system using Bayesian revision.\n\n```rust\npub struct BelieveOp {\n    pub evidence: EvidenceInput,  // New observation to integrate\n}\n\npub struct EvidenceInput {\n    pub target: Option<NodeId>,   // Target belief or create new\n    pub observation: String,       // The evidence\n    pub direction: f64,           // +1 = confirming, -1 = contradicting\n}\n```\n\n### Compare Operator\n\nCompares candidate actions or beliefs against constraints and preferences to select optimal choices.\n\n### Plan Operator\n\nExecutes means-ends reasoning to generate action sequences that advance specified goals.\n\n### Project Operator\n\nPerforms forward simulation to predict outcomes of potential action sequences.\n\n### Anticipate Operator\n\nIdentifies opportunities and risks based on current state and detected patterns.\n\n### Assess Operator\n\nEvaluates overall system health, belief consistency, and goal progress.\n\n### Coherence Check\n\nValidates logical consistency across beliefs and detects conflicting information.\n\n## User State Modeling\n\n### Activity Type\n\nThe system tracks the user's current activity level to calibrate interruption costs and suggestion timing.\n\n| Activity | Interruption Cost | Description |\n|----------|------------------|-------------|\n| Idle | 0.10 | No active task |\n| JustReturned | 0.30 | Recently resumed work |\n| Browsing | 0.35 | Passive consumption |\n| Communicating | 0.45 | In conversation |\n| TaskSwitching | 0.55 | Mid-task context switch |\n| FocusedWork | 0.75 | Concentration mode |\n| DeepFocus | 0.95 | Immersive concentration |\n\n资料来源：[receptivity.rs:25-50](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/receptivity.rs)\n\n### Need Categories\n\nUser needs are classified according to a needs-based taxonomy:\n\n| Category | Description |\n|----------|-------------|\n| Informational | Knowledge and learning needs |\n| Social | Connection and relationship needs |\n| Emotional | 
Wellbeing and mood management |\n| Organizational | Structure and order needs |\n| Creative | Expression and innovation |\n| Health | Physical wellbeing |\n| Financial | Economic security |\n| Professional | Career and productivity |\n\n资料来源：[state.rs:10-25](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n## Action Types\n\nCognitive agents can perform actions of different kinds, each with associated base costs:\n\n| Action | Base Cost | Description |\n|--------|-----------|-------------|\n| Abstain | 0.00 | Do nothing |\n| Inform | 0.05 | Passive information delivery |\n| Organize | 0.10 | Structure and categorization |\n| Suggest | 0.15 | Propose without commitment |\n| Communicate | 0.20 | Direct user interaction |\n| Schedule | 0.25 | Time management |\n| Warn | 0.30 | Alert about risks |\n| Execute | 0.40 | Take automated action |\n\n资料来源：[state.rs:100-130](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n## Task Lifecycle\n\nTasks move through a defined status workflow:\n\n```mermaid\ngraph LR\n    P[Pending] --> IP[InProgress]\n    IP --> C[Completed]\n    IP --> B[Blocked]\n    P --> CAN[Cancelled]\n    B --> IP\n    CAN --> P\n```\n\n### Task Status\n\n| Status | String Value | Description |\n|--------|--------------|-------------|\n| Pending | pending | Not yet started |\n| InProgress | in_progress | Currently being worked |\n| Completed | completed | Successfully finished |\n| Cancelled | cancelled | Abandoned without completion |\n| Blocked | blocked | Waiting on prerequisites |\n\n资料来源：[state.rs:200-230](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n## Surfacing System\n\nThe surfacing system determines when and how to present proactive suggestions to the user.\n\n### Surface Modes\n\n| Mode | Description |\n|------|-------------|\n| Immediate | Show right now |\n| Soon | Show within current context |\n| 
Queued | Add to notification queue |\n| Background | Process but don't interrupt |\n\n### Suppression Reasons\n\nSuggestions may be suppressed for various reasons:\n\n| Reason | Description |\n|--------|-------------|\n| LowReceptivity | User is busy |\n| ItemSuppressionRule | User preference to hide |\n| QuietHours | Outside allowed hours |\n| RateLimited | Too frequent |\n| AntiNag | Already dismissed |\n| MaxSurfaces | Budget exhausted |\n| TooSoon | Recently surfaced |\n| NotificationModeBlock | DND enabled |\n\n资料来源：[surfacing.rs:15-30](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/surfacing.rs)\n\n### ProactiveSuggestion Structure\n\n```rust\npub struct ProactiveSuggestion {\n    pub agenda_id: AgendaId,       // Source agenda item\n    pub description: String,       // Human-readable text\n    pub kind: AgendaKind,          // Type of open loop\n    pub mode: SurfaceMode,         // How prominently to show\n    pub reason: SurfaceReason,     // Why being surfaced\n    pub confidence: f64,           // Relevance score [0,1]\n    pub urgency: f64,              // Time sensitivity [0,1]\n}\n```\n\n## Conflict Detection\n\nThe system detects and manages conflicts between memories and beliefs.\n\n### Conflict Types\n\n| Type | Default Priority | Description |\n|------|-----------------|-------------|\n| IdentityFact | critical | Core identity contradiction |\n| Preference | high | Preference inconsistency |\n| Temporal | high | Time-based conflict |\n| Consolidation | medium | Merge conflict |\n| Minor | low | Minor inconsistency |\n\n资料来源：[types.rs:180-210](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/base/types.rs)\n\n### Conflict Resolution\n\nConflicts are resolved through a policy-aware process that checks namespace-specific policies before flagging inconsistencies. 
Resolution strategies include:\n\n- **Timestamp-based**: Newer observation wins\n- **Source-based**: Higher provenance reliability wins\n- **Evidence count**: More supporting observations wins\n- **Manual resolution**: User intervention required for critical conflicts\n\n## Natural Language Understanding\n\nThe extractor component converts free-text observations into structured cognitive operations:\n\n| Template | Resulting Operation |\n|----------|-------------------|\n| CreateTask | Creates new task with priority |\n| CreateGoal | Creates goal with priority |\n| SetPreference | Records preference in domain |\n| CreateNeed | Records need with category |\n| CreateRoutine | Records behavioral pattern |\n| EmotionalMarker | Logs emotional state |\n| CreateRelationship | Records person relationship |\n| Correction | Updates belief with correction |\n| TaskCompleted | Updates task status |\n\n资料来源：[extractor.rs:150-180](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/extractor.rs)\n\n## Narrative Arc Tracking\n\nThe system maintains narrative structures to track ongoing storylines in the user's life:\n\n### Arc Status\n\n| Status | Description |\n|--------|-------------|\n| Emerging | Recently detected, accumulating episodes |\n| Active | Continuously developing |\n| Paused | No recent activity, may resume |\n| Resolved | Goal achieved or concluded |\n| Abandoned | Intentionally stopped |\n\n### Chapter Types\n\nNarrative arcs are structured into chapters:\n\n| Type | Description |\n|------|-------------|\n| Setup | Initial context setting |\n| Rising | Building tension or progress |\n| Climax | Peak moment |\n| Falling | Winding down |\n| Resolution | Final conclusion |\n| Interlude | Pause or side-thread |\n\n资料来源：[narrative.rs:50-80](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/narrative.rs)\n\n## Priority Levels\n\nCognitive entities use a priority tier system for urgency-based 
processing:\n\n| Priority | Activation Threshold | Use Case |\n|----------|---------------------|----------|\n| Critical | 1.00 | Safety, immediate health |\n| High | 0.75 | Important deadlines |\n| Medium | 0.50 | Normal tasks |\n| Low | 0.25 | Nice-to-have items |\n\n资料来源：[state.rs:240-260](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n\n---\n\n<a id='page-conflict-resolution'></a>\n\n## Conflict Detection and Resolution\n\n### 相关页面\n\n相关主题：[Cognition Layer](#page-cognition-layer), [Core API Reference](#page-core-api)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [crates/yantrikdb-core/src/cognition/state.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n- [crates/yantrikdb-core/src/cognition/query_dsl.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/query_dsl.rs)\n- [crates/yantrikdb-core/src/base/types.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/base/types.rs)\n- [crates/yantrikdb-core/src/cognition/coherence.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/coherence.rs)\n- [crates/yantrikdb-core/src/cognition/surfacing.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/surfacing.rs)\n</details>\n\n# Conflict Detection and Resolution\n\nThe Conflict Detection and Resolution system is a core cognitive subsystem within yantrikdb that identifies, categorizes, and resolves contradictions between beliefs, memories, and other cognitive nodes stored in the knowledge graph. This system ensures the internal consistency of the user's cognitive model by detecting conflicts, prioritizing them based on severity, and applying appropriate resolution strategies.\n\n## Overview\n\nThe conflict resolution system operates as part of the broader cognitive engine, integrated with the coherence checking pipeline. 
When contradictions are detected between nodes in the belief network, the system evaluates the evidence supporting each conflicting belief and automatically or semi-automatically resolves the conflict.\n\n资料来源：[crates/yantrikdb-core/src/cognition/coherence.rs]()\n\n## Conflict Types\n\nConflicts in yantrikdb are categorized into five distinct types, each with different priority levels and default handling strategies.\n\n| Conflict Type | Priority | Description |\n|---------------|----------|-------------|\n| `IdentityFact` | Critical | Conflicts about fundamental identity or factual information |\n| `Preference` | High | Contradicting user preferences or stated likes/dislikes |\n| `Temporal` | High | Time-related contradictions (scheduling, deadlines) |\n| `Consolidation` | Medium | Conflicts arising from memory consolidation processes |\n| `Minor` | Low | Minor inconsistencies that don't affect core beliefs |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs]()\n\n```rust\npub enum ConflictType {\n    IdentityFact,\n    Preference,\n    Temporal,\n    Consolidation,\n    Minor,\n}\n```\n\nEach conflict type has an associated default priority that determines how urgently it should be addressed in the surfacing queue.\n\n## Conflict Data Model\n\nThe `Conflict` struct captures all metadata about a detected conflict.\n\n### Conflict Structure\n\n| Field | Type | Description |\n|-------|------|-------------|\n| `conflict_id` | String | Unique identifier for the conflict |\n| `conflict_type` | String | One of the five conflict types |\n| `priority` | String | Priority level (critical, high, medium, low) |\n| `status` | String | Current resolution status |\n| `memory_a` | String | Reference ID of first conflicting memory |\n| `memory_b` | String | Reference ID of second conflicting memory |\n| `entity` | Option\\<String\\> | Associated entity if applicable |\n| `rel_type` | Option\\<String\\> | Relationship type between memories |\n| `detected_at` | f64 | Unix timestamp 
when conflict was detected |\n| `detected_by` | String | Component or operator that detected the conflict |\n| `detection_reason` | String | Explanation of why this is a conflict |\n| `resolved_at` | Option\\<f64\\> | Timestamp when resolution was applied |\n| `resolved_by` | Option\\<String\\> | Resolution strategy or component |\n| `strategy` | Option\\<String\\> | Resolution strategy used |\n| `winner_rid` | Option\\<String\\> | Reference ID of winning memory |\n| `resolution_note` | Option\\<String\\> | Human-readable explanation of resolution |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs]()\n\n## Conflict Resolution Strategies\n\nWhen a conflict is resolved, the system generates a `ConflictResolutionResult` containing details of the resolution outcome.\n\n| Field | Type | Description |\n|-------|------|-------------|\n| `conflict_id` | String | The resolved conflict's identifier |\n| `strategy` | String | Strategy applied (e.g., \"evidence_based\", \"user_choice\") |\n| `winner_rid` | String | Reference ID of the winning memory node |\n| `loser_tombstoned` | bool | Whether the losing memory was soft-deleted |\n| `new_memory_rid` | Option\\<String\\> | ID of newly created merged memory, if applicable |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs]()\n\n### Resolution Logic\n\nThe coherence checking system applies evidence-based resolution when two beliefs contradict each other. 
The algorithm compares the evidence count between the two conflicting nodes:\n\n```rust\nlet (loser, winner_label) = match (node_a, node_b) {\n    (Some(a), Some(b)) => {\n        // Prefer keeping the one with more evidence.\n        if a.attrs.evidence_count >= b.attrs.evidence_count {\n            (b.id, a.label.clone())\n        } else {\n            (a.id, b.label.clone())\n        }\n    }\n    (None, Some(_)) => (contradiction.belief_a, \"unknown\".to_string()),\n    (Some(_), None) => (contradiction.belief_b, \"unknown\".to_string()),\n    (None, None) => (contradiction.belief_a, \"unknown\".to_string()),\n};\n```\n\nThe losing belief is demoted (tombstoned), and the explanation reflects which belief had higher evidence support. This approach ensures that beliefs with more corroborating evidence are preserved in the knowledge graph.\n\n资料来源：[crates/yantrikdb-core/src/cognition/coherence.rs]()\n\n## Conflict Surfacing\n\nNot all conflicts require immediate user attention. The surfacing system uses the `SurfaceReason` enum to determine when and how conflicts should be presented to the user.\n\n### Surfacing Reasons Related to Conflicts\n\n| Reason | Base Confidence | Description |\n|--------|-----------------|-------------|\n| `ConflictNeedsResolution` | 0.7 | Active conflict that requires user input |\n| `AnomalyDetected` | 0.65 | Statistical anomaly suggesting hidden conflict |\n| `UrgencyThreshold` | 0.5 | Generic urgency-based surfacing trigger |\n\n资料来源：[crates/yantrikdb-core/src/cognition/surfacing.rs]()\n\n### Surfacing Modes\n\nWhen a conflict is surfaced, the system selects an appropriate presentation mode based on urgency and priority:\n\n| Mode | Disruption Cost | Use Case |\n|------|----------------|----------|\n| `Whisper` | 0.05 | Low-priority informational notes |\n| `Nudge` | 0.25 | Moderate importance, user-initiated check |\n| `Alert` | 0.60 | High-priority conflicts requiring attention |\n| `Preempt` | 0.95 | Critical identity conflicts, 
immediate attention |\n\n资料来源：[crates/yantrikdb-core/src/cognition/surfacing.rs]()\n\n## Cognitive Edge Kinds and Conflict Detection\n\nThe belief network uses typed edges to represent relationships between cognitive nodes. Certain edge types are directly relevant to conflict detection.\n\n### Epistemic Edges\n\nEdges that participate in belief revision and conflict detection:\n\n| Edge Kind | Activation Transfer | Role |\n|-----------|---------------------|------|\n| `Supports` | 0.7 | Positive evidence for a belief |\n| `Contradicts` | -0.5 | Direct opposition between beliefs |\n\nThe system identifies conflicts when a `Contradicts` edge exists between two belief nodes. These edges have negative activation transfer, meaning they inhibit the target node's activation level.\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs]()\n\n### Edge Classification Methods\n\n```rust\n/// Whether this edge participates in belief revision.\npub fn is_epistemic(self) -> bool {\n    matches!(self, Self::Supports | Self::Contradicts)\n}\n\n/// Whether this edge type is inhibitory (suppresses target activation).\npub fn is_inhibitory(self) -> bool {\n    self.activation_transfer() < 0.0\n}\n```\n\n## Coherence Checking Pipeline\n\nThe coherence checking system is responsible for detecting conflicts as part of the cognitive processing pipeline.\n\n### Operator Priority in Cognitive Loop\n\n| Operator | Priority | Role |\n|----------|----------|------|\n| `Attend` | 10 | Foundation - always run |\n| `Recall` | 9 | Critical for context |\n| `Believe` | 8 | Evidence integration |\n| `Compare` | 7 | Action selection |\n| `Constrain` | 7 | Safety - always run if comparing |\n| `Plan` | 6 | Means-ends reasoning |\n| `Project` | 5 | Forward simulation |\n| `Anticipate` | 4 | Proactive - nice to have |\n| `Assess` | 3 | Meta - can skip under pressure |\n| `CoherenceCheck` | 2 | Maintenance - skip if budget tight |\n\nThe `CoherenceCheck` operator has the lowest priority, meaning it may 
be skipped when computational budget is constrained. This design ensures that core cognitive functions (attention, recall, belief integration) always execute first.\n\n资料来源：[crates/yantrikdb-core/src/cognition/query_dsl.rs]()\n\n### Fragmentation Detection\n\nThe coherence system also monitors attention fragmentation—how evenly distributed activation is across working set nodes:\n\n```rust\nfn compute_fragmentation(ws: &WorkingSet) -> f64 {\n    if ws.len() <= 1 {\n        return 0.0;\n    }\n    \n    let activations: Vec<f64> = ws.iter().map(|n| n.attrs.activation).collect();\n    let total: f64 = activations.iter().sum();\n    \n    if total <= 0.0 {\n        return 0.0;\n    }\n    \n    // Normalized entropy (Shannon entropy / max entropy)\n    let n = activations.len() as f64;\n    // ...\n}\n```\n\nHigh fragmentation (many nodes with similar activation) can indicate unresolved conflicts competing for attention.\n\n资料来源：[crates/yantrikdb-core/src/cognition/coherence.rs]()\n\n## Python API\n\nThe Python bindings expose conflict resolution through the `PyConflictEngine` interface.\n\n### Key Methods\n\n| Method | Parameters | Return | Description |\n|--------|------------|--------|-------------|\n| `list_conflicts` | `namespace`, `limit` | Vec\\<Dict\\> | List conflicts in namespace |\n| `get_conflict` | `conflict_id` | Option\\<Dict\\> | Retrieve specific conflict |\n| `resolve_conflict` | `conflict_id`, `strategy`, `winner_rid`, `new_text`, `resolution_note` | Dict | Apply resolution strategy |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/cognition.rs]()\n\n### Resolution Example\n\n```python\nresult = db.resolve_conflict(\n    conflict_id=\"conflict_123\",\n    strategy=\"evidence_based\",\n    winner_rid=\"memory_456\",\n    new_text=None,\n    resolution_note=\"Preferred belief with higher evidence count\"\n)\n```\n\n## System Architecture\n\n```mermaid\ngraph TD\n    subgraph \"Cognitive Engine\"\n        A[CognitiveNode Storage] --> 
B[CoherenceChecker]\n        B --> C[ConflictDetector]\n        C --> D[Conflict Queue]\n        D --> E[SurfaceReason Evaluator]\n        E --> F[ProactiveSuggestion Generator]\n    end\n    \n    subgraph \"Conflict Types\"\n        G[IdentityFact]\n        H[Preference]\n        I[Temporal]\n        J[Consolidation]\n        K[Minor]\n    end\n    \n    subgraph \"Resolution Outcomes\"\n        L[Winner Selected]\n        M[Loser Tombstoned]\n        N[New Merged Memory]\n        O[User Notified]\n    end\n    \n    C --> G\n    C --> H\n    C --> I\n    C --> J\n    C --> K\n    \n    F --> L\n    F --> M\n    F --> N\n    F --> O\n```\n\n## Provenance and Reliability\n\nConflicts are detected based on the provenance source of each memory node. Different provenance types have different reliability priors:\n\n| Provenance | Reliability Prior | Description |\n|------------|------------------|-------------|\n| `Told` | 0.95 | User explicitly stated - highest trust |\n| `Observed` | 0.90 | Directly observed behavior |\n| `Experimented` | 0.85 | Confirmed via controlled experiment |\n| `Consolidated` | 0.80 | Merged from multiple sources |\n| `Extracted` | 0.75 | From external documents |\n| `Inferred` | 0.60 | Pattern-based inference |\n| `SystemDefault` | 0.50 | Default values - weakest |\n\nWhen conflicts involve memories with different provenance sources, the system considers reliability priors in its resolution strategy.\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs]()\n\n## Summary\n\nThe Conflict Detection and Resolution system in yantrikdb provides a robust mechanism for maintaining cognitive consistency:\n\n1. **Detection**: Conflicts are identified through the `Contradicts` edge type in the belief network and during coherence checking operations.\n\n2. **Classification**: Conflicts are categorized into five types with associated priority levels, enabling appropriate handling based on severity.\n\n3. 
**Resolution**: Evidence-based resolution selects the belief with higher evidence count as the winner, with the losing belief being tombstoned.\n\n4. **Surfacing**: High-priority conflicts trigger the surfacing system to proactively notify the user through appropriate channels (whisper, nudge, alert, or preempt).\n\n5. **Integration**: The system integrates with the broader cognitive engine, respecting operator priorities and computational budgets.\n\n---\n\n<a id='page-mcp-server'></a>\n\n## MCP Server Integration\n\n### 相关页面\n\n相关主题：[Python Bindings](#page-python-bindings)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [src/yantrikdb/mcp/server.py](https://github.com/yantrikos/yantrikdb/blob/main/src/yantrikdb/mcp/server.py)\n- [src/yantrikdb/mcp/tools.py](https://github.com/yantrikos/yantrikdb/blob/main/src/yantrikdb/mcp/tools.py)\n- [src/yantrikdb/mcp/resources.py](https://github.com/yantrikos/yantrikdb/blob/main/src/yantrikdb/mcp/resources.py)\n- [MCP_REDESIGN.md](https://github.com/yantrikos/yantrikdb/blob/main/MCP_REDESIGN.md)\n</details>\n\n# MCP Server Integration\n\nThe MCP (Model Context Protocol) Server Integration provides a standardized interface for AI agents to interact with YantrikDB's persistent cognitive memory. This integration enables AI assistants—including Claude Code, Cursor, Windsurf, and any MCP-compatible client—to automatically remember decisions, recall relevant context, and detect contradictions without explicit user prompting.\n\n## Overview\n\nThe MCP server is a Python-based component built on top of the FastMCP framework that exposes YantrikDB's core capabilities through the Model Context Protocol. 
This allows AI agents to maintain persistent memory across sessions, automatically consolidating experiences over time.\n\n资料来源：[MCP_REDESIGN.md:1-20]()\n\n### Core Objectives\n\nThe MCP integration was designed to achieve the following success criteria:\n\n| Criterion | Description |\n|-----------|-------------|\n| Zero-configuration setup | `pip install yantrikdb[mcp]` with 3 lines in mcp.json |\n| Automatic memory recall | Agent recalls relevant context at conversation start |\n| Automatic memory storage | Agent remembers decisions, preferences, corrections |\n| Conflict detection | Agent surfaces contradictions naturally |\n| Cross-platform compatibility | Works with Claude Code, Cursor, Windsurf, and any MCP client |\n| Fast first-run | Database initialization completes in under 30 seconds |\n| Session persistence | Memory persists across sessions with gradual consolidation |\n\n资料来源：[MCP_REDESIGN.md:50-58]()\n\n## Architecture\n\n### Component Structure\n\nThe MCP server is organized into three primary modules within `src/yantrikdb/mcp/`:\n\n```mermaid\ngraph TD\n    A[MCP Client<br/>Claude Code, Cursor] --> B[server.py<br/>FastMCP Lifespan]\n    B --> C[tools.py<br/>10 Tool Definitions]\n    B --> D[resources.py<br/>MCP Resources]\n    C --> E[YantrikDB Core<br/>Rust Engine]\n    D --> E\n    E --> F[SQLite Database<br/>memory.db]\n```\n\n资料来源：[MCP_REDESIGN.md:25-35]()\n\n### File Structure\n\n| File | Purpose |\n|------|---------|\n| `server.py` | FastMCP server initialization, lifespan context, YantrikDB + embedder initialization |\n| `tools.py` | 10 tool definitions: remember, recall, relate, entities, beliefs, conflicts, patterns, consolidate, forget, stats |\n| `resources.py` | MCP resource handlers for dynamic data access |\n| `__init__.py` | Main entry point for the MCP command |\n\n资料来源：[MCP_REDESIGN.md:20-30]()\n\n## Available Tools\n\nThe MCP server exposes 10 core tools that AI agents can invoke. 
Each tool is designed with rich descriptions that guide auto-pilot behavior.\n\n资料来源：[MCP_REDESIGN.md:31-35]()\n\n### Tool Reference\n\n| Tool | Purpose | Key Parameters |\n|------|---------|----------------|\n| `remember` | Store a memory with embedding | `text`, `memory_type`, `importance`, `domain`, `namespace` |\n| `recall` | Retrieve semantically similar memories | `query`, `top_k`, `memory_type`, `domain`, `time_window` |\n| `relate` | Create a relationship between two entities | `src`, `dst`, `rel_type` |\n| `entities` | Query entity graph | `query`, `entity_type`, `top_k` |\n| `beliefs` | Access the belief graph | `query`, `include_inferred` |\n| `conflicts` | List detected memory conflicts | `status`, `priority`, `limit` |\n| `patterns` | Discover recurring patterns | `domain`, `min_confidence`, `limit` |\n| `consolidate` | Trigger memory consolidation | `aggressive` |\n| `forget` | Remove specific memories | `rid` or `query` |\n| `stats` | Get memory statistics | - |\n\n资料来源：[MCP_REDESIGN.md:32-35]()\n\n### Tool Description Quality\n\nThe tool descriptions are designed to be comprehensive, telling agents not just what each tool does but *when* to call it. 
This guidance supports auto-pilot behavior, similar to the instruction blocks in Claude Code's configuration.\n\n资料来源：[MCP_REDESIGN.md:40-42]()\n\n## Configuration\n\n### Environment Variables\n\nThe MCP server accepts configuration through environment variables:\n\n| Variable | Default | Description |\n|----------|---------|-------------|\n| `YANTRIKDB_DB_PATH` | `memory.db` | Path to the SQLite database file |\n| `YANTRIKDB_EMBEDDING_MODEL` | `potion-base-2M` | Embedding model to use |\n| `YANTRIKDB_EMBEDDING_DIM` | 64 | Embedding dimension |\n\n资料来源：[MCP_REDESIGN.md:36-38]()\n\n### Installation\n\n```bash\npip install yantrikdb[mcp]\n```\n\nAfter installation, the server can be started with:\n\n```bash\nyantrikdb-mcp\n```\n\n### MCP Client Configuration\n\nAdd the following to your MCP client configuration (e.g., `mcp.json`):\n\n```json\n{\n  \"mcpServers\": {\n    \"yantrikdb\": {\n      \"command\": \"yantrikdb-mcp\",\n      \"env\": {\n        \"YANTRIKDB_DB_PATH\": \"/path/to/memory.db\"\n      }\n    }\n  }\n}\n```\n\n## Advanced Capabilities\n\nBeyond the basic remember/recall flow, the MCP server exposes advanced YantrikDB capabilities that are available from the Rust engine but not yet fully utilized by the agent workflow.\n\n资料来源：[MCP_REDESIGN.md:60-70]()\n\n### RecallQuery Builder Options\n\nThe underlying Rust engine supports rich query building:\n\n| Parameter | Type | Description |\n|-----------|------|-------------|\n| `top_k` | `usize` | Number of results to return |\n| `memory_type` | `string` | Filter by episodic, semantic, procedural, declarative |\n| `namespace` | `string` | Logical data partitioning |\n| `time_window` | `(f64, f64)` | Unix timestamp range filter |\n| `domain` | `string` | Subject area filter |\n| `source` | `string` | Memory origin filter |\n| `expand_entities` | `bool` | Include related entity details |\n\n资料来源：[crates/yantrikdb-core/src/cognition/query_dsl.rs:1-20]()\n\n### Conflict Resolution\n\nThe engine supports 
multiple conflict resolution strategies:\n\n| Strategy | Description |\n|----------|-------------|\n| `keep_a` | Preserve the first memory |\n| `keep_b` | Preserve the second memory |\n| `merge` | Combine both memories with temporal ordering |\n| `ask_user` | Defer resolution to user input |\n\n资料来源：[MCP_REDESIGN.md:62-65]()\n\n### Pattern Mining\n\nPattern mining can be configured with:\n\n- Custom confidence thresholds\n- Domain-specific pattern detection\n- Temporal pattern analysis\n- Entity relationship patterns\n\n资料来源：[MCP_REDESIGN.md:63-65]()\n\n### Personality Profile Extraction\n\nThe engine can extract personality profiles from memory interactions, enabling more personalized agent behavior over time.\n\n资料来源：[MCP_REDESIGN.md:64-66]()\n\n### Spaced Repetition Reinforcement\n\nMemory access automatically triggers spaced repetition reinforcement, strengthening frequently accessed memories and allowing less-used ones to decay naturally.\n\n资料来源：[MCP_REDESIGN.md:65-67]()\n\n### Batch Operations\n\nThe Python bindings support batch record operations for efficiency:\n\n```python\ndb = yantrikdb.YantrikDB.with_default(\"memory.db\")\ndb.record_batch([\n    {\"text\": \"Memory 1\", \"importance\": 0.8},\n    {\"text\": \"Memory 2\", \"importance\": 0.6},\n])\n```\n\n### Replication and Sync\n\nYantrikDB supports CRDT-based replication for multi-device synchronization:\n\n```python\nops = db.extract_ops_since(since_hlc=hlc, since_op_id=op_id)\ndb.apply_ops(ops)\n```\n\n资料来源：[crates/yantrikdb-python/src/py_engine/sync.rs:1-30]()\n\n## Memory Types\n\nThe system supports four primary memory types, each serving distinct cognitive purposes:\n\n| Memory Type | Purpose | Typical Use Case |\n|-------------|---------|------------------|\n| `episodic` | Temporal experiences and events | \"Yesterday I talked about project X\" |\n| `semantic` | Factual knowledge and concepts | \"The user prefers dark mode\" |\n| `procedural` | How-to knowledge and skills | \"How to run the 
test suite\" |\n| `declarative` | Explicitly stated facts | \"The deadline is March 30\" |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:1-30]()\n\n## Entity and Belief Management\n\n### Entity Graph\n\nThe entity graph maintains relationships between extracted entities:\n\n```mermaid\ngraph LR\n    A[Alice] -->|leads| B[Engineering]\n    B -->|part_of| C[Company]\n    A -->|works_with| D[Bob]\n```\n\n### Belief System\n\nBeliefs have provenance types indicating their source reliability:\n\n| Provenance | Reliability Prior | Description |\n|------------|-------------------|-------------|\n| `told` | 0.95 | User explicitly stated |\n| `observed` | 0.90 | Directly observed behavior |\n| `experimented` | 0.85 | Confirmed via controlled experiment |\n| `extracted` | 0.75 | From external documents |\n| `inferred` | 0.60 | Pattern-based inference |\n| `consolidated` | 0.80 | Merged from multiple sources |\n| `system_default` | 0.50 | Default values |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:1-50]()\n\n## Workflow Examples\n\n### Basic Memory Storage and Retrieval\n\n```python\n# Using Python library directly\nimport yantrikdb\n\ndb = yantrikdb.YantrikDB.with_default(\"memory.db\")\n\n# Store a memory\ndb.record(\"Alice is the engineering lead\", importance=0.8, domain=\"people\")\n\n# Retrieve relevant memories\nresults = db.recall(\"who leads the team?\", top_k=3)\n\n# Create a relationship\ndb.relate(\"Alice\", \"Engineering\", \"leads\")\n```\n\n### Triggering Cognitive Processing\n\n```python\n# Run the think() cognition loop\ndb.think()  # consolidate, detect conflicts, mine patterns\n```\n\nThis single call triggers:\n- Memory consolidation\n- Conflict detection and resolution\n- Pattern mining\n\n资料来源：[README.md:1-40]()\n\n## Cognitive Triggers\n\nThe system supports multiple trigger types for proactive memory maintenance:\n\n| Trigger | Default Cooldown | Default Expiry | Purpose 
|\n|---------|------------------|----------------|---------|\n| `decay_review` | 3 days | 7 days | Memory decay review |\n| `consolidation_ready` | 1 day | 3 days | Consolidation queue processing |\n| `conflict_escalation` | 2 days | 14 days | Unresolved conflict handling |\n| `temporal_drift` | 14 days | 7 days | Temporal anomaly detection |\n| `redundancy` | 1 day | 7 days | Duplicate memory cleanup |\n| `relationship_insight` | 7 days | 7 days | Entity relationship discovery |\n| `valence_trend` | 7 days | 7 days | Emotional pattern tracking |\n| `entity_anomaly` | 7 days | 7 days | Unusual entity behavior |\n| `pattern_discovered` | 7 days | 7 days | New pattern identification |\n\n资料来源：[crates/yantrikdb-core/src/base/types.rs:1-50]()\n\n## Future Enhancements\n\nThe MCP_REDESIGN.md outlines planned improvements:\n\n1. **Rich tool descriptions** — More detailed examples for auto-pilot behavior\n2. **Server instructions** — System prompt injection for agent guidance\n3. **Better error messages** — More informative feedback for debugging\n4. **Streaming responses** — For long-running operations\n5. 
**Progress indicators** — Real-time feedback during consolidation\n\n资料来源：[MCP_REDESIGN.md:45-55]()\n\n## See Also\n\n- [YantrikDB Core Engine](../core/engine.md)\n- [Python API Reference](../python/api.md)\n- [Conflict Resolution](../core/conflict-resolution.md)\n- [Consolidation System](../core/consolidation.md)\n\n---\n\n<a id='page-python-bindings'></a>\n\n## Python Bindings\n\n### 相关页面\n\n相关主题：[MCP Server Integration](#page-mcp-server), [Installation](#page-installation), [Core API Reference](#page-core-api)\n\n<details>\n<summary>相关源码文件</summary>\n\n以下源码文件用于生成本页说明：\n\n- [crates/yantrikdb-python/src/py_engine/memory.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-python/src/py_engine/memory.rs)\n- [crates/yantrikdb-python/src/py_engine/session_temporal.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-python/src/py_engine/session_temporal.rs)\n- [crates/yantrikdb-python/src/py_types.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-python/src/py_types.rs)\n- [crates/yantrikdb-core/src/engine/recall.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/engine/recall.rs)\n- [crates/yantrikdb-core/src/cognition/state.rs](https://github.com/yantrikos/yantrikdb/blob/main/crates/yantrikdb-core/src/cognition/state.rs)\n</details>\n\n# Python Bindings\n\n## Overview\n\nThe Python bindings provide a native Python interface to yantrikdb, enabling Python developers to interact with the memory database using familiar Python idioms. 
Built on top of the Rust core using [pyo3](https://pyo3.rs/), the bindings expose the full functionality of yantrikdb while maintaining Pythonic conventions for parameter ordering and default values.\n\nThe `PyYantrikDB` class serves as the primary entry point, offering methods for recording memories, querying with semantic search, managing relationships between entities, and triggering cognitive consolidation processes.\n\n## Architecture\n\n```mermaid\ngraph TD\n    A[Python Application] --> B[PyYantrikDB]\n    B --> C[pyo3 Bridge Layer]\n    C --> D[yantrikdb-core]\n    D --> E[SQLite Storage]\n    D --> F[Vector Index]\n    \n    G[py_types.rs] --> C\n    G --> H[Type Conversions]\n    H --> I[PyObject ↔ Rust Structs]\n    \n    J[Default Embedder] --> B\n    J --> K[potion-base-2M<br/>dim=64]\n```\n\nThe binding layer consists of three main components:\n\n| Component | File | Purpose |\n|-----------|------|---------|\n| PyYantrikDB | `py_engine/mod.rs` | Main Python class exposing all methods |\n| Type Conversions | `py_types.rs` | Bidirectional conversion between Rust and Python types |\n| Engine Bridge | `py_engine/*.rs` | Method implementations delegating to core |\n\n## Core API Methods\n\n### Recording Memories\n\nThe `record()` method stores new memories in the database with semantic embeddings. 
It accepts text input and generates embeddings automatically using the bundled embedder, or accepts pre-computed embeddings for efficiency.\n\n```python\ndb.record(\n    text=\"Alice is the engineering lead\",\n    memory_type=\"episodic\",\n    importance=0.8,\n    valence=0.0,\n    half_life=604800.0,\n    certainty=0.8,\n    domain=\"people\",\n    source=\"user\",\n    namespace=\"default\",\n    emotional_state=None\n)\n```\n\n**Parameters:**\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `text` | `str` | Required | The memory content to store |\n| `memory_type` | `str` | `\"episodic\"` | Memory classification (episodic, semantic, etc.) |\n| `importance` | `float` | `0.5` | Significance score [0.0, 1.0] |\n| `valence` | `float` | `0.0` | Emotional valence [-1.0, 1.0] |\n| `half_life` | `float` | `604800.0` | Decay period in seconds (7 days default) |\n| `certainty` | `float` | `0.8` | Confidence in the memory's accuracy |\n| `domain` | `str` | `\"general\"` | Knowledge domain category |\n| `source` | `str` | `\"user\"` | Origin of the memory |\n| `namespace` | `str` | `\"default\"` | Logical partition for data isolation |\n| `emotional_state` | `str` | `None` | Emotional context at recording time |\n| `embedding` | `List[float]` | `None` | Pre-computed vector (auto-generated if omitted) |\n| `metadata` | `dict` | `None` | Arbitrary key-value metadata |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:13-30]()\n\n### Querying and Recall\n\nThe `recall()` method performs semantic search over stored memories, returning results ranked by relevance. 
It supports both text queries and pre-computed embedding vectors.\n\n```python\nresults = db.recall(\n    query=\"who leads the team?\",\n    top_k=10,\n    memory_type=None,\n    namespace=None,\n    time_window=None,\n    include_consolidated=False,\n    expand_entities=True,\n    skip_reinforce=False,\n    domain=None,\n    source=None\n)\n```\n\n**Parameters:**\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `query` | `str` | `None` | Natural language search query |\n| `query_embedding` | `List[float]` | `None` | Pre-computed embedding vector |\n| `top_k` | `int` | `10` | Maximum results to return |\n| `time_window` | `Tuple[float, float]` | `None` | Filter by Unix timestamp range |\n| `memory_type` | `str` | `None` | Filter by memory type |\n| `namespace` | `str` | `None` | Filter by namespace |\n| `domain` | `str` | `None` | Filter by domain |\n| `source` | `str` | `None` | Filter by source |\n| `include_consolidated` | `bool` | `False` | Include consolidated memories |\n| `expand_entities` | `bool` | `True` | Expand entity references |\n| `skip_reinforce` | `bool` | `False` | Skip reinforcement learning update |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:60-85]()\n\nThe `recall_text()` method provides a simplified interface for text-based queries with optional filtering:\n\n```python\nresults = db.recall_text(\n    query=\"who leads the team?\",\n    top_k=10,\n    namespace=None,\n    domain=None,\n    source=None\n)\n```\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:125-145]()\n\n### Procedural Memory\n\nProcedural memory stores task-related information and supports reinforcement learning for effectiveness tracking.\n\n```python\n# Record a procedural memory\nrid = db.record_procedural(\n    text=\"How to deploy to production\",\n    domain=\"devops\",\n    task_context=\"deployment workflow\",\n    effectiveness=0.5,\n    namespace=\"default\"\n)\n\n# Reinforce based on 
outcome\ndb.reinforce_procedural(rid, outcome=0.9)\n```\n\n**Parameters for `record_procedural`:**\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `text` | `str` | Required | Procedure description |\n| `embedding` | `List[float]` | `None` | Pre-computed vector |\n| `domain` | `str` | `\"general\"` | Task domain |\n| `task_context` | `str` | `\"\"` | Contextual information |\n| `effectiveness` | `float` | `0.5` | Initial effectiveness score |\n| `namespace` | `str` | `\"default\"` | Namespace partition |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/session_temporal.rs:45-60]()\n\n### Memory Correction\n\nThe `correct()` method allows updating existing memories with corrections, maintaining an audit trail of original content.\n\n```python\nresult = db.correct(\n    rid=\"existing-memory-rid\",\n    new_text=\"Updated information\",\n    new_importance=0.9,\n    new_valence=0.2,\n    embedding=None,\n    correction_note=\"Corrected factual error\"\n)\n```\n\n**Return Value:**\n\n| Field | Type | Description |\n|-------|------|-------------|\n| `original_rid` | `str` | ID of the original memory |\n| `corrected_rid` | `str` | ID of the new corrected memory |\n| `original_tombstoned` | `bool` | Whether original was soft-deleted |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:100-115]()\n\n### Memory Decay\n\nThe `decay()` method triggers decay calculations across all memories based on access patterns and half-life values.\n\n```python\ndecayed = db.decay(threshold=0.01)\n```\n\n**Parameters:**\n\n| Parameter | Type | Default | Description |\n|-----------|------|---------|-------------|\n| `threshold` | `float` | `0.01` | Minimum importance to retain |\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:88-95]()\n\n## Default Embedding Model\n\nThe Python bindings include a bundled embedder (`potion-base-2M`) that provides 64-dimensional embeddings out of the box. 
This eliminates dependencies on external services like sentence-transformers or ONNX runtime.\n\n```mermaid\ngraph LR\n    A[Input Text] --> B[pyo3 embed_text]\n    B --> C[potion-base-2M]\n    C --> D[64-dim Vector]\n    D --> E[Storage/Recall]\n```\n\nThe embedder is invoked automatically when `embedding` parameters are omitted:\n\n```python\n# Auto-embedding\ndb.record(\"Alice is the engineering lead\")\n\n# Manual embedding\nvector = [0.1, 0.2, ...]  # 64 floats\ndb.record(\"Alice is the engineering lead\", embedding=vector)\n```\n\n资料来源：[README.md](https://github.com/yantrikos/yantrikdb/blob/main/README.md)\n\n## Initialization and Configuration\n\n### Creating a Database Instance\n\n```python\nimport yantrikdb\n\n# Default instance with bundled embedder\ndb = yantrikdb.YantrikDB.with_default(\"memory.db\")\n\n# Work with the database\ndb.record(\"Memory content\", importance=0.8)\nresults = db.recall(\"Query text\")\n\n# Always close when done\ndb.close()\n```\n\n资料来源：[README.md](https://github.com/yantrikos/yantrikdb/blob/main/README.md)\n\n## Type Conversions\n\nThe `py_types.rs` module handles bidirectional conversion between Rust structs and Python objects:\n\n| Rust Type | Python Type | Conversion Function |\n|-----------|-------------|---------------------|\n| `yantrikdb_core::Memory` | `dict` | `memory_to_dict()` |\n| `yantrikdb_core::RecallResult` | `dict` | `recall_result_to_dict()` |\n| `serde_json::Value` | `PyObject` | `json_to_py()` |\n| `Bound<PyDict>` | `serde_json::Value` | `py_to_json()` |\n\n资料来源：[crates/yantrikdb-python/src/py_types.rs:6-40]()\n\n### Memory to Dictionary\n\nThe `memory_to_dict()` function converts a core Memory struct to a Python dictionary matching the Python engine's expected output format:\n\n```rust\npub fn memory_to_dict(py: Python<'_>, mem: &yantrikdb_core::Memory) -> PyResult<PyObject> {\n    let dict = PyDict::new(py);\n    dict.set_item(\"rid\", &mem.rid)?;\n    dict.set_item(\"type\", &mem.memory_type)?;\n    
dict.set_item(\"text\", &mem.text)?;\n    dict.set_item(\"created_at\", mem.created_at)?;\n    dict.set_item(\"importance\", mem.importance)?;\n    // ... additional fields\n    Ok(dict.into())\n}\n```\n\n资料来源：[crates/yantrikdb-python/src/py_types.rs:8-25]()\n\n## Return Value Structure\n\n### Recall Results\n\nQuery results are returned as Python dictionaries with the following structure:\n\n```python\n{\n    \"rid\": \"memory-unique-id\",\n    \"type\": \"episodic\",\n    \"text\": \"Memory content\",\n    \"score\": 0.95,           # Relevance score\n    \"created_at\": 1234567890.0,\n    \"importance\": 0.8,\n    \"valence\": 0.0,\n    \"half_life\": 604800.0,\n    \"last_access\": 1234567890.0,\n    \"access_count\": 5,\n    \"consolidation_status\": \"stable\",\n    \"storage_tier\": \"hot\",\n    \"namespace\": \"default\",\n    \"certainty\": 0.8,\n    \"domain\": \"people\",\n    \"source\": \"user\",\n    \"emotional_state\": None,\n    \"metadata\": {}\n}\n```\n\n## Advanced Query Options\n\n### Recall with Sequence Verification\n\nFor applications requiring strong consistency guarantees, `recall_with_seq()` ensures query results reflect all prior writes:\n\n```python\n# After a write operation\ndb.record(\"New memory\", namespace=\"work\")\n\n# Ensure subsequent recall sees the write\nresults = db.recall_with_seq(\n    query_embedding=embedding,\n    top_k=10,\n    min_seq=prior_sequence,\n    namespace=\"work\",\n    timeout=timedelta(seconds=5)\n)\n```\n\n资料来源：[crates/yantrikdb-core/src/engine/recall.rs:50-80]()\n\n### Time-Window Filtering\n\nResults can be filtered to a specific time range using Unix timestamps:\n\n```python\nimport time\n\nnow = time.time()\nweek_ago = now - 604800  # 7 days\n\nresults = db.recall(\n    query=\"meetings\",\n    time_window=(week_ago, now)\n)\n```\n\n## Memory Types\n\nyantrikdb supports multiple memory types for different kinds of information:\n\n| Type | Description |\n|------|-------------|\n| `entity` | Factual 
knowledge about entities |\n| `episode` | Event-based episodic memories |\n| `belief` | User beliefs and opinions |\n| `goal` | Goals and objectives |\n| `task` | Tasks and action items |\n| `intent_hypothesis` | Hypothesized user intents |\n| `routine` | Recurring behavioral patterns |\n| `need` | User needs and requirements |\n| `opportunity` | Time-bounded opportunities |\n| `risk` | Potential problems |\n| `preference` | User preferences |\n| `conversation_thread` | Conversational context |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:120-145]()\n\n## Relationship Management\n\nBeyond storing individual memories, yantrikdb supports graph-like relationships between entities:\n\n```python\n# Define relationships\ndb.relate(\"Alice\", \"Engineering\", \"leads\")\ndb.relate(\"Alice\", \"Bob\", \"manages\")\n\n# Query relationships\nedges = db.get_edges(\"Alice\")\n```\n\nThe system supports relationship types including:\n\n| Type | Description |\n|------|-------------|\n| `supports` | Supporting evidence |\n| `contradicts` | Contradicting information |\n| `causes` | Causal relationship |\n| `predicts` | Predictive relationship |\n| `requires` | Prerequisite relationship |\n| `associated_with` | General association |\n| `similar_to` | Similarity connection |\n\n资料来源：[crates/yantrikdb-core/src/cognition/state.rs:200-230]()\n\n## Cognitive Processing\n\n### Think Operation\n\nThe `think()` method triggers the cognitive processing pipeline:\n\n```python\ndb.think()  # Consolidate, detect conflicts, mine patterns\n```\n\nThis operation:\n1. Consolidates related memories\n2. Detects conflicts between beliefs\n3. Mines patterns from episodic data\n4. 
Updates procedural memory effectiveness\n\n## Error Handling\n\nThe Python bindings map Rust errors to appropriate Python exceptions:\n\n| Rust Error | Python Exception |\n|------------|------------------|\n| `RuntimeError` | `RuntimeError` |\n| `ValueError` | `ValueError` |\n| Storage errors | `RuntimeError` |\n\n```python\ntry:\n    db.record(\"Memory\")\nexcept RuntimeError as e:\n    print(f\"Database error: {e}\")\nexcept ValueError as e:\n    print(f\"Invalid input: {e}\")\n```\n\n资料来源：[crates/yantrikdb-python/src/py_engine/memory.rs:20-25]()\n\n## Best Practices\n\n1. **Always close the database** when done to ensure proper cleanup:\n   ```python\n   db = yantrikdb.YantrikDB(\"memory.db\")\n   try:\n       # operations\n   finally:\n       db.close()\n   ```\n\n2. **Use context managers** when possible for automatic cleanup\n\n3. **Batch operations** when recording multiple related memories\n\n4. **Choose appropriate namespaces** to partition data logically\n\n5. **Set importance values** appropriately to control memory retention and retrieval priority\n\n---\n\n---\n\n## Doramagic 踩坑日志\n\n项目：yantrikos/yantrikdb\n\n摘要：发现 24 个潜在踩坑项，其中 0 个为 high/blocking；最高优先级：安装坑 - 来源证据：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication。\n\n## 1. 安装坑 · 来源证据：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication\n- 对用户的影响：可能阻塞安装或首次运行。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_4ab95be6a3ac4fb192053e8c3829f762 | https://github.com/yantrikos/yantrikdb/issues/9 | 来源讨论提到 node 相关条件，需在安装/试用前复核。\n\n## 2. 
安装坑 · 来源证据：Bug: `namespace` parameter ignored in batch `remember` calls — memories always stored under `default`\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：Bug: `namespace` parameter ignored in batch `remember` calls — memories always stored under `default`\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_c37cd96e9c8d476880caca4f7314118e | https://github.com/yantrikos/yantrikdb/issues/2 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 3. 安装坑 · 来源证据：Migration v14→v15 fails: ALTER TABLE on edges view\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：Migration v14→v15 fails: ALTER TABLE on edges view\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_bb378d100e9d472892b1d5e42e640cad | https://github.com/yantrikos/yantrikdb/issues/10 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 4. 安装坑 · 来源证据：[bug] Tombstoned memories still appear in similarity-scan recall results\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：[bug] Tombstoned memories still appear in similarity-scan recall results\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_aa3d426055a44483b47ffd3b9f3fdb6a | https://github.com/yantrikos/yantrikdb/issues/8 | 来源类型 github_issue 暴露的待验证使用条件。\n\n## 5. 
安装坑 · 来源证据：[bug] YANTRIKDB_ENCRYPTION_KEY_HEX env var ignored — encryption silently disabled\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：[bug] YANTRIKDB_ENCRYPTION_KEY_HEX env var ignored — encryption silently disabled\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_17652fc680ba4b64bee5018b2d1514e4 | https://github.com/yantrikos/yantrikdb/issues/6 | 来源讨论提到 docker 相关条件，需在安装/试用前复核。\n\n## 6. 安装坑 · 来源证据：v0.7.10 — Fix has_embedder() for Python-side embedders (plugin#4)\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.10 — Fix has_embedder() for Python-side embedders (plugin#4)\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_daa2ca5265524c83bb21727be2a980a1 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.10 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 7. 安装坑 · 来源证据：v0.7.11 — pyo3 0.28.3 + python3.14 Support\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.11 — pyo3 0.28.3 + python3.14 Support\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_91b7975fce7d49b6b87ef05b914e80b2 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.11 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 8. 安装坑 · 来源证据：v0.7.4 — Python Bindings: with_default + record_text/recall_text\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.4 — Python Bindings: with_default + record_text/recall_text\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_54938994017d4b5899ad9cef4e6a2723 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.4 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 9. 
安装坑 · 来源证据：v0.7.5 — Python UX: TypeError Guard + embedder-download in Default Wheel\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.5 — Python UX: TypeError Guard + embedder-download in Default Wheel\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_be61ad4afd5b4f669a6f727d727474c4 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.5 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 10. 配置坑 · 可能修改宿主 AI 配置\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：项目面向 Claude/Cursor/Codex/Gemini/OpenCode 等宿主，或安装命令涉及用户配置目录。\n- 对用户的影响：安装可能改变本机 AI 工具行为，用户需要知道写入位置和回滚方法。\n- 建议检查：列出会写入的配置文件、目录和卸载/回滚步骤。\n- 防护动作：涉及宿主配置目录时必须给回滚路径，不能只给安装命令。\n- 证据：capability.host_targets | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | host_targets=mcp_host, claude, claude_code\n\n## 11. 配置坑 · 来源证据：v0.7.7 — recall_text Keyword-Only Filter Args\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个配置相关的待验证问题：v0.7.7 — recall_text Keyword-Only Filter Args\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_45587e0ca02f4e95ac36c364d3a88519 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.7 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 12. 能力坑 · 能力判断依赖假设\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：README/documentation is current enough for a first validation pass.\n- 对用户的影响：假设不成立时，用户拿不到承诺的能力。\n- 建议检查：将假设转成下游验证清单。\n- 防护动作：假设必须转成验证项；没有验证结果前不能写成事实。\n- 证据：capability.assumptions | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | README/documentation is current enough for a first validation pass.\n\n## 13. 
运行坑 · 来源证据：think() runs consolidation before conflict detection — contradictions get merged\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个运行相关的待验证问题：think() runs consolidation before conflict detection — contradictions get merged\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_6908447fb6a6482f89b1a85e714de42a | https://github.com/yantrikos/yantrikdb/issues/1 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 14. 维护坑 · 维护活跃度未知\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：未记录 last_activity_observed。\n- 对用户的影响：新项目、停更项目和活跃项目会被混在一起，推荐信任度下降。\n- 建议检查：补 GitHub 最近 commit、release、issue/PR 响应信号。\n- 防护动作：维护活跃度未知时，推荐强度不能标为高信任。\n- 证据：evidence.maintainer_signals | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | last_activity_observed missing\n\n## 15. 安全/权限坑 · 下游验证发现风险项\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：no_demo\n- 对用户的影响：下游已经要求复核，不能在页面中弱化。\n- 建议检查：进入安全/权限治理复核队列。\n- 防护动作：下游风险存在时必须保持 review/recommendation 降级。\n- 证据：downstream_validation.risk_items | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | no_demo; severity=medium\n\n## 16. 安全/权限坑 · 存在安全注意事项\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：No sandbox install has been executed yet; downstream must verify before user use.\n- 对用户的影响：用户安装前需要知道权限边界和敏感操作。\n- 建议检查：转成明确权限清单和安全审查提示。\n- 防护动作：安全注意事项必须面向用户前置展示。\n- 证据：risks.safety_notes | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | No sandbox install has been executed yet; downstream must verify before user use.\n\n## 17. 安全/权限坑 · 存在评分风险\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：no_demo\n- 对用户的影响：风险会影响是否适合普通用户安装。\n- 建议检查：把风险写入边界卡，并确认是否需要人工复核。\n- 防护动作：评分风险必须进入边界卡，不能只作为内部分数。\n- 证据：risks.scoring_risks | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | no_demo; severity=medium\n\n## 18. 
安全/权限坑 · 来源证据：[bug] POST /v1/admin/snapshot unusable in single-node mode — requires cluster master token that doesn't exist\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：[bug] POST /v1/admin/snapshot unusable in single-node mode — requires cluster master token that doesn't exist\n- 对用户的影响：可能影响授权、密钥配置或安全边界。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_80497be2ab644e66be4fec1a966b4c10 | https://github.com/yantrikos/yantrikdb/issues/7 | 来源讨论提到 node 相关条件，需在安装/试用前复核。\n\n## 19. 安全/权限坑 · 来源证据：[bug] at-rest encryption `key_hex` in TOML has no effect on disk (v0.5.0)\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：[bug] at-rest encryption `key_hex` in TOML has no effect on disk (v0.5.0)\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_ca7c8f7ee1384f9d97652734d01b8d67 | https://github.com/yantrikos/yantrikdb/issues/3 | 来源讨论提到 docker 相关条件，需在安装/试用前复核。\n\n## 20. 安全/权限坑 · 来源证据：v0.7.6 — Drop sentence-transformers + numpy from Default Deps\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：v0.7.6 — Drop sentence-transformers + numpy from Default Deps\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_40bcf8933f1b4ec7a559a746497c3bae | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.6 | 来源讨论提到 windows 相关条件，需在安装/试用前复核。\n\n## 21. 
安全/权限坑 · 来源证据：v0.7.8 — Extended Idempotent Migration Runner (closes #10)\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：v0.7.8 — Extended Idempotent Migration Runner (closes #10)\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_e5a77701b7ac401a863105d996cb585c | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.8 | 来源类型 github_release 暴露的待验证使用条件。\n\n## 22. 安全/权限坑 · 来源证据：v0.7.9 — Bundle potion-multilingual-128M (101 Languages) in embedder-download Registry\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：v0.7.9 — Bundle potion-multilingual-128M (101 Languages) in embedder-download Registry\n- 对用户的影响：可能影响授权、密钥配置或安全边界。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_7a590e518c884b5b9a2bbdc995c372fd | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.9 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 23. 维护坑 · issue/PR 响应质量未知\n\n- 严重度：low\n- 证据强度：source_linked\n- 发现：issue_or_pr_quality=unknown。\n- 对用户的影响：用户无法判断遇到问题后是否有人维护。\n- 建议检查：抽样最近 issue/PR，判断是否长期无人处理。\n- 防护动作：issue/PR 响应未知时，必须提示维护风险。\n- 证据：evidence.maintainer_signals | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | issue_or_pr_quality=unknown\n\n## 24. 维护坑 · 发布节奏不明确\n\n- 严重度：low\n- 证据强度：source_linked\n- 发现：release_recency=unknown。\n- 对用户的影响：安装命令和文档可能落后于代码，用户踩坑概率升高。\n- 建议检查：确认最近 release/tag 和 README 安装命令是否一致。\n- 防护动作：发布节奏未知或过期时，安装说明必须标注可能漂移。\n- 证据：evidence.maintainer_signals | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | release_recency=unknown\n\n<!-- canonical_name: yantrikos/yantrikdb; human_manual_source: deepwiki_human_wiki -->\n",
      "summary": "DeepWiki/Human Wiki 完整输出，末尾追加 Discovery Agent 踩坑日志。",
      "title": "Human Manual / 人类版说明书"
    },
    "pitfall_log": {
      "asset_id": "pitfall_log",
      "filename": "PITFALL_LOG.md",
      "markdown": "# Pitfall Log / 踩坑日志\n\n项目：yantrikos/yantrikdb\n\n摘要：发现 24 个潜在踩坑项，其中 0 个为 high/blocking；最高优先级：安装坑 - 来源证据：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication。\n\n## 1. 安装坑 · 来源证据：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：API addition: deterministic mutation primitives (record_with_rid + friends) for cluster-mode replication\n- 对用户的影响：可能阻塞安装或首次运行。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_4ab95be6a3ac4fb192053e8c3829f762 | https://github.com/yantrikos/yantrikdb/issues/9 | 来源讨论提到 node 相关条件，需在安装/试用前复核。\n\n## 2. 安装坑 · 来源证据：Bug: `namespace` parameter ignored in batch `remember` calls — memories always stored under `default`\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：Bug: `namespace` parameter ignored in batch `remember` calls — memories always stored under `default`\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_c37cd96e9c8d476880caca4f7314118e | https://github.com/yantrikos/yantrikdb/issues/2 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 3. 安装坑 · 来源证据：Migration v14→v15 fails: ALTER TABLE on edges view\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：Migration v14→v15 fails: ALTER TABLE on edges view\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_bb378d100e9d472892b1d5e42e640cad | https://github.com/yantrikos/yantrikdb/issues/10 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 4. 
安装坑 · 来源证据：[bug] Tombstoned memories still appear in similarity-scan recall results\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：[bug] Tombstoned memories still appear in similarity-scan recall results\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_aa3d426055a44483b47ffd3b9f3fdb6a | https://github.com/yantrikos/yantrikdb/issues/8 | 来源类型 github_issue 暴露的待验证使用条件。\n\n## 5. 安装坑 · 来源证据：[bug] YANTRIKDB_ENCRYPTION_KEY_HEX env var ignored — encryption silently disabled\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：[bug] YANTRIKDB_ENCRYPTION_KEY_HEX env var ignored — encryption silently disabled\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_17652fc680ba4b64bee5018b2d1514e4 | https://github.com/yantrikos/yantrikdb/issues/6 | 来源讨论提到 docker 相关条件，需在安装/试用前复核。\n\n## 6. 安装坑 · 来源证据：v0.7.10 — Fix has_embedder() for Python-side embedders (plugin#4)\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.10 — Fix has_embedder() for Python-side embedders (plugin#4)\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_daa2ca5265524c83bb21727be2a980a1 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.10 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 7. 安装坑 · 来源证据：v0.7.11 — pyo3 0.28.3 + python3.14 Support\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.11 — pyo3 0.28.3 + python3.14 Support\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_91b7975fce7d49b6b87ef05b914e80b2 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.11 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 8. 
安装坑 · 来源证据：v0.7.4 — Python Bindings: with_default + record_text/recall_text\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.4 — Python Bindings: with_default + record_text/recall_text\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_54938994017d4b5899ad9cef4e6a2723 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.4 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 9. 安装坑 · 来源证据：v0.7.5 — Python UX: TypeError Guard + embedder-download in Default Wheel\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安装相关的待验证问题：v0.7.5 — Python UX: TypeError Guard + embedder-download in Default Wheel\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_be61ad4afd5b4f669a6f727d727474c4 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.5 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 10. 配置坑 · 可能修改宿主 AI 配置\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：项目面向 Claude/Cursor/Codex/Gemini/OpenCode 等宿主，或安装命令涉及用户配置目录。\n- 对用户的影响：安装可能改变本机 AI 工具行为，用户需要知道写入位置和回滚方法。\n- 建议检查：列出会写入的配置文件、目录和卸载/回滚步骤。\n- 防护动作：涉及宿主配置目录时必须给回滚路径，不能只给安装命令。\n- 证据：capability.host_targets | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | host_targets=mcp_host, claude, claude_code\n\n## 11. 配置坑 · 来源证据：v0.7.7 — recall_text Keyword-Only Filter Args\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个配置相关的待验证问题：v0.7.7 — recall_text Keyword-Only Filter Args\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_45587e0ca02f4e95ac36c364d3a88519 | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.7 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 12. 
能力坑 · 能力判断依赖假设\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：README/documentation is current enough for a first validation pass.\n- 对用户的影响：假设不成立时，用户拿不到承诺的能力。\n- 建议检查：将假设转成下游验证清单。\n- 防护动作：假设必须转成验证项；没有验证结果前不能写成事实。\n- 证据：capability.assumptions | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | README/documentation is current enough for a first validation pass.\n\n## 13. 运行坑 · 来源证据：think() runs consolidation before conflict detection — contradictions get merged\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个运行相关的待验证问题：think() runs consolidation before conflict detection — contradictions get merged\n- 对用户的影响：可能增加新用户试用和生产接入成本。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_6908447fb6a6482f89b1a85e714de42a | https://github.com/yantrikos/yantrikdb/issues/1 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 14. 维护坑 · 维护活跃度未知\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：未记录 last_activity_observed。\n- 对用户的影响：新项目、停更项目和活跃项目会被混在一起，推荐信任度下降。\n- 建议检查：补 GitHub 最近 commit、release、issue/PR 响应信号。\n- 防护动作：维护活跃度未知时，推荐强度不能标为高信任。\n- 证据：evidence.maintainer_signals | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | last_activity_observed missing\n\n## 15. 安全/权限坑 · 下游验证发现风险项\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：no_demo\n- 对用户的影响：下游已经要求复核，不能在页面中弱化。\n- 建议检查：进入安全/权限治理复核队列。\n- 防护动作：下游风险存在时必须保持 review/recommendation 降级。\n- 证据：downstream_validation.risk_items | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | no_demo; severity=medium\n\n## 16. 安全/权限坑 · 存在安全注意事项\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：No sandbox install has been executed yet; downstream must verify before user use.\n- 对用户的影响：用户安装前需要知道权限边界和敏感操作。\n- 建议检查：转成明确权限清单和安全审查提示。\n- 防护动作：安全注意事项必须面向用户前置展示。\n- 证据：risks.safety_notes | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | No sandbox install has been executed yet; downstream must verify before user use.\n\n## 17. 
安全/权限坑 · 存在评分风险\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：no_demo\n- 对用户的影响：风险会影响是否适合普通用户安装。\n- 建议检查：把风险写入边界卡，并确认是否需要人工复核。\n- 防护动作：评分风险必须进入边界卡，不能只作为内部分数。\n- 证据：risks.scoring_risks | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | no_demo; severity=medium\n\n## 18. 安全/权限坑 · 来源证据：[bug] POST /v1/admin/snapshot unusable in single-node mode — requires cluster master token that doesn't exist\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：[bug] POST /v1/admin/snapshot unusable in single-node mode — requires cluster master token that doesn't exist\n- 对用户的影响：可能影响授权、密钥配置或安全边界。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_80497be2ab644e66be4fec1a966b4c10 | https://github.com/yantrikos/yantrikdb/issues/7 | 来源讨论提到 node 相关条件，需在安装/试用前复核。\n\n## 19. 安全/权限坑 · 来源证据：[bug] at-rest encryption `key_hex` in TOML has no effect on disk (v0.5.0)\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：[bug] at-rest encryption `key_hex` in TOML has no effect on disk (v0.5.0)\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_ca7c8f7ee1384f9d97652734d01b8d67 | https://github.com/yantrikos/yantrikdb/issues/3 | 来源讨论提到 docker 相关条件，需在安装/试用前复核。\n\n## 20. 安全/权限坑 · 来源证据：v0.7.6 — Drop sentence-transformers + numpy from Default Deps\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：v0.7.6 — Drop sentence-transformers + numpy from Default Deps\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_40bcf8933f1b4ec7a559a746497c3bae | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.6 | 来源讨论提到 windows 相关条件，需在安装/试用前复核。\n\n## 21. 
安全/权限坑 · 来源证据：v0.7.8 — Extended Idempotent Migration Runner (closes #10)\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：v0.7.8 — Extended Idempotent Migration Runner (closes #10)\n- 对用户的影响：可能影响升级、迁移或版本选择。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_e5a77701b7ac401a863105d996cb585c | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.8 | 来源类型 github_release 暴露的待验证使用条件。\n\n## 22. 安全/权限坑 · 来源证据：v0.7.9 — Bundle potion-multilingual-128M (101 Languages) in embedder-download Registry\n\n- 严重度：medium\n- 证据强度：source_linked\n- 发现：GitHub 社区证据显示该项目存在一个安全/权限相关的待验证问题：v0.7.9 — Bundle potion-multilingual-128M (101 Languages) in embedder-download Registry\n- 对用户的影响：可能影响授权、密钥配置或安全边界。\n- 建议检查：来源显示可能已有修复、规避或版本变化，说明书中必须标注适用版本。\n- 防护动作：不得脱离来源链接放大为确定性结论；需要标注适用版本和复核状态。\n- 证据：community_evidence:github | cevd_7a590e518c884b5b9a2bbdc995c372fd | https://github.com/yantrikos/yantrikdb/releases/tag/v0.7.9 | 来源讨论提到 python 相关条件，需在安装/试用前复核。\n\n## 23. 维护坑 · issue/PR 响应质量未知\n\n- 严重度：low\n- 证据强度：source_linked\n- 发现：issue_or_pr_quality=unknown。\n- 对用户的影响：用户无法判断遇到问题后是否有人维护。\n- 建议检查：抽样最近 issue/PR，判断是否长期无人处理。\n- 防护动作：issue/PR 响应未知时，必须提示维护风险。\n- 证据：evidence.maintainer_signals | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | issue_or_pr_quality=unknown\n\n## 24. 维护坑 · 发布节奏不明确\n\n- 严重度：low\n- 证据强度：source_linked\n- 发现：release_recency=unknown。\n- 对用户的影响：安装命令和文档可能落后于代码，用户踩坑概率升高。\n- 建议检查：确认最近 release/tag 和 README 安装命令是否一致。\n- 防护动作：发布节奏未知或过期时，安装说明必须标注可能漂移。\n- 证据：evidence.maintainer_signals | github_repo:1164482810 | https://github.com/yantrikos/yantrikdb | release_recency=unknown\n",
      "summary": "用户实践前最可能遇到的身份、安装、配置、运行和安全坑。",
      "title": "Pitfall Log / 踩坑日志"
    },
    "prompt_preview": {
      "asset_id": "prompt_preview",
      "filename": "PROMPT_PREVIEW.md",
      "markdown": "# yantrikdb - Prompt Preview\n\n> Copy the prompt below into your AI host before installing anything.\n> Its purpose is to let you safely feel the project's workflow, not to claim the project has already run.\n\n## Copy this prompt\n\n```text\nYou are using an independent Doramagic capability pack for yantrikos/yantrikdb.\n\nProject:\n- Name: yantrikdb\n- Repository: https://github.com/yantrikos/yantrikdb\n- Summary: Cognitive memory engine for AI agents — temporal decay, contradiction detection, autonomous consolidation, knowledge graph, ANN recall via HNSW. Embeddable Rust library with Python bindings; powers yantrikdb-server (HTTP gateway, MCP server, openraft cluster). AGPL.\n- Host target: mcp_host, claude, claude_code\n\nGoal:\nHelp me evaluate this project for the following task without installing it yet: Cognitive memory engine for AI agents — temporal decay, contradiction detection, autonomous consolidation, knowledge graph, ANN recall via HNSW. Embeddable Rust library with Python bindings; powers yantrikdb-server (HTTP gateway, MCP server, openraft cluster). AGPL.\n\nBefore taking action:\n1. Restate my task, success standard, and boundary.\n2. Identify whether the next step requires tools, browser access, network access, filesystem access, credentials, package installation, or host configuration.\n3. Use only the Doramagic Project Pack, the upstream repository, and the source-linked evidence listed below.\n4. If a real command, install step, API call, file write, or host integration is required, mark it as \"requires post-install verification\" and ask for approval first.\n5. If evidence is missing, say \"evidence is missing\" instead of filling the gap.\n\nPreviewable capabilities:\n- Capability 1: Cognitive memory engine for AI agents — temporal decay, contradiction detection, autonomous consolidation, knowledge graph, ANN recall via HNSW. 
Embeddable Rust library with Python bindings; powers yantrikdb-server (HTTP gateway, MCP server, openraft cluster). AGPL.\n\nCapabilities that require post-install verification:\n- Capability 1: Use the source-backed project context to guide one small, checkable workflow step.\n\nCore service flow:\n1. page-overview: Overview. Produce one small intermediate artifact and wait for confirmation.\n2. page-installation: Installation. Produce one small intermediate artifact and wait for confirmation.\n3. page-five-index-architecture: Five-Index Architecture. Produce one small intermediate artifact and wait for confirmation.\n4. page-decoupled-write-path: Decoupled Write Path (LSM Architecture). Produce one small intermediate artifact and wait for confirmation.\n5. page-core-api: Core API Reference. Produce one small intermediate artifact and wait for confirmation.\n\nSource-backed evidence to keep in mind:\n- https://github.com/yantrikos/yantrikdb\n- https://github.com/yantrikos/yantrikdb#readme\n- README.md\n- crates/yantrikdb-core/src/lib.rs\n- pyproject.toml\n- crates/yantrikdb-python/Cargo.toml\n- crates/yantrikdb-python/pyproject.toml\n- src/yantrikdb/__init__.py\n- crates/yantrikdb-core/src/vector/hnsw.rs\n- crates/yantrikdb-core/src/vector/delta_index.rs\n\nFirst response rules:\n1. Start Step 1 only.\n2. Explain the one service action you will perform first.\n3. Ask exactly three questions about my target workflow, success standard, and sandbox boundary.\n4. 
Stop and wait for my answers.\n\nStep 1 follow-up protocol:\n- After I answer the first three questions, stay in Step 1.\n- Produce six parts only: clarified task, success standard, boundary conditions, two or three options, tradeoffs for each option, and one recommendation.\n- End by asking whether I confirm the recommendation.\n- Do not move to Step 2 until I explicitly confirm.\n\nConversation rules:\n- Advance one step at a time and wait for confirmation after each small artifact.\n- Write outputs as recommendations or planned checks, not as completed execution.\n- Do not claim tests passed, files changed, commands ran, APIs were called, or the project was installed.\n- If the user asks for execution, first provide the sandbox setup, expected output, rollback, and approval checkpoint.\n```\n",
      "summary": "不安装项目也能感受能力节奏的安全试用 Prompt。",
      "title": "Prompt Preview / 安装前试用 Prompt"
    },
    "quick_start": {
      "asset_id": "quick_start",
      "filename": "QUICK_START.md",
      "markdown": "# Quick Start / 官方入口\n\n项目：yantrikos/yantrikdb\n\n## 官方安装入口\n\n### Python / pip · 官方安装入口\n\n```bash\npip install yantrikdb-mcp\n```\n\n来源：https://github.com/yantrikos/yantrikdb#readme\n\n## 来源\n\n- repo: https://github.com/yantrikos/yantrikdb\n- docs: https://github.com/yantrikos/yantrikdb#readme\n",
      "summary": "从项目官方 README 或安装文档提取的开工入口。",
      "title": "Quick Start / 官方入口"
    }
  },
  "validation_id": "dval_c248d7a8ec53475a8b3593d4b8b4e781"
}
