From f54d4c96b920c29ee812fe642c6d452d272218ce Mon Sep 17 00:00:00 2001 From: Paul Kyle Date: Sun, 19 Apr 2026 21:54:12 -0700 Subject: [PATCH 1/2] =?UTF-8?q?release:=20v0.7.x=20=E2=80=94=20palinode=20?= =?UTF-8?q?init,=20slash=20commands,=20ASCII=20banner,=20scope=20Layer=201?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .github/workflows/test.yml | 118 ++- .gitignore | 1 + PRD.md | 720 ++++++++++++++ PRIVACY.md | 56 ++ PROGRAM.md | 8 +- README.md | 97 +- SECURITY.md | 33 + claude-plugin/.claude-plugin/plugin.json | 6 +- claude-plugin/README.md | 15 +- docs/ACKNOWLEDGMENTS.md | 2 +- docs/CHANGELOG.md | 76 +- docs/DEPLOYMENT-GUIDE.md | 2 +- docs/GIT-MEMORY.md | 8 +- docs/HOW-MEMORY-WORKS.md | 8 +- docs/INSTALL-CLAUDE-CODE.md | 9 +- docs/OPERATIONS.md | 290 ++++++ examples/compaction-demo/README.md | 2 +- examples/hooks/README.md | 63 ++ examples/hooks/palinode-session-end.sh | 62 ++ examples/hooks/settings.json | 15 + examples/sample-memory/README.md | 15 + palinode/__init__.py | 2 +- palinode/api/server.py | 447 ++++++++- palinode/cli/__init__.py | 32 +- palinode/cli/_api.py | 14 + palinode/cli/init.py | 390 ++++++++ palinode/cli/lint.py | 33 +- palinode/cli/manage.py | 10 + palinode/cli/migrate.py | 95 ++ palinode/cli/save.py | 24 +- palinode/cli/search.py | 2 +- palinode/cli/session_end.py | 21 + palinode/consolidation/write_time.py | 11 +- palinode/core/audit.py | 106 ++ palinode/core/brand.py | 17 + palinode/core/config.py | 52 +- palinode/core/git_tools.py | 4 - palinode/core/lint.py | 26 +- palinode/core/parser.py | 37 +- palinode/core/scope.py | 72 ++ palinode/core/store.py | 6 +- palinode/mcp.py | 102 +- plugin/index.ts | 15 - pyproject.toml | 14 +- scripts/scrub-check.sh | 106 ++ skill/palinode-claude-code/SKILL.md | 2 +- .../palinode-claude-code/references/setup.md | 5 +- skill/palinode-memory/SKILL.md | 4 +- skill/palinode-memory/references/setup.md | 9 +- tests/integration/__init__.py | 0 
tests/integration/test_api_roundtrip.py | 356 +++++++ tests/live/__init__.py | 0 tests/live/test_live_instance.py | 313 ++++++ tests/test_audit.py | 184 ++++ tests/test_cli_init.py | 187 ++++ tests/test_content_hash_and_confidence.py | 153 +++ tests/test_context.py | 20 +- tests/test_daily_penalty.py | 244 +++++ tests/test_description.py | 71 ++ tests/test_entity_normalization.py | 42 + tests/test_lint.py | 74 +- tests/test_migrate_openclaw.py | 254 +++++ tests/test_parser.py | 93 +- tests/test_scope_chain.py | 138 +++ tests/test_search_dedup.py | 150 +++ tests/test_session_end.py | 68 ++ tests/test_store.py | 97 +- uv.lock | 940 ++++++++++++++++++ 68 files changed, 6416 insertions(+), 232 deletions(-) create mode 100644 PRD.md create mode 100644 PRIVACY.md create mode 100644 SECURITY.md create mode 100644 docs/OPERATIONS.md create mode 100644 examples/hooks/README.md create mode 100644 examples/hooks/palinode-session-end.sh create mode 100644 examples/hooks/settings.json create mode 100644 examples/sample-memory/README.md create mode 100644 palinode/cli/init.py create mode 100644 palinode/cli/migrate.py create mode 100644 palinode/core/audit.py create mode 100644 palinode/core/brand.py create mode 100644 palinode/core/scope.py create mode 100755 scripts/scrub-check.sh create mode 100644 tests/integration/__init__.py create mode 100644 tests/integration/test_api_roundtrip.py create mode 100644 tests/live/__init__.py create mode 100644 tests/live/test_live_instance.py create mode 100644 tests/test_audit.py create mode 100644 tests/test_cli_init.py create mode 100644 tests/test_content_hash_and_confidence.py create mode 100644 tests/test_daily_penalty.py create mode 100644 tests/test_description.py create mode 100644 tests/test_entity_normalization.py create mode 100644 tests/test_migrate_openclaw.py create mode 100644 tests/test_scope_chain.py create mode 100644 tests/test_search_dedup.py create mode 100644 tests/test_session_end.py create mode 100644 uv.lock diff 
--git a/.github/workflows/test.yml b/.github/workflows/test.yml index f05a624..22f7e19 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,4 +1,15 @@ -name: Tests +# CI pipeline for Palinode +# Runs on every push to main and on pull requests. +# +# Jobs: +# 1. unit-tests — fast feedback on core logic (no external services) +# 2. integration — placeholder for tests requiring Ollama/external deps +# 3. security-scan — bandit (code) + pip-audit (dependencies) + +name: CI + +env: + FORCE_JAVASCRIPT_ACTIONS_TO_NODE24: true on: push: @@ -7,22 +18,93 @@ on: branches: [ main ] jobs: - test: + # --------------------------------------------------------------------------- + # Unit tests — should never need network access or Ollama. + # All embeddings / LLM calls are mocked in the test suite. + # --------------------------------------------------------------------------- + unit-tests: + runs-on: ubuntu-latest + + strategy: + matrix: + python-version: ["3.11", "3.12"] + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -e ".[dev]" + + - name: Run unit tests + run: pytest tests/ -v --tb=short + + # --------------------------------------------------------------------------- + # Integration tests — placeholder. + # + # When tests/integration/ is created, update the pytest path below. + # Integration tests will likely need an Ollama service container for + # BGE-M3 embeddings. That setup is deferred until the test suite exists. 
+ # --------------------------------------------------------------------------- + integration-tests: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + cache: "pip" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -e ".[dev]" + + - name: Run integration tests + run: pytest tests/integration/ -v --tb=short + + # --------------------------------------------------------------------------- + # Security scans — informational for now (continue-on-error: true). + # + # bandit: static analysis for common Python security issues + # pip-audit: checks installed packages against known vulnerability databases + # + # These run in a single job to save runner time. Once the findings are + # triaged, remove continue-on-error to enforce them on PRs. + # --------------------------------------------------------------------------- + security-scan: runs-on: ubuntu-latest + steps: - - uses: actions/checkout@v4 - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.11" - cache: 'pip' - - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -e .[dev] - - - name: Run Pytest - run: | - pytest tests/ -v + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + cache: "pip" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -e ".[dev]" + pip install bandit pip-audit + + - name: Run bandit (static security analysis) + # -r: recursive, -ll: only medium+ severity findings + run: bandit -r palinode/ -ll + continue-on-error: true + + - name: Run pip-audit (dependency vulnerability check) + run: pip-audit + continue-on-error: true diff --git a/.gitignore b/.gitignore index 33301aa..4e5e3b5 100644 --- a/.gitignore +++ b/.gitignore @@ -48,3 +48,4 @@ venv/ server.log nohup.out 
.engram.db +.claude/worktrees/ diff --git a/PRD.md b/PRD.md new file mode 100644 index 0000000..972b01a --- /dev/null +++ b/PRD.md @@ -0,0 +1,720 @@ +--- +created: 2026-03-22T16:02:00Z +status: draft-v1 +author: Palinode contributors +--- + +# Palinode — Product Requirements Document + +**Persistent memory that makes AI agents smarter over time.** + +--- + +## 1. What Is Palinode + +Palinode is a memory system for long-lived AI agents. It stores what matters as typed, human-readable markdown files; indexes them for semantic search; injects relevant context at the start of every session; extracts new knowledge at the end of every turn; and consolidates raw captures into curated memory over time. + +It is not a knowledge base. It is not a search engine. It is memory — the kind that surfaces without being asked, improves through use, and degrades gracefully when infrastructure fails. + +--- + +## 2. Problem Statement + +AI agents wake up with amnesia every session. Current solutions: + +| Approach | What It Does | Why It Fails | +|---|---|---| +| **Flat file (MEMORY.md)** | Agent reads one big file at session start | Doesn't scale. 22K tokens, mostly irrelevant. No search. Manual maintenance. | +| **Mem0 (autocapture)** | Extracts facts from conversations into vector DB | Thin snippets without context. No types. No consolidation. 2,632 uncurated memories. Retrieval unreliable. | +| **QC MCP (14K contexts)** | Multi-platform capture into Postgres + pgvector | Overengineered. Goes down. No consolidation. 14K contexts = noise, not memory. Agent can't use it natively. | +| **Conversation history** | Model reads prior turns | Context window limit. Lost on session reset. No persistence. | + +All of these store things. None of them *remember*. + +--- + +## 3. Design Principles + +1. **Memory = files.** Markdown with YAML frontmatter. Human-readable, git-versioned, greppable. If every service crashes, `cat` still works. +2. 
**Typed, not flat.** People, projects, decisions, insights — each has a schema. Structure enables reliable retrieval and consolidation. +3. **Consolidation, not accumulation.** 100 sessions should produce 20 well-maintained files, not 100 unread dumps. The system gets smaller and more useful over time. +4. **Invisible when working.** The human talks to their agent. The agent uses Palinode behind the scenes. The only visible outputs are daily digests, weekly reviews, and better conversations. +5. **Graceful degradation.** Vector index down → read files. Embedding service down → grep. Machine off → it's a git repo, clone it anywhere. +6. **Infrastructure-agnostic.** Palinode is a service. OpenClaw is the first client. If the orchestration layer changes, Palinode is portable. +7. **CAG first, RAG at scale.** For small memory (<50 core files), just load the files. No vector search needed. As memory grows, vector search handles scale. The system naturally transitions from "load it all" to "search for relevant chunks." +8. **Zero taxonomy burden.** The human captures. The system classifies, creates entities, maintains the catalog. If the human has to maintain a taxonomy, the system dies. +9. **Nothing hardcoded.** Prompts live in markdown files. Policies live in PROGRAM.md. Thresholds live in config.yaml. The plugin code is plumbing — all behavior is defined in editable, version-controlled text. +10. **Trust through transparency.** Every memory operation is logged. Every file has provenance. Corrections are easy. The system earns trust by being inspectable, not by being perfect. + +--- + +## 4. Users + +**Primary:** AI agents (Claude-based) acting as long-lived personal assistants. + +**Secondary:** The human user who browses, edits, and reviews memory files directly; receives daily digests and weekly reviews. + +**Tertiary:** Other AI agents that read from shared memory (multi-agent setups). + +--- + +## 5. 
Features + +### 5.1 Memory Store (the filing cabinet) + +**What:** Typed markdown files organized by category, with YAML frontmatter for metadata. + +**Directory structure:** + +``` +~/.palinode/ +├── people/ → who you know (relationships, prefs, follow-ups) +├── projects/ → what you're building (status, next actions, blockers) +├── decisions/ → choices made (rationale, what was rejected, supersedes) +├── insights/ → lessons learned (patterns, recurring themes) +├── specs/ → living specs and PRDs (the North Star docs) +├── daily/ → raw session logs (ephemeral, feeds consolidation) +├── research/ → reference material with provenance (source, date, author) +├── inbox/ → unclassified captures (confidence < threshold) +├── archive/ → superseded/old items (kept for audit, excluded from search) +└── PROGRAM.md → drives memory manager behavior (the spec-as-agent pattern) +``` + +**Frontmatter tags:** + +- `core: true` — this file is always loaded at session start (Phase 1 / CAG mode). No vector search needed. +- `status: active | archived | superseded` — controls search visibility and consolidation behavior +- `entities:` — cross-references to other files (auto-maintained by memory manager) + +**Schema evolution:** YAML frontmatter is additive. New fields can be added without updating existing files. Missing fields use defaults. Never break backward compatibility. + +**File format:** + +```yaml +--- +id: decision-langgraph-adoption +created: 2026-03-17T14:00:00Z +last_updated: 2026-03-22T15:00:00Z +category: decision +status: active +core: false +entities: [project/my-app] +supersedes: [] +confidence: 0.92 +source: session/2026-03-17 +--- + +# Decision: Adopt FastAPI for My App Backend + +## Statement +Use FastAPI for the backend API in the microservices pivot. 
+ +## Rationale +- Async-first design matches our event-driven architecture +- Type hints provide automatic request validation +- OpenAPI spec generation simplifies integration testing + +## Alternatives Rejected +- Flask (synchronous, less suited for async workloads) +- Django REST (too heavy for microservice scope) +- Express.js (team more proficient in Python) +``` + +**Schemas (typed objects):** + +| Type | Key Fields | Maps To | +|---|---|---| +| `PersonMemory` | id, name, aliases, role, preferences, relationships, follow_ups, last_contact | `people/*.md` | +| `ProjectSnapshot` | id, name, status, current_work, recent_changes, blockers, linked_decisions | `projects/*.md` | +| `Decision` | id, project_id, statement, rationale, alternatives, supersedes, status | `decisions/*.md` | +| `ActionItem` | description, assignee, due_date, status, related_entities | embedded in project/people files | +| `Insight` | theme, description, evidence_refs, recurrence_count | `insights/*.md` | +| `ResearchRef` | title, source_url, source_file, date, summary, key_points, tags | `research/*.md` | + +### 5.2 Hybrid Index (the search layer) + +**What:** Vector embeddings combined with a full-text search (BM25) index of all memory files, enabling both semantic search and exact-keyword queries across the entire store. + +**Stack:** + +- **Vector & Keyword store:** SQLite-vec + FTS5 (embedded, no server, single file at `palinode/.palinode.db`) +- **Embedding model:** BGE-M3 via Ollama (1024d, 8K context, top-tier retrieval) +- **Embedding server:** Ollama (local or remote GPU) + +**Indexing:** + +- File watcher daemon (`watchdog`) monitors the memory directory +- On file create/modify: parse markdown → split by headings. +- Deduplication: compute `content_hash` (SHA-256) of text. Skip Ollama embedding call if identical to existing hash. +- Insert sections to `chunks` table → auto-syncs to FTS5 virtual table → upserts vectors to SQLite-vec. 
+- On file delete: remove all references for that file_path +- YAML frontmatter parsed as structured metadata payload (not embedded as text) +- Each vector point carries: `file_path`, `section_id`, `category`, `entity_refs`, `created_at`, `last_updated`, `importance`, `tags`, `status` + +**Search:** + +- Hybrid search: Reciprocal Rank Fusion (RRF) combines semantic similarity (cosine distance) with BM25 keyword matching + metadata filtering (category, entity, recency, status). +- Hybrid ranking: RRF combined score × recency weight × importance weight (weights configurable in `specs/prompts/context-assembly.md`) +- Exclude `status: archived` from default search results +- Return file paths + section IDs so the agent can read the full file if needed + +**Hierarchical retrieval (context expansion):** +When a chunk matches, don't return the chunk alone: + +1. Always include the file's YAML frontmatter (structural context) +2. Include adjacent sections from the same file (parent expansion) +3. If the file is short enough and within budget, load the full file +4. Vector search is the first pass; context expansion is the second. Isolated chunks without structure are useless. + +### 5.3 Memory Manager (the brain) + +**What:** An LLM-powered extraction and update pipeline that runs at the end of every agent turn. + +**Extraction (per turn):** + +1. Reads `PROGRAM.md` for current behavior instructions (changing PROGRAM.md changes behavior immediately, no restart) +2. Reads extraction prompt from `specs/prompts/extraction.md` +3. Receives last N messages from the conversation +4. Runs typed extraction: returns structured JSON matching schemas +5. Auto-creates entity files for new people/projects on first mention (no human taxonomy work) + +**Consolidation also reads PROGRAM.md** at start of each pass for current consolidation rules. + +**Update (per candidate):** + +1. For each extracted item: search SQLite-vec for similar existing items (same entity/type, top-k) +2. 
Present old items + new candidate to LLM with tool schema +3. LLM decides: `ADD` (new file/section) | `UPDATE` (modify existing) | `DELETE` (mark archived) | `NOOP` (already known) +4. Apply operation: write/edit markdown file → trigger re-index + +**Conflict resolution:** + +- Recency wins: `last_updated` field determines which version is current +- Explicit supersession: new Decision that contradicts old → old gets `status: superseded`, new gets `supersedes: [old_id]` +- Both versions kept (audit trail); search layer prefers `status: active` + +**Aggressiveness controls** (all defined in PROGRAM.md, not hardcoded): + +- Hard cap: max items per turn, max per type (default 5/2 — tunable) +- Significance threshold: only extract decisions, project changes, person context, lessons — not routine Q&A +- Dry-run mode: log candidate items without applying (for tuning) +- Inbox fallback: uncertain classifications → `inbox/` for human review + +**Trust mechanisms:** + +- **Audit log:** Every ADD/UPDATE/DELETE/NOOP operation logged to `logs/operations.jsonl` with timestamp, source session, confidence, candidate text, and target file +- **Correction flow:** Human says "fix: that decision is wrong" → memory manager re-evaluates → presents options (edit, delete, supersede, reclassify) +- **Configurable receipt:** After each session's extraction, optionally notify the human what was captured (`receiptMode: silent | log | notify`) +- Receipt via daily digest: "Palinode captured 3 items today: 1 decision, 1 project update, 1 person note. 1 item in inbox awaiting review." 
+ +**Task prompt capture:** + +- Detect when a user message looks like a substantial build spec or research request (length > 500 chars, contains structured instructions, mentions deliverables) +- Offer to save to `specs/task-prompts/{project}/` with date and slug +- Store prompt text + metadata: date, project, model used, output reference +- This preserves the "source code" (the spec) alongside the "compiled output" (the result) + +### 5.4 Context Injection (the recall) + +**What:** Dynamic assembly of relevant memory injected into the agent's context at session start and after the first user message. + +**Phase 1 — Core memory (before first user turn, CAG mode):** + +- Load ALL files with `core: true` in frontmatter — no vector search, just read the files +- Typical core files: + - User profile (`people/core.md`) + - Active project specs (`projects/*/program.md`) + - Standing decisions (`decisions/core.md`) + - Key people index (`people/core.md`) +- Budget: configurable via `coreMemoryBudget` (default ~2K tokens) +- If total core memory exceeds budget, prioritize by: most recently updated → highest importance → alphabetical +- **This is CAG for core memory.** No retrieval latency, no similarity threshold. Just load what matters. 
+ +**Phase 2 — Topic-specific recall (after first user message):** + +- Use the first message as a search query against SQLite-vec +- Filter by: matching project, matching people, matching topics +- Rank by: vector score × recency × importance +- Group results: facts, preferences, decisions, recent activity +- Budget: ~2K additional tokens +- Inject as structured sections (not flat bullet list) + +**Tool-based retrieval (during session):** + +- Agent has `palinode_search` tool for on-demand deeper recall +- Triggered when agent detects missing context or ambiguity +- Returns file paths + content sections; agent reads as needed + +**Cold start:** + +- No topic signal yet → load only Phase 1 (profile + generic preferences) +- First user message triggers Phase 2 + +### 5.5 Capture (the input) + +**Three capture modes, one extraction pipeline:** + +**Mode 1: Conversational (automatic + explicit)** + +- **Automatic:** `agent_end` hook extracts typed memories from every substantive turn +- **Explicit:** "Remember: Alice wants 5 modules" → classified and filed immediately +- **Channels:** Telegram, Slack, webchat, CLI — all go through OpenClaw → Palinode plugin + +**Mode 2: Document ingestion (file drops)** + +- Watch folder: `~/palinode-inbox/`, synced to `~/.palinode/inbox/raw/` +- Processing by file type: + - PDF → text extraction (pymupdf) → LLM summarize + extract → `research/*.md` + - Audio (m4a/mp3/wav) → Whisper transcription → transcript → summarize → `research/*.md` + - Video → extract audio → Transcriptor → same as audio + - Markdown/text → classify directly → appropriate bucket + - URL (.webloc/.url/text containing URL) → fetch → readability extract → summarize → `research/*.md` +- Each ingested document produces a research reference file with provenance + extracted insights filed into appropriate buckets + +**Mode 3: Web capture (URLs)** + +- Agent command: "save <url>" +- Fetch → readability extraction → LLM summarize → extract key points → write to `research/` 
+- Or quick mode: "remember: this article says X about Y" → treated as Mode 1 + +### 5.6 Consolidation (the sleep) + +**What:** Background process that distills raw captures into curated memory. The system gets better, not bigger. + +**Weekly consolidation cron:** + +**Per project:** + +- Collect all `daily/` notes from the past week mentioning this project +- LLM prompt: produce status update (3-7 bullets), key decisions with dates, lessons/insights, unresolved TODOs +- Write to `projects/{id}/summary.md` and `insights/{id}.md` +- Move processed daily notes to `archive/` + +**Decision supersession:** + +- Scan for new decisions that contradict existing ones (same project + topic) +- LLM comparison: "Does NEW supersede OLD, complement, or contradict?" +- If supersede: mark old as `status: superseded`, link from new via `supersedes: [old_id]` + +**Cross-project insights:** + +- Feed all recent notes across projects to LLM: + > "Identify recurring themes. For each: 1-2 sentence description + evidence note IDs." 
+- Store as `insights/2026-W12.md` + +**Entity reference maintenance:** + +- Union `entities` lists from source notes into consolidated notes +- Record `source_note_ids` for traceability +- Weekly backward-linking scan: for new entities, search for unlinked references in existing notes + +**Archive management:** + +- Consolidated daily notes → `archive/daily/` +- Superseded decisions → `status: superseded` (stay in `decisions/`, excluded from default search) +- Truly obsolete items → `archive/` with pointer from main file + +### 5.7 Surfacing (the proactive layer) + +**Daily digest (morning, via Telegram):** + +- Top 3 actions across active projects +- Follow-ups due with people +- One thing that might be stuck +- <150 words + +**Weekly review (Sunday evening):** + +- What happened across all projects this week +- Stale items (things that haven't moved) +- Recurring patterns the system noticed +- Suggested focus for next week +- <250 words + +**Session-start nudge:** + +- "You last worked on My App two days ago. Status: M5 Phase 1 complete, waiting on Alice's feedback." +- Injected as part of Phase 1 context + +### 5.8 Quality Metrics + +**Logged per turn:** + +- `session_id`, `turn_id` +- `core_memory_ids_injected` +- `vector_hit_ids` + similarity scores +- `memory_ids_in_prompt` +- User corrections (pattern match: "no that's wrong", "actually we changed X", re-explanations) + +**Tracked metrics:** + +- **Re-prompt rate:** How often the human re-explains something Palinode should know +- **Recency correctness:** For known-change events, did injected memory reflect the latest version? +- **Over-influence:** Did stale memory override explicit user intent? +- **Token efficiency:** How much of the context budget is used by memory injection? + +--- + +## 6. 
Data Stack + +``` +┌─────────────────────────────────────────────────────────────┐ +│ SOURCE OF TRUTH │ +│ │ +│ Markdown files in ~/.palinode/ │ +│ Git-versioned (human-readable, diffable, portable) │ +│ YAML frontmatter for structured metadata │ +└──────────────────────────┬──────────────────────────────────┘ + │ +┌──────────────────────────▼──────────────────────────────────┐ +│ HYBRID INDEX │ +│ │ +│ SQLite-vec + FTS5 (embedded, .palinode.db) │ +│ - Section-level chunks (512-1024 tokens) │ +│ - BGE-M3 embeddings (1536d) via Ollama │ +│ - BM25 full-text search virtual table │ +│ - Metadata payload: file_path, category, entities, │ +│ created_at, last_updated, importance, status │ +│ - Hybrid search: RRF (vector + keyword) + metadata filters │ +└──────────────────────────┬──────────────────────────────────┘ + │ +┌──────────────────────────▼──────────────────────────────────┐ +│ COMPUTE │ +│ │ +│ Memory Manager (Claude via OpenClaw Plugin SDK) │ +│ - Extraction: typed schemas, max 5 items/turn │ +│ - Update: ADD/UPDATE/DELETE/NOOP per candidate │ +│ - Consolidation: weekly cron, LLM-driven merge/supersede │ +│ │ +│ File Watcher (Python watchdog, systemd service) │ +│ - Monitors palinode/ → embeds → upserts to SQLite-vec │ +│ │ +│ Embedding Generation (Ollama BGE-M3) │ +└──────────────────────────┬──────────────────────────────────┘ + │ +┌──────────────────────────▼──────────────────────────────────┐ +│ INTERFACES │ +│ │ +│ OpenClaw Plugin (openclaw-palinode) │ +│ - before_agent_start → inject context │ +│ - agent_end → extract memories │ +│ - Tools: palinode_search, palinode_save, palinode_ingest │ +│ - CLI: openclaw palinode search/stats/consolidate │ +│ │ +│ Capture Points │ +│ - Telegram / Slack / webchat / CLI (via OpenClaw) │ +│ - Watch folder (synced to inbox/raw/) │ +│ - Web capture (agent fetches URL → summarizes → files) │ +│ │ +│ Surfacing │ +│ - Daily digest (cron → Telegram) │ +│ - Weekly review (cron → Telegram) │ +│ - Session-start nudge (Phase 1 
injection) │ +│ │ +│ Future: MCP server for cross-tool access │ +└─────────────────────────────────────────────────────────────┘ +``` + +--- + +## 7. Architecture Diagram + +``` +CAPTURE PROCESSING MEMORY +─────── ────────── ────── + +Telegram ────┐ ~/.palinode +Slack ───────┤ ┌──────────────┐ ├── people/*.md +Webchat ─────┼→ OpenClaw ──────→│ Palinode Plugin │ ├── projects/*.md +CLI ─────────┘ Agent │ │ ├── decisions/*.md + │ │ before_start │──→ inject ├── insights/*.md + │ │ agent_end │──→ extract ├── specs/*.md + │ │ tools │──→ search ├── daily/*.md + │ └──────┬───────┘ ├── research/*.md + │ │ ├── inbox/ +Watch ───────┐ │ ▼ ├── archive/ +File Drop ───┼───┼──→ Ingestion → Extraction ├── .palinode.db +URL Capture ─┘ │ Pipeline Pipeline └── PROGRAM.md + │ │ │ + │ ▼ ▼ + │ Transcriptor Memory Manager + │ (typed schemas) + │ ADD/UPDATE/DELETE/NOOP + │ │ + │ ▼ + │ Write markdown + index + │ + └──→ File Watcher Daemon (systemd) + watches palinode/ → embed via Ollama → upsert SQLite-vec + +Weekly Cron ────→ Consolidation: merge dailies → supersede decisions → extract insights +Daily Cron ─────→ Morning digest → Telegram +Sunday Cron ────→ Weekly review → Telegram +``` + +--- + +## 8. 
Integration with OpenClaw + +**Implementation:** OpenClaw Plugin (`openclaw-palinode`) + +**Plugin hooks used:** + +| Hook | Purpose | +|---|---| +| `before_agent_start` | Inject Phase 1 (core memory) + Phase 2 (topic-specific) context | +| `agent_end` | Extract typed memories from conversation | +| `command:new` / `command:reset` | Trigger full session extraction before context reset | +| `session:compact:before` | Extract from conversation before compaction discards detail | +| `agent:bootstrap` | Inject core memory files into bootstrap (via `bootstrap-extra-files` config) | +| `message:received` | Detect explicit "remember:" prefix captures | +| `gateway:startup` | Initialize SQLite-vec, verify Ollama connectivity | + +**Tools registered:** + +| Tool | Description | +|---|---| +| `palinode_search` | Semantic + metadata search across all memory files | +| `palinode_save` | Explicit capture — classify and file a thought/fact/decision | +| `palinode_ingest` | Process a URL, file, or document into research + extracted insights | +| `palinode_status` | Show memory stats: file counts, last consolidation, index health | + +**CLI commands:** + +| Command | Description | +|---|---| +| `openclaw palinode search <query>` | Search from terminal | +| `openclaw palinode stats` | Memory statistics | +| `openclaw palinode consolidate` | Run consolidation manually | +| `openclaw palinode reindex` | Rebuild SQLite-vec from files | + +**Config:** + +```yaml +extensions: + openclaw-palinode: + # Paths + palinodeDir: "~/.palinode" # Memory store root + programFile: "PROGRAM.md" # Memory manager behavior spec (relative to palinodeDir) + promptsDir: "specs/prompts" # System prompts directory (relative to palinodeDir) + + # Embedding + ollamaUrl: "http://localhost:11434" # Ollama endpoint for embeddings + embeddingModel: "bge-m3" # Model name — change without code changes + + # Behavior + autoCapture: true # Extract memories after each agent turn + autoRecall: true # Inject context before 
each agent turn + receiptMode: "digest" # silent | log | notify | digest + + # Budgets + coreMemoryBudget: 2048 # Max tokens for Phase 1 (core/CAG) injection + topicMemoryBudget: 2048 # Max tokens for Phase 2 (topic-specific) injection + + # Search + searchThreshold: 0.6 # Minimum similarity score for results + searchTopK: 10 # Max results per search + confidenceThreshold: 0.6 # Below this → inbox for human review + + # Schedules (cron expressions) + consolidationSchedule: "0 3 * * 0" # Sunday 3am UTC + dailyDigestSchedule: "0 14 * * *" # 7am Pacific (14:00 UTC) + weeklyReviewSchedule: "0 1 * * 0" # Sunday 1am UTC + + # Git + autoCommit: true # Commit after extraction/consolidation + gitRemote: "" # Remote for push (empty = no push) +``` + +All behavior-level configuration (extraction aggressiveness, what to capture, what to ignore, consolidation rules) lives in `PROGRAM.md` and `specs/prompts/*.md`, NOT in this config. Config is for plumbing. PROGRAM.md is for policy. + +**Transition from Mem0:** + +1. Install `openclaw-palinode` alongside `openclaw-mem0` +2. Both run in parallel — Mem0 continues autorecall, Palinode does its own +3. Agent has both `memory_search` (Mem0) and `palinode_search` (Palinode) +4. Once Palinode proves better retrieval, disable Mem0's autoRecall/autoCapture +5. Eventually remove `openclaw-mem0` extension + +--- + +## 9. Prompts as Source Code + +### The Karpathy/YC Parallel + +In Karpathy's autoresearch, the most important file is not `train.py` (the code the agent modifies). It's `program.md` (the spec that tells the agent how to think). The human iterates on the spec; the agent iterates on the work. They never touch each other's domain. 
+ +``` +autoresearch: palinode: + program.md → agent behavior PROGRAM.md → memory manager behavior + train.py → the work specs/prompts → the executable prompts + results.tsv → experiment log quality metrics → experiment log +``` + +YC/HumanLayer's 12 Factor Agents extends this: **"Your prompts and specs are the source code. Throwing them away after generating output is like compiling Java and checking in the .jar but not the .java."** + +This applies at three levels in Palinode: + +### Level 1: System Prompts (how the machinery thinks) + +The prompts that drive Palinode's behavior — extraction, update, consolidation, context assembly, ingestion, surfacing. These live as editable markdown files, not hardcoded strings. + +``` +specs/prompts/ +├── extraction.md ← typed extraction prompt +├── update.md ← ADD/UPDATE/DELETE/NOOP decision prompt +├── consolidation.md ← weekly merge/supersede logic +├── context-assembly.md ← how to build session-start injection +├── ingestion.md ← how to process documents/URLs +└── digest.md ← daily/weekly review generation +``` + +The plugin reads prompts from files: + +```typescript +// Prompts are files, not strings +const extractionPrompt = fs.readFileSync( + path.join(palinodeDir, 'specs/prompts/extraction.md'), 'utf-8' +); +``` + +When you tune extraction, you edit a markdown file. `git log specs/prompts/extraction.md` shows the evolution of how the system learned to think about memory. + +### Level 2: Task Prompts (instructions given to agents) + +When you spend 30 minutes writing a prompt to an agent — "build a data pipeline spec with Producer/Consumer role structure" — that prompt is the specification. The output is the compiled artifact. Losing the prompt means losing the intent, constraints, and reasoning. + +``` +specs/task-prompts/ +├── my-app/ ← M*-EXECUTE-PROMPT.md files (already doing this!) 
+├── onboarding/ ← assignment specs, process prompts +└── palinode/ ← research prompts, build prompts +``` + +The M-EXECUTE-PROMPT.md pattern from My App is already this practice — generalized across all work. + +**Capture rule:** When a substantial prompt produces a substantial output, save the prompt alongside the output. The memory manager should detect "this looks like a build spec or research request" and offer to save it to `specs/task-prompts/`. + +### Level 3: Meta-Prompts (the system's instructions to itself) + +- **PROGRAM.md** — how the memory manager should behave +- **AGENTS.md** — how the agent should behave +- **SOUL.md** — who the agent is + +These are already captured as files. They're the highest-level prompts in the system — everything else flows from them. + +### The Compounding Loop + +``` +PROGRAM.md defines behavior + → specs/prompts/*.md execute that behavior + → quality metrics measure results + → human updates PROGRAM.md or prompt files + → behavior improves + → next session is better than this one +``` + +Prompts are not disposable. They're the most durable artifact in the system — more durable than the code that runs them (which can be regenerated from the prompts) and more useful than the outputs (which are just one execution of the spec). + +--- + +## 10. 
Operational Concerns + +**Git automation:** + +- After each extraction pass: auto-commit with message `palinode: extracted N items from session {id}` +- After consolidation: `palinode: weekly consolidation {date}` +- `.palinode.db` in `.gitignore` — it's a derived index, rebuildable from files +- `logs/` in `.gitignore` — operational data, not source of truth +- Push to remote: configurable, periodic (daily cron or post-consolidation) + +**Startup and health:** + +- On startup: verify Ollama is reachable; if not, log warning but continue (files still readable, search degraded) +- File watcher daemon: systemd service with auto-restart +- Health check: `openclaw palinode status` shows: file counts, index freshness, last extraction, last consolidation, Ollama reachability + +**Backup:** + +- Primary: git remote (GitHub/Gitea/NAS) +- Secondary: files are plain text on disk — any backup tool works (rsync, Syncthing, Time Machine) +- Disaster recovery: clone the repo + `openclaw palinode reindex` rebuilds the vector index from files + +**Monitoring:** + +- `logs/operations.jsonl` for audit +- Systemd journal for file watcher daemon +- Quality metrics logged per-turn (Section 5.8) + +**Schema evolution:** + +- YAML frontmatter is additive — new fields don't break old files +- Old files without new fields use defaults +- Never require a migration to add a schema field + +**Scope:** + +- Single-user by design. Multi-user would require RLS (see OB1's pattern) and is out of scope. + +--- + +## 11. What Palinode Is Not + +- **Not a knowledge base.** It doesn't try to store everything. It stores what matters and forgets what doesn't. +- **Not a search engine.** Search is a capability, not the purpose. The purpose is making the agent smarter. +- **Not a notes app.** Humans can read the files, but Palinode is designed for agent consumption first. +- **Not coupled to OpenClaw.** The plugin is an integration layer. The service underneath is portable. 
+- **Not a replacement for conversation.** Palinode is context, not personality. SOUL.md, AGENTS.md, and the system prompt remain the agent's character. + +--- + +## 12. Evolution from QC MCP + +| Dimension | QC MCP (v1, Sept 2025) | Palinode (v2, 2026) | +|---|---|---| +| **Metaphor** | Library (vast, searchable, go to it) | Brain (surfaces what's relevant, consolidates, forgets) | +| **Source of truth** | PostgreSQL rows | Markdown files (git-versioned) | +| **Failure mode** | Server down = memory gone | Files on disk = always accessible | +| **Structure** | Semi-structured (domains, tags, importance) | Typed schemas (Person, Project, Decision, Insight) | +| **Consolidation** | Metabolism concept (keyword matching, never ran in production) | Weekly LLM-driven merge/supersede/archive cron | +| **Agent integration** | MCP bridge (tool the agent calls) | Plugin lifecycle hooks (part of how the agent thinks) | +| **Scale strategy** | Accumulate everything (14K contexts) | Consolidate to what matters (~200 curated files) | +| **Graph** | Separate graph-builder service (batch process) | Frontmatter cross-references (inline, always available) | +| **Retrieval** | Semantic search only | Core memory injection + semantic + metadata + entity matching | +| **Infrastructure** | 4+ services across 3 machines | Files + SQLite-vec + one daemon | + +--- + +## 13. 
Implementation Phases + +| Phase | Scope | Timeline | +| --- | --- | --- | +| **0: MVP** | SQLite-vec + file watcher + session-end extraction + 2 tools + Phase 1 injection | 1 week | +| **0.5: Capture** | Slack channel + Telegram formalization + watch folder + ingestion pipeline | During/after MVP | +| **1: Core Memory** | Two-phase injection + core memory files + retire MEMORY.md | Week 2 | +| **2: Consolidation** | Weekly cron + entity linking + insights extraction | Weeks 3-4 | +| **3: Migration** | Backfill from Mem0 (2,632) + QC MCP (14K) selectively | Week 4+ | +| **4: Multi-Agent + MCP** | Multi-agent read access + MCP server for external tools | Future | + +--- + +## 14. Success Criteria + +**After 1 week (MVP):** + +- [ ] Agent remembers project state across sessions without MEMORY.md +- [ ] Semantic search returns relevant results for project/people queries +- [ ] The user re-explains stable facts less often + +**After 1 month:** + +- [ ] Consolidation produces weekly summaries that are more useful than raw daily notes +- [ ] Daily digest is worth reading 4+ days/week +- [ ] Memory file count grows sub-linearly (consolidation keeps it manageable) +- [ ] Mem0 autorecall disabled — Palinode is strictly better + +**After 3 months:** + +- [ ] Multiple agents share Palinode (read access for all agent profiles) +- [ ] Palinode has survived at least one infrastructure failure without data loss +- [ ] The user trusts the system enough to stop manually curating MEMORY.md diff --git a/PRIVACY.md b/PRIVACY.md new file mode 100644 index 0000000..088cc07 --- /dev/null +++ b/PRIVACY.md @@ -0,0 +1,56 @@ +# Privacy Policy + +**Palinode** — Persistent memory for AI agents +**Effective date:** April 12, 2026 +**Last updated:** April 12, 2026 + +--- + +## 1. Open Source (Self-Hosted) + +When you run Palinode yourself using the open source package (`pip install palinode`), **no data leaves your machine** unless you explicitly configure it to. 
+ +- **No telemetry.** Palinode does not phone home, track usage, or collect analytics. +- **No accounts required.** There is no sign-up, no API key, no registration. +- **Your data stays on your filesystem.** Memory files are markdown files in a directory you control. The index is a local SQLite database. +- **Embeddings are computed locally.** By default, Palinode uses Ollama running on your machine. No data is sent to external embedding services unless you configure a remote endpoint. +- **Git operations are your choice.** If you configure `palinode push` to sync with a remote git repository, that's your repository under your control. Palinode does not operate or have access to any remote git service. + +**We have no access to your data.** Phase Space (the company behind Palinode) does not receive, store, process, or have visibility into any data you create, index, or search with the self-hosted version. + +## 2. Optional Cloud Services (Future) + +Phase Space may offer optional hosted services in the future, such as managed API hosting, hosted embeddings, or team synchronization. If and when these services become available: + +- **Opt-in only.** No data will be sent to Phase Space services unless you explicitly enable them. +- **Data processing limited to the service.** Your data will only be processed as needed to provide the service you opted into (e.g., computing embeddings, syncing memory across team members). +- **No selling or sharing.** Your data will never be sold to or shared with third parties for advertising, training, or any other purpose. +- **Export and deletion.** You can export all your data or request deletion at any time. Your memory files remain markdown on your filesystem regardless of cloud features. +- **Transparency.** If cloud services store or process your data, we will document exactly what is stored, where, and for how long. + +This section will be updated with specific terms before any cloud service launches. + +## 3. 
Enterprise + +Enterprise customers who require formal data processing agreements, on-premises deployment, or compliance certifications (SOC 2, GDPR DPA, etc.) can contact us at paul@phasespace.co. Enterprise deployments are available fully on-premises with no external dependencies. + +## 4. Third-Party Services + +Palinode integrates with services you configure: + +- **Ollama** (or any OpenAI-compatible endpoint) for embeddings and LLM consolidation. Data sent to these services is governed by their respective privacy policies. +- **Git hosting** (GitHub, GitLab, etc.) if you configure remote push. Data sent to these services is governed by their respective privacy policies. + +Palinode does not require or default to any third-party service. + +## 5. Changes to This Policy + +We will update this policy as Palinode evolves. Material changes will be noted in the changelog and release notes. The effective date at the top of this document reflects the most recent revision. + +## Contact + +For privacy questions: paul@phasespace.co + +--- + +*Phase Space* diff --git a/PROGRAM.md b/PROGRAM.md index 12b33c9..b7b5ade 100644 --- a/PROGRAM.md +++ b/PROGRAM.md @@ -63,7 +63,7 @@ Extract only things that will be useful **across sessions** — facts that a fut - *Example:* "Alice uses VS Code + Gemini 3.1 Pro (High) as default for executing milestone build specs." → Preference (tool + workflow). - *Example:* "Don't comment on time of day or suggest quitting." → Preference (communication). Already known — likely NOOP. -- *NOT example:* Paul used vim once in a session → don't infer "prefers vim." Single instances aren't preferences. +- *NOT example:* the user opened vim once in a session → don't infer "prefers vim." Single instances aren't preferences. **Technical context** — extract when it represents a *decision*, not just mentioned in passing. 
@@ -120,7 +120,7 @@ id: person-{slug} category: person name: Full Name aliases: [nickname, shortened] -role: their relationship to Paul +role: their relationship to the user core: false # set true for inner circle entities: [project/related-project] last_contact: 2026-03-22 @@ -129,7 +129,7 @@ last_updated: 2026-03-22T16:00:00Z # Full Name ## Context -Who they are, how Paul knows them, what their role is. +Who they are, how the user knows them, what their role is. ## Preferences & Communication How they like to work, communication style, things to remember. @@ -275,7 +275,7 @@ last_updated: 2026-03-22T16:00:00Z - Bullet list of the important takeaways. ## Relevance -Why this matters for Paul's work. +Why this matters for the user's work. ``` --- diff --git a/README.md b/README.md index 9fe16ef..b50add1 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,16 @@ -# Palinode + -**Persistent memory for AI agents. Markdown in, markdown out, git everything.** +``` +┌─ palinode ─┐ +│ ░░░░░░░░░░ │ +│ ▓▓▓▓▓▓▓▓▓▓ │ +│ ██████████ │ +└────────────┘ +``` -Your agent's memory is a folder of markdown files. Palinode indexes them with hybrid search, compacts them with an LLM, and exposes them through any interface you want — MCP, REST API, CLI, or plugin hooks. If every service crashes, `cat` still works. +**The memory substrate for AI agents and developer tools. Git-versioned, file-native, MCP-first.** + +Your agent's memory is a folder of markdown files. Palinode indexes them with hybrid search, compacts them with an LLM, and serves them through MCP — so the same memory works in Claude Code, Cursor, Windsurf, Zed, VS Code (Continue/Cline), and any other MCP-compatible editor. Enterprises can govern AI memory the same way they govern code. If every service crashes, `cat` still works. *A palinode is a poem that retracts what was said before and says it better. 
That's what memory compaction does.* @@ -17,7 +25,7 @@ Files (markdown + YAML frontmatter) ↓ watched Index (SQLite-vec vectors + FTS5 keywords, single .db file) ↓ queried by -Interfaces (MCP server, REST API, CLI, plugin hooks) +Interfaces (MCP server, REST API, CLI, OpenClaw plugin) ↓ compacted by LLM (proposes ops → deterministic executor applies them → git commits) ``` @@ -32,10 +40,10 @@ Palinode doesn't care how you talk to it. The same 17 tools work everywhere: | Interface | Transport | Best For | |-----------|-----------|----------| -| **MCP Server** | Streamable HTTP or stdio | Claude Code, Claude Desktop, Cursor, Zed | +| **MCP Server** | Streamable HTTP or stdio | Claude Code, Claude Desktop, Cursor, Windsurf, Zed, VS Code (Continue/Cline) | | **REST API** | HTTP on :6340 | Scripts, webhooks, custom integrations | | **CLI** | Wraps REST API | Cron jobs, SSH, shell scripts (8x fewer tokens than MCP) | -| **Plugin** | Agent lifecycle hooks | Agent frameworks with inject/extract patterns | +| **Plugin** | OpenClaw lifecycle hooks | Agent frameworks with inject/extract patterns | Set up once on a server. Connect from any machine, any IDE, any agent framework. The MCP server is a pure HTTP client — it holds no state, no database connection, no embedder. Point it at the API and go. @@ -47,7 +55,7 @@ Set up once on a server. Connect from any machine, any IDE, any agent framework. } ``` -That's the entire client config for Claude Code, Claude Desktop, Cursor, or Zed. See [docs/MCP-SETUP.md](docs/MCP-SETUP.md) for multi-IDE setup, or [docs/INSTALL-CLAUDE-CODE.md](docs/INSTALL-CLAUDE-CODE.md) for Claude Code specifically. +That's the entire client config. Works with Claude Code, Claude Desktop, Cursor, Windsurf, Zed, and VS Code (Continue/Cline). See [docs/MCP-SETUP.md](docs/MCP-SETUP.md) for editor-specific paths. --- @@ -65,6 +73,34 @@ That's the entire client config for Claude Code, Claude Desktop, Cursor, or Zed. 
--- +## Getting started in 60 seconds (Claude Code) + +Already have Palinode installed and `palinode-api` running? Drop it into any +project in one command: + +```bash +cd your-project +palinode init +``` + +That scaffolds: + +- `.claude/CLAUDE.md` — memory instructions for the agent (appended if one + already exists) +- `.claude/settings.json` — a `SessionEnd` hook that auto-captures on `/clear`, + logout, and normal exit +- `.claude/hooks/palinode-session-end.sh` — the hook script itself +- `.mcp.json` — points Claude Code at the `palinode` MCP server + +Open the project in Claude Code and your agent will search prior context on +startup, save decisions as you work, and snapshot the session on `/clear`. No +server restarts, no settings menus, no copy-paste. + +Re-run with `--dry-run` to preview, `--force` to overwrite, or `--no-mcp` +/ `--no-hook` to scope what gets installed. + +--- + ## Quick Start ```bash @@ -89,6 +125,43 @@ curl http://localhost:6340/status > Your memory directory is **private**. It contains personal data. Never make it public. The code repo contains zero memory files. +> For a pre-populated demo, copy `examples/sample-memory/` to `~/.palinode/`. + +--- + +## Usage Examples + +### Save a decision, recall it later + +```bash +# During a session — save a decision +palinode save --type Decision "Chose SQLite over Postgres for the cache layer. \ + Reason: no ops burden, single-file deployment, good enough for our scale." 
+ +# Next week — search for it +palinode search "database decision for cache" +``` + +### End-of-session capture + +```bash +# Agent calls at end of coding session +palinode session-end \ + --summary "Migrated auth from JWT to session tokens" \ + --decisions "Session tokens stored server-side, 24h expiry" \ + --blockers "Need to update mobile client auth flow" +``` + +### Audit trail — who decided what and when + +```bash +# Trace a fact back to when it was recorded +palinode blame decisions/auth-migration.md + +# See what changed across all memory in the last week +palinode diff --days 7 +``` + --- ## Tools @@ -103,11 +176,11 @@ curl http://localhost:6340/status | `read` | Read the full content of a memory file | | `ingest` | Fetch a URL and save as research | | `status` | Health check — file counts, index stats, service status | -| `history` | Git history with diff stats, rename tracking, and limit | | `entities` | Entity graph — cross-references between memories | | `consolidate` | Preview or run LLM-powered compaction | | `diff` | What changed in the last N days | | `blame` | Trace a fact back to the commit that recorded it | +| `history` | Git history for a file with diff stats and rename tracking | | `rollback` | Revert a file to a previous commit (safe, creates new commit) | | `push` | Sync memory to a remote git repo | | `trigger` | Prospective recall — auto-inject when a topic comes up | @@ -146,12 +219,14 @@ status: active entities: [person/paul] last_updated: 2026-04-05T00:00:00Z summary: "Persistent memory for AI agents." +canonical_question: "What is Palinode and what does it do?" --- # Palinode Your content here. As detailed or brief as you want. Files marked `core: true` are always in context. Everything else is retrieved on demand via hybrid search. +The `canonical_question` field anchors the file to the question it answers, improving search relevance. ``` Open your memory directory as an [Obsidian](https://obsidian.md) vault for visual browsing. 
See [docs/OBSIDIAN-SETUP.md](docs/OBSIDIAN-SETUP.md). @@ -196,7 +271,7 @@ All models are swappable. Any Ollama embedding model, any OpenAI-compatible chat - **Ollama** with `bge-m3` (`ollama pull bge-m3`) - **Git** -Optional: a chat model for consolidation (any 7B+ works). +Optional: a chat model for consolidation (any 7B+ works), OpenClaw for agent plugin hooks. --- @@ -242,6 +317,8 @@ Optional: a chat model for consolidation (any 7B+ works). ## What's Unique +- **Your data, your files** — No accounts, no cloud dependency, no vendor lock-in. Your memory is markdown files in a directory you control. Export is `cp`. Backup is `git push`. Whatever happens to any tool in this ecosystem, your data is plain text on your filesystem. +- **Cross-IDE memory** — Your memory lives in one place. Connect from Claude Code, Cursor, Windsurf, Zed, or any MCP-compatible editor. Switch IDEs without losing context. - **Git operations as agent tools** — `diff`, `blame`, `rollback`, `push` exposed via MCP. No other system makes git ops callable by the agent. - **Operation-based compaction** — KEEP/UPDATE/MERGE/SUPERSEDE/ARCHIVE DSL. LLM proposes, deterministic executor disposes. Every compaction is a reviewable git commit. - **Per-fact addressability** — `<!-- fact-id -->` IDs inline in markdown, invisible in rendering, preserved by git, targetable by compaction. @@ -263,7 +340,7 @@ If you know of prior art we missed, please [open an issue](https://github.com/ph ## License -MIT +MIT — [Privacy Policy](PRIVACY.md) --- diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..4377238 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,33 @@ +# Security Policy + +## Reporting a Vulnerability + +If you discover a security vulnerability in Palinode, please report it responsibly.
+ +**Email:** paul@phasespace.co + +**What to include:** +- Description of the vulnerability +- Steps to reproduce +- Potential impact +- Suggested fix (if you have one) + +**Response timeline:** +- Acknowledgment within 48 hours +- Assessment and plan within 7 days +- Fix released as soon as practical, with credit to the reporter (unless you prefer anonymity) + +**Please do not:** +- Open a public GitHub issue for security vulnerabilities +- Exploit the vulnerability beyond what's needed to demonstrate it + +## Scope + +Palinode runs locally on your machine. The primary attack surface is: +- Path traversal in file operations (mitigated: all paths validated against PALINODE_DIR) +- API endpoint abuse (mitigated: rate limiting, request size limits) +- LLM prompt injection via memory content (mitigated: deterministic executor, LLM never writes files directly) + +## Supported Versions + +Security fixes are applied to the latest release only. diff --git a/claude-plugin/.claude-plugin/plugin.json b/claude-plugin/.claude-plugin/plugin.json index 544c894..f35e346 100644 --- a/claude-plugin/.claude-plugin/plugin.json +++ b/claude-plugin/.claude-plugin/plugin.json @@ -1,13 +1,13 @@ { "name": "palinode", "description": "Persistent memory for AI agents. Git-versioned markdown files as source of truth, hybrid SQLite-vec + FTS5 search, deterministic compaction. 
Every line in your agent's brain has a git blame.", - "version": "0.7.0", + "version": "0.6.1", "author": { "name": "Paul Kyle", "url": "https://github.com/Paul-Kyle" }, - "homepage": "https://github.com/Paul-Kyle/palinode", - "repository": "https://github.com/Paul-Kyle/palinode", + "homepage": "https://github.com/phasespace-labs/palinode", + "repository": "https://github.com/phasespace-labs/palinode", "license": "MIT", "keywords": [ "memory", diff --git a/claude-plugin/README.md b/claude-plugin/README.md index 0f4fc93..c32c109 100644 --- a/claude-plugin/README.md +++ b/claude-plugin/README.md @@ -38,7 +38,7 @@ curl http://localhost:11434/api/tags Install from source (PyPI publish coming soon): ```bash -git clone https://github.com/Paul-Kyle/palinode.git +git clone https://github.com/phasespace-labs/palinode.git cd palinode pip install -e . ``` @@ -85,7 +85,7 @@ systemctl --user enable --now palinode-api palinode-watcher Once Palinode is installed and the services are running, install this Claude Code plugin: ``` -/plugin install palinode@Paul-Kyle +/plugin install palinode@phasespace-labs ``` Or, during development, point Claude Code at this directory directly: @@ -108,7 +108,7 @@ Once installed and connected, the plugin exposes 17 MCP tools to Claude Code: - `palinode_entities` — entity graph traversal (people, projects, decisions) ### Save and capture -- `palinode_save` — write a new memory item. Supports type, entities, core flag, slug, source. As of v0.6.2, saves run write-time contradiction checking in the background (ADR-004). +- `palinode_save` — write a new memory item. Supports type, entities, core flag, slug, source. As of v0.6.0, saves run write-time contradiction checking in the background. 
- `palinode_session_end` — capture session outcomes (summary, decisions, blockers) to daily notes and project status files - `palinode_ingest` — fetch a URL and store it as a research reference @@ -160,11 +160,10 @@ Run `palinode_status` and check `total_files` and `fts_chunks`. If both are 0, t ## Learn more -- [Main repository](https://github.com/Paul-Kyle/palinode) -- [CHANGELOG](https://github.com/Paul-Kyle/palinode/blob/main/docs/CHANGELOG.md) for what's in v0.7.0 -- [Compaction demo](https://github.com/Paul-Kyle/palinode/tree/main/examples/compaction-demo) — walkthrough of a memory file across three consolidation passes with blame + diff output -- [ADR-001: Tools Over Pipeline](https://github.com/Paul-Kyle/palinode/blob/main/ADR-001-tools-over-pipeline.md) — why the executor is deterministic +- [Main repository](https://github.com/phasespace-labs/palinode) +- [CHANGELOG](https://github.com/phasespace-labs/palinode/blob/main/docs/CHANGELOG.md) for what's in v0.6.1 +- [Compaction demo](https://github.com/phasespace-labs/palinode/tree/main/examples/compaction-demo) — walkthrough of a memory file across three consolidation passes with blame + diff output ## License -MIT. See [LICENSE](https://github.com/Paul-Kyle/palinode/blob/main/LICENSE) in the main repository. +MIT. See [LICENSE](https://github.com/phasespace-labs/palinode/blob/main/LICENSE) in the main repository. diff --git a/docs/ACKNOWLEDGMENTS.md b/docs/ACKNOWLEDGMENTS.md index aaf41b7..ee21d9f 100644 --- a/docs/ACKNOWLEDGMENTS.md +++ b/docs/ACKNOWLEDGMENTS.md @@ -9,4 +9,4 @@ Palinode builds on ideas from across the agent memory landscape: - [Hermes](https://github.com/NousResearch/hermes-agent) — FTS5 sanitization patterns, security scanning - [OB1](https://github.com/NateBJones-Projects/OB1) — two-door capture pattern (structured vs unstructured intake) -If you know of prior art we missed, please [open an issue](https://github.com/Paul-Kyle/palinode/issues). 
+If you know of prior art we missed, please [open an issue](https://github.com/phasespace-labs/palinode/issues). diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 978a3f3..b6c0d03 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -2,70 +2,20 @@ All notable changes to Palinode. Format follows [Keep a Changelog](https://keepachangelog.com/en/1.1.0/). -## [0.7.0] — 2026-04-12 +## Unreleased ### Added -**Search quality** -- **Score-gap dedup** (#91) — additional chunks from the same file are kept only if within `dedup_score_gap` (default 0.2) of the file's best score. Reduces noise from multi-section files dominating results. -- **G1 context boost fix** (#92) — `store.search()` now accepts `context_entities` for ADR-008 ambient context boost. Previously the boost only fired through `search_hybrid`. -- **Raw cosine exposure** (#94) — search results include a `raw_score` field with the original cosine similarity before RRF normalization. -- **Daily penalty** (#93) — `daily/` files receive `score * daily_penalty` (default 0.3) to prevent daily notes from dominating search results. New `include_daily` parameter opts out of the penalty. Exposed as an MCP tool parameter. -- **Canonical question frontmatter** (#83) — `canonical_question` frontmatter field (string or list) is prepended as `"Q: ..."` to the first chunk before embedding, anchoring each memory to the question it answers. -- **Confidence field + content_hash in frontmatter** (#113, #114) — new `confidence` field for memory files, full SHA-256 content hash stored in frontmatter for integrity verification. +**Search quality (M0.5)** +- **Score-gap dedup** (#91, `52966ae`) — additional chunks from the same file are kept only if within `dedup_score_gap` (default 0.2) of the file's best score. Reduces noise from multi-section files dominating results. +- **G1 context boost fix** (#92, `f156a3d`) — `store.search()` now accepts `context_entities` for ambient context boost. 
Previously the boost only fired through `search_hybrid`. +- **Raw cosine exposure** (#94, `ee8931a`) — search results include a `raw_score` field with the original cosine similarity before RRF normalization. +- **Daily penalty** (#93, `d18240c`) — `daily/` files receive `score * daily_penalty` (default 0.3) to prevent daily notes from dominating search results. New `include_daily` parameter opts out of the penalty. Exposed as an MCP tool parameter. +- **Canonical question frontmatter** (#83, `dbe5703`) — `canonical_question` frontmatter field (string or list) is prepended as `"Q: ..."` to the first chunk before embedding, anchoring each memory to the question it answers. -**Security** -- CORS, rate limiting, request size limits, stack trace sanitization for the API server -- MCP audit log — structured JSONL tool call logging (#116) - -**CI/CD** -- GitHub Actions pipeline — unit tests + security scan (#121) - -**Testing** -- Integration test suite — 14 API roundtrip tests (#120) +**Tests** - 175 tests passing (up from 149) -**Documentation** -- Multi-platform MCP setup guide (`docs/MCP-SETUP.md`) — Claude Code, Cursor, VS Code, Zed, Windsurf -- PyPI-ready pyproject.toml with metadata and classifiers - -### Changed - -- `palinode_timeline` merged into `palinode_history` — one tool with `--follow`, diff stats, structured JSON return, and `limit` parameter -- Tool count: 18 → 17 (timeline/history consolidated) -- README repositioned as memory substrate, not just persistent memory - -### Removed - -- Migration endpoints and CLI commands (`palinode.migration` module removed) -- `docs/claude-code-setup.md` — replaced by `docs/MCP-SETUP.md` - ---- - -## [0.6.0] — 2026-04-11 - -### Added - -**Write-time contradiction check (ADR-004)** -- When saving a memory, the system checks for contradictions against existing files in the same entity scope -- Contradiction candidates surfaced before the save completes, with configurable thresholds - -**Ambient context search (ADR-008)** 
-- Search results boosted by project context inferred from the caller's working directory -- Resolution chain: `PALINODE_PROJECT` env var → config project map → CWD auto-detect - -**RETRACT operation** -- New executor operation: `RETRACT` — marks a memory fact as wrong with a visible tombstone -- Strikethrough formatting with `[RETRACTED date — reason]` annotation -- Fact ID preserved so readers know what was retracted and why - -**Claude Code plugin scaffold** -- `claude-plugin/` directory with plugin manifest for Claude Code marketplace submission - -**Claude Code skills** -- `palinode-claude-code` — MCP setup and usage for Claude Code sessions -- `palinode-session` — automatic session lifecycle memory capture - --- ## [0.5.0] — 2026-04-10 @@ -88,8 +38,8 @@ First tagged release. Persistent memory for AI agents with git-versioned markdow - Prompt versioning system — extraction/compaction prompts stored as memory files with `active: true` frontmatter **Interfaces (all four expose the same capabilities)** -- **MCP server** — Streamable HTTP transport (also supports stdio). Stateless HTTP client, point it at any Palinode API server -- **REST API** — FastAPI on port 6340, 20+ endpoints covering search, save, diff, triggers, history, blame, rollback, consolidation, session-end, lint +- **MCP server** — Streamable HTTP transport (also supports stdio) with 18 tools. Stateless HTTP client, point it at any Palinode API server +- **REST API** — FastAPI on port 6340, 20+ endpoints covering search, save, diff, triggers, history, blame, rollback, consolidation, session-end, lint, migrate - **CLI** — 26 commands wrapping the REST API via Click. TTY-aware (human output interactive, JSON when piped). Remote access via `PALINODE_API` env var - **Plugin** — OpenClaw lifecycle hooks for agent frameworks with inject/extract patterns @@ -104,6 +54,9 @@ First tagged release. 
Persistent memory for AI agents with git-versioned markdow - Session-end hook for Claude Code — auto-captures sessions on exit, idempotent, non-blocking - Entity extraction from daily notes with keyword fallback for untagged content +**Migration** +- `palinode migrate` — import existing markdown memory systems (OpenClaw format) with `--review` mode for dry-run inspection + **Security and hardening** - Path validation on all file operations (rejects `..`, symlinks outside memory directory) - Secret scrubbing on save path via configurable regex patterns @@ -115,7 +68,7 @@ First tagged release. Persistent memory for AI agents with git-versioned markdow - Compaction walkthrough (`examples/compaction-demo/`) — a memory file across 3 passes with blame + diff output **Tests** -- 92 tests covering parser, store, executor, API, CLI, and hybrid search +- 92 tests covering parser, store, executor, API, CLI, migration, and hybrid search ### Changed - All inference is local by default. Cloud API keys (Gemini, OpenAI) are opt-in via environment variables @@ -126,6 +79,7 @@ First tagged release. Persistent memory for AI agents with git-versioned markdow ### Fixed - Watcher no longer crashes the API server if the memory directory is temporarily unavailable - CLI display keys match API response keys across all commands +- Migration tool correctly handles frontmatter with embedded colons ### Removed - Deprecated SSE MCP transport (replaced by Streamable HTTP per canonical MCP SDK pattern) diff --git a/docs/DEPLOYMENT-GUIDE.md b/docs/DEPLOYMENT-GUIDE.md index e4b62b5..8a03867 100644 --- a/docs/DEPLOYMENT-GUIDE.md +++ b/docs/DEPLOYMENT-GUIDE.md @@ -32,7 +32,7 @@ Install Palinode on **one** machine (the server). Client machines need zero loca ```bash # Install -git clone https://github.com/Paul-Kyle/palinode +git clone https://github.com/phasespace-labs/palinode cd palinode python3 -m venv venv && source venv/bin/activate pip install -e . 
diff --git a/docs/GIT-MEMORY.md b/docs/GIT-MEMORY.md index 30ef91d..0eea69b 100644 --- a/docs/GIT-MEMORY.md +++ b/docs/GIT-MEMORY.md @@ -7,7 +7,7 @@ Palinode treats memory as code. Every fact, decision, and project detail your ag Because the memory directory is just a Git repository, Palinode provides built-in tools to inspect it without requiring you to use the Git CLI manually. These tools are available via: - **CLI Commands:** for human administrators. - **MCP Tools:** for LLMs via Claude Code. -- **Plugin hooks:** read-only tools for the chat agent. +- **OpenClaw Plugin:** read-only tools for the chat agent. ### 1. Diff @@ -41,12 +41,12 @@ palinode blame decisions/app-five-modules.md --search "5 modules" # (origin: 2026-04-06, source: palinode — dates match) ``` -### 3. Timeline +### 3. History -Watch a structured memory evolve. Timeline shows all changes to a specific file, ordered chronologically. +Watch a structured memory evolve. History shows all commits that touched a specific file, with diff stats and rename tracking. ```bash -palinode timeline projects/my-app.md +palinode history projects/my-app.md ``` ### 4. Rollback (Admin Only) diff --git a/docs/HOW-MEMORY-WORKS.md b/docs/HOW-MEMORY-WORKS.md index 7095caa..17a02ea 100644 --- a/docs/HOW-MEMORY-WORKS.md +++ b/docs/HOW-MEMORY-WORKS.md @@ -49,7 +49,7 @@ graph TD ## 1. Session Recall (Every Agent Turn) -**Hook:** `before_agent_start` in the agent plugin +**Hook:** `before_agent_start` in the OpenClaw plugin Every time you send a message, Palinode injects relevant context **before the agent sees your message**. This happens in four phases: @@ -120,7 +120,7 @@ Before injection, all content passes through `specs/scrub-patterns.yaml` — reg ## 2. Session Capture (End of Every Turn) -**Hook:** `agent_end` in the agent plugin +**Hook:** `agent_end` in the OpenClaw plugin After each agent response, the plugin captures the conversation to a daily note: @@ -289,9 +289,7 @@ Each chunk is hashed before embedding. 
If the hash matches the existing entry, t ## 6. Git Versioning (Every Change) -**Repo:** `Paul-Kyle/palinode-data` (PRIVATE) - -Every memory change is a git commit. This enables: +Memories live in a git repository — typically a private repo you own, local or pushed to a remote of your choice. Every memory change is a git commit. This enables: | Tool | What It Does | Example | | --- | --- | --- | diff --git a/docs/INSTALL-CLAUDE-CODE.md b/docs/INSTALL-CLAUDE-CODE.md index 4144ac0..b49f51b 100644 --- a/docs/INSTALL-CLAUDE-CODE.md +++ b/docs/INSTALL-CLAUDE-CODE.md @@ -1,6 +1,6 @@ # Installing Palinode with Claude Code -Palinode gives Claude Code persistent memory via MCP — 17 tools for searching, saving, and managing memories across sessions. The `palinode-session` skill auto-captures milestones and decisions during coding, so your memory stays fresh without manual effort. +Palinode gives Claude Code persistent memory via MCP — 18 tools for searching, saving, and managing memories across sessions. The `palinode-session` skill auto-captures milestones and decisions during coding, so your memory stays fresh without manual effort. ## Prerequisites @@ -30,7 +30,7 @@ Best if you run Claude Code on the same machine as Palinode. ### 1. Install Palinode ```bash -git clone https://github.com/Paul-Kyle/palinode.git ~/palinode +git clone https://github.com/phasespace-labs/palinode.git ~/palinode cd ~/palinode python3 -m venv venv && source venv/bin/activate pip install -e . @@ -201,7 +201,7 @@ The MCP endpoint is `http://your-server:6341/mcp`. Configure your IDE: ### Network Access The MCP server needs to be reachable from your IDE. 
Options: -- **VPN / overlay network**: Use a VPN or overlay network IP if the server is on a different network +- **Tailscale** (recommended): Install on both machines, use the Tailscale IP (e.g., `http://100.x.x.x:6341/mcp`) - **LAN**: Use the server's local IP if on the same network - **SSH tunnel**: `ssh -L 6341:localhost:6341 youruser@your-server` then use `http://localhost:6341/mcp` @@ -279,12 +279,11 @@ Search palinode for "recent project decisions" | `palinode_read` | Read the full content of a specific memory file | | `palinode_ingest` | Fetch a URL and save as research reference | | `palinode_status` | File counts, index health, entity graph size | -| `palinode_history` | Git history for a specific memory file | +| `palinode_history` | Git history for a file with diff stats and rename tracking | | `palinode_entities` | List known entities and their relationships | | `palinode_consolidate` | Run or preview the weekly compaction job | | `palinode_diff` | See what changed in memory recently (git diff) | | `palinode_blame` | Who/when each line was written (git blame) | -| `palinode_history` | Git history with diff stats and rename tracking | | `palinode_rollback` | Revert a file to a previous state | | `palinode_push` | Push memory changes to remote git | | `palinode_trigger` | Register a prospective recall trigger | diff --git a/docs/OPERATIONS.md b/docs/OPERATIONS.md new file mode 100644 index 0000000..6d9d01b --- /dev/null +++ b/docs/OPERATIONS.md @@ -0,0 +1,290 @@ +# Palinode Operations Guide + +How to upgrade, recover from crashes, and maintain a healthy Palinode installation. + +--- + +## Core Safety Guarantee + +**Your markdown files are the source of truth.** The SQLite database, vector index, and FTS5 keyword index are all derived from files. If anything goes wrong with the database, delete it and reindex. Your memories are safe as long as the files exist. 
+ +``` +Files (markdown + YAML frontmatter) ← source of truth, git-versioned + ↓ derived +Database (.palinode.db) ← rebuild anytime with `palinode reindex` +``` + +--- + +## Upgrading + +### Standard upgrade + +```bash +# 1. Backup (always, even if you trust git) +cp -r ~/.palinode ~/.palinode-backup-$(date +%Y%m%d) + +# 2. Update code +cd /path/to/palinode +git pull +pip install -e . + +# 3. Restart services +systemctl --user restart palinode-api palinode-watcher +# Or however you run them (screen, tmux, Docker, etc.) + +# 4. Verify +palinode doctor +palinode status + +# 5. Reindex to pick up new features +palinode reindex +``` + +### What reindex does + +For each `.md` file in your memory directory: + +1. **Parse** — reads frontmatter and splits body into sections +2. **Hash compare** — computes SHA-256 of each section, checks against stored hash +3. **Skip unchanged** — if hash matches, no Ollama call (zero cost) +4. **Re-embed changed** — if hash differs, calls Ollama BGE-M3 for a new embedding +5. **Update entities** — refreshes the entity graph from frontmatter +6. **Rebuild FTS5** — drops and recreates the keyword search index + +**Reindex is safe to run on a live system.** Searches continue to work during reindex. The only brief lock is during FTS5 rebuild (milliseconds). + +### What uses Ollama + +| Operation | Needs Ollama? | Model | When | +|-----------|:---:|-------|------| +| Reindex (unchanged files) | No | — | Hash matches, skipped | +| Reindex (changed files) | Yes | BGE-M3 | Embeds new content | +| Search | Yes | BGE-M3 | Embeds the query | +| Save | Yes | BGE-M3 | Embeds on write | +| Summary generation | Yes | Chat model | Only for `core: true` files missing summaries | +| List, read, diff, blame, rollback | No | — | File/git operations only | + +If Ollama is unreachable during reindex, embedding failures are logged and skipped. The file is not indexed until Ollama comes back and you reindex again. 
+
+---
+
+## Recovery Scenarios
+
+### Database corrupted or missing
+
+```bash
+# Delete the database
+rm ~/.palinode/.palinode.db
+
+# Rebuild from files
+palinode reindex
+```
+
+Your memories are untouched. The database is rebuilt from scratch. This takes a few minutes for large memory stores (one Ollama call per file section).
+
+### Ollama is down
+
+Everything except search, save, and the embedding step of reindex continues to work:
+
+| Works without Ollama | Needs Ollama |
+|---------------------|-------------|
+| `palinode list` | `palinode search` |
+| `palinode read` | `palinode save` (embedding step) |
+| `palinode diff` | `palinode reindex` (embedding step) |
+| `palinode blame` | |
+| `palinode history` | |
+| `palinode rollback` | |
+| `palinode push` | |
+| `palinode lint` | |
+
+To check Ollama connectivity:
+```bash
+palinode doctor
+```
+
+### API server won't start
+
+Check the basics:
+```bash
+# Is the port in use?
+lsof -i :6340
+
+# Check logs
+journalctl --user -u palinode-api --since "5 minutes ago"
+
+# Try running manually to see errors
+PALINODE_DIR=~/.palinode palinode-api
+```
+
+Common causes:
+- Another process on port 6340
+- Missing `PALINODE_DIR` environment variable
+- Python dependencies changed (run `pip install -e .`)
+
+### FTS5 index corrupted
+
+Symptoms: keyword searches return errors or no results, but vector search works.
+
+```bash
+# Rebuild just the keyword index (fast, no Ollama needed)
+palinode rebuild-fts
+```
+
+### Git history issues
+
+Palinode auto-commits on save. If git gets into a bad state:
+
+```bash
+cd ~/.palinode
+
+# Check status
+git status
+
+# If there are uncommitted changes
+git add -A && git commit -m "manual recovery commit"
+
+# If HEAD is detached
+git checkout main
+```
+
+### Watcher crashes with "inotify watch limit reached" (Linux)
+
+The file watcher uses inotify to detect changes. Large memory directories can exceed the default Linux limit. 
+
+```bash
+# Check current limit
+cat /proc/sys/fs/inotify/max_user_watches
+
+# Increase (immediate)
+sudo sysctl -w fs.inotify.max_user_watches=524288
+
+# Make permanent
+echo 'fs.inotify.max_user_watches=524288' | sudo tee -a /etc/sysctl.conf
+
+# Restart watcher
+systemctl --user restart palinode-watcher
+```
+
+### File accidentally deleted
+
+```bash
+cd ~/.palinode
+
+# Find the commit that deleted the file (newest entry is the deletion)
+git log --all -- path/to/deleted-file.md
+
+# Restore it from the parent of the commit that deleted it
+git checkout <deleting-commit>^ -- path/to/deleted-file.md
+
+# Reindex to update the database
+palinode reindex
+```
+
+### Memory file has wrong content
+
+Every save is a git commit. Use Palinode's built-in tools:
+
+```bash
+# See the file's history
+palinode history path/to/file.md
+
+# See who wrote each line, and when
+palinode blame path/to/file.md
+
+# Revert to a previous version (creates a new commit, safe)
+palinode rollback path/to/file.md
+```
+
+Or use the MCP tools from your IDE — `palinode_history`, `palinode_blame`, `palinode_rollback` do the same thing.
+
+---
+
+## Maintenance
+
+### Health check
+
+```bash
+palinode doctor
+```
+
+Reports: API connectivity, Ollama reachability, file count, embedding health.
+
+### Lint
+
+```bash
+palinode lint
+```
+
+Scans for: orphaned files, stale active files (>90 days), missing frontmatter fields, missing descriptions, core file count.
+
+### Disk usage
+
+The database is typically 1-5% the size of your memory files. For reference:
+- 100 memory files → ~2MB database
+- 1,000 memory files → ~20MB database
+
+The largest component is the vector index (1024 floats per chunk).
+
+### Log files
+
+- **API operations log:** `{PALINODE_DIR}/logs/operations.jsonl`
+- **MCP audit log:** `{PALINODE_DIR}/.audit/mcp-calls.jsonl` (every tool call with timing)
+
+### Backup strategy
+
+Your memory directory is a git repo. 
The simplest backup: + +```bash +# Push to a remote (GitHub, GitLab, private server) +palinode push + +# Or manually +cd ~/.palinode && git push origin main +``` + +For belt-and-suspenders: +```bash +# Periodic filesystem backup +cp -r ~/.palinode /backup/palinode-$(date +%Y%m%d) +``` + +The `.palinode.db` file does NOT need to be backed up — it's rebuilt from files with `palinode reindex`. + +--- + +## Environment Variables + +| Variable | Default | Purpose | +|----------|---------|---------| +| `PALINODE_DIR` | `~/.palinode` | Memory directory root | +| `PALINODE_API_HOST` | `127.0.0.1` | API bind address | +| `PALINODE_CORS_ORIGINS` | `http://localhost:3000,http://127.0.0.1:3000` | Allowed CORS origins (comma-separated) | +| `PALINODE_RATE_LIMIT_SEARCH` | `100` | Max search requests per minute per IP | +| `PALINODE_RATE_LIMIT_WRITE` | `30` | Max write requests per minute per IP | +| `PALINODE_MAX_REQUEST_BYTES` | `5242880` (5MB) | Max request body size | +| `PALINODE_HARNESS` | auto-detected | Harness identity for scoped memory | +| `PALINODE_PROJECT` | auto-detected from CWD | Project context for ambient search boost | +| `PALINODE_MEMBER` | none | Member identity for scoped memory | + +--- + +## Systemd Setup (Linux) + +Example service files are in the `systemd/` directory of the repo. 
+ +```bash +# Copy to user systemd directory +cp systemd/palinode-api.service ~/.config/systemd/user/ +cp systemd/palinode-watcher.service ~/.config/systemd/user/ + +# Edit paths and environment variables +# Then: +systemctl --user daemon-reload +systemctl --user enable palinode-api palinode-watcher +systemctl --user start palinode-api palinode-watcher + +# Check status +systemctl --user status palinode-api +systemctl --user status palinode-watcher +``` diff --git a/examples/compaction-demo/README.md b/examples/compaction-demo/README.md index 56b640c..da9bc44 100644 --- a/examples/compaction-demo/README.md +++ b/examples/compaction-demo/README.md @@ -41,7 +41,7 @@ Between pass 1 and pass 2 (9 operations): - 4 × `ARCHIVE` — moved superseded entries to `archive/2026/my-app-status.md` - 2 × `UPDATE` — final wording pass on merged lines -All 22 operations were **proposed by an LLM** but **applied by deterministic Python** (`palinode/consolidation/executor.py`). The LLM never touches the file directly — it only emits JSON like `{"op": "SUPERSEDE", "id": "f-0317-1", "superseded_by": "f-0324-2", "reason": "stripe integration actually shipped on the 24th"}` and the executor validates and applies it. That's the [ADR-001](../../ADR-001-tools-over-pipeline.md) invariant in action. +All 22 operations were **proposed by an LLM** but **applied by deterministic Python** (`palinode/consolidation/executor.py`). The LLM never touches the file directly — it only emits JSON like `{"op": "SUPERSEDE", "id": "f-0317-1", "superseded_by": "f-0324-2", "reason": "stripe integration actually shipped on the 24th"}` and the executor validates and applies it. ## Why this matters diff --git a/examples/hooks/README.md b/examples/hooks/README.md new file mode 100644 index 0000000..feaf4dc --- /dev/null +++ b/examples/hooks/README.md @@ -0,0 +1,63 @@ +# Palinode Claude Code hooks + +Drop-in hooks that auto-capture Claude Code sessions to Palinode. 
+
+## What's here
+
+| File | What it does |
+|------|--------------|
+| `palinode-session-end.sh` | SessionEnd hook — captures a snapshot of the transcript to palinode-api on session exit, including `/clear`, logout, and normal exit |
+| `settings.json` | The Claude Code hook registration that points at the script |
+
+## Zero-friction install
+
+From your project root:
+
+```bash
+palinode init
+```
+
+That scaffolds everything below into the current project — `.claude/CLAUDE.md`,
+`.claude/settings.json`, the hook script, and `.mcp.json`. Idempotent; re-run with
+`--force` to overwrite.
+
+## Manual install
+
+If you prefer to wire it up by hand:
+
+```bash
+mkdir -p .claude/hooks
+cp palinode-session-end.sh .claude/hooks/
+chmod +x .claude/hooks/palinode-session-end.sh
+cp settings.json .claude/settings.json # or merge into an existing one
+```
+
+Make sure `palinode-api` is running (default: `http://localhost:6340`). Override
+with `PALINODE_API_URL` if you run it on another host.
+
+## Why `/clear` matters
+
+`/clear` in Claude Code resets the conversation context. Without a hook, every
+insight, decision, and bug root cause from that session vanishes. The SessionEnd
+hook registration sets no matcher, so it fires on every SessionEnd reason —
+`/clear` included. Even if you forget to call `palinode_session_end` manually,
+a fallback snapshot is captured.
+
+For the best record, have the agent call `palinode_session_end` explicitly
+*before* `/clear` runs — the hook's fallback only has the transcript to work
+with, whereas the agent can synthesize a structured summary with decisions and
+blockers.
+
+## Tuning
+
+Environment variables the hook respects:
+
+| Variable | Default | Purpose |
+|----------|---------|---------|
+| `PALINODE_API_URL` | `http://localhost:6340` | Where to POST the capture |
+| `PALINODE_HOOK_MIN_MESSAGES` | `3` | Minimum user messages before capture fires (skips trivial sessions) |
+
+## Fail-silent
+
+The hook is designed to never block Claude Code exit. 
If the API is down, the +capture is dropped and the hook exits 0. Check `palinode status` to verify the +API is reachable — and re-run sessions that matter. diff --git a/examples/hooks/palinode-session-end.sh b/examples/hooks/palinode-session-end.sh new file mode 100644 index 0000000..e644d27 --- /dev/null +++ b/examples/hooks/palinode-session-end.sh @@ -0,0 +1,62 @@ +#!/bin/bash +# palinode-session-end.sh — Auto-capture Claude Code sessions to Palinode. +# +# Fires on SessionEnd (including /clear, logout, exit). Reads the transcript +# from stdin JSON, extracts a minimal summary, and POSTs to palinode-api. +# +# Fail-silent by design — never block Claude Code exit. If the API is down +# we drop the capture and move on. Nightly consolidation will pick up the +# snapshot on the next pass. +# +# Install: +# 1. Copy to .claude/hooks/palinode-session-end.sh (or ~/.claude/hooks/…) +# 2. chmod +x .claude/hooks/palinode-session-end.sh +# 3. Register in .claude/settings.json — see ./settings.json in this dir. +# +# Or just run: `palinode init` — it installs all of this for you. + +set -euo pipefail + +PALINODE_API="${PALINODE_API_URL:-http://localhost:6340}" +MIN_MESSAGES="${PALINODE_HOOK_MIN_MESSAGES:-3}" + +INPUT=$(cat) +TRANSCRIPT_PATH=$(echo "$INPUT" | jq -r '.transcript_path // empty') +CWD=$(echo "$INPUT" | jq -r '.cwd // empty') +SOURCE_REASON=$(echo "$INPUT" | jq -r '.source // .reason // "other"') + +# No transcript → nothing to capture. +if [ -z "$TRANSCRIPT_PATH" ] || [ ! -f "$TRANSCRIPT_PATH" ]; then + exit 0 +fi + +# Claude Code transcript format (JSONL): +# user: {type: "user", message: {role: "user", content: "text"}} +# assistant: {type: "assistant", message: {content: [{type: "text", text: "..."}]}} +MSG_COUNT=$(jq -r 'select(.type == "user") | .message.content // empty' \ + "$TRANSCRIPT_PATH" 2>/dev/null | grep -c '.' 2>/dev/null || echo "0") + +# Skip trivial sessions (few messages = not worth a memory). 
+if [ "$MSG_COUNT" -lt "$MIN_MESSAGES" ]; then + exit 0 +fi + +PROJECT=$(basename "$CWD" 2>/dev/null || echo "unknown") +FIRST_PROMPT=$(jq -r 'select(.type == "user") | .message.content // empty' \ + "$TRANSCRIPT_PATH" 2>/dev/null | head -1 | cut -c1-200) + +SUMMARY="Auto-captured (${SOURCE_REASON}, ${MSG_COUNT} messages). Topic: ${FIRST_PROMPT}" + +curl -sS -o /dev/null \ + -X POST "${PALINODE_API}/session-end" \ + -H "Content-Type: application/json" \ + -d "$(jq -n \ + --arg summary "$SUMMARY" \ + --arg project "$PROJECT" \ + --arg source "claude-code-hook" \ + '{summary: $summary, project: $project, source: $source, decisions: [], blockers: []}' + )" \ + --connect-timeout 5 \ + --max-time 10 || true + +exit 0 diff --git a/examples/hooks/settings.json b/examples/hooks/settings.json new file mode 100644 index 0000000..b4b1b70 --- /dev/null +++ b/examples/hooks/settings.json @@ -0,0 +1,15 @@ +{ + "hooks": { + "SessionEnd": [ + { + "hooks": [ + { + "type": "command", + "command": "${CLAUDE_PROJECT_DIR}/.claude/hooks/palinode-session-end.sh", + "timeout": 15 + } + ] + } + ] + } +} diff --git a/examples/sample-memory/README.md b/examples/sample-memory/README.md new file mode 100644 index 0000000..1d81ac9 --- /dev/null +++ b/examples/sample-memory/README.md @@ -0,0 +1,15 @@ +# Sample Memory + +A pre-populated memory directory for trying Palinode. Copy it to your memory directory: + +```bash +cp -r examples/sample-memory/* ~/.palinode/ +``` + +Contains: +- 3 people (Alice, Bob, Carol) +- 1 project (Mobile Checkout Redesign) +- 2 decisions (REST API choice, single-page checkout) +- 1 insight (integration testing strategy) + +All files reference each other via entities, so you can test entity graph traversal, search, blame, and other tools against realistic data. 
diff --git a/palinode/__init__.py b/palinode/__init__.py index f0821fe..fbf53db 100644 --- a/palinode/__init__.py +++ b/palinode/__init__.py @@ -11,4 +11,4 @@ mcp.py — MCP server for Claude Code integration cli.py — Command-line interface """ -__version__ = "0.2.0" +__version__ = "0.7.0" diff --git a/palinode/api/server.py b/palinode/api/server.py index d5e076c..6f495c3 100644 --- a/palinode/api/server.py +++ b/palinode/api/server.py @@ -22,7 +22,9 @@ from contextlib import asynccontextmanager -from fastapi import FastAPI, HTTPException +from fastapi import FastAPI, HTTPException, Request +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import JSONResponse from pydantic import BaseModel from palinode.core import store, embedder, git_tools @@ -65,7 +67,7 @@ async def lifespan(app: FastAPI): """Initialize database and background workers on startup.""" store.init_db() - # Tier 2a (ADR-004): write-time contradiction check worker + # Tier 2a: write-time contradiction check worker if config.consolidation.write_time.enabled: try: from palinode.consolidation import write_time @@ -88,7 +90,61 @@ async def lifespan(app: FastAPI): app = FastAPI(title="Palinode API", lifespan=lifespan) -# ── Auto-summary helpers ────────────────────────────────────────────────────── +# ── Security middleware ────────────────────────────────────────────────────── + +# CORS: restrict to configured origins (default: localhost only) +_cors_origins = os.environ.get("PALINODE_CORS_ORIGINS", "http://localhost:3000,http://127.0.0.1:3000").split(",") +app.add_middleware( + CORSMiddleware, + allow_origins=[o.strip() for o in _cors_origins], + allow_credentials=True, + allow_methods=["GET", "POST"], + allow_headers=["*"], +) + +# Request body size limit (default 5MB) +_MAX_REQUEST_BYTES = int(os.environ.get("PALINODE_MAX_REQUEST_BYTES", 5 * 1024 * 1024)) + +@app.middleware("http") +async def limit_request_size(request: Request, call_next): + """Reject oversized request bodies 
to prevent memory exhaustion.""" + content_length = request.headers.get("content-length") + if content_length and int(content_length) > _MAX_REQUEST_BYTES: + return JSONResponse(status_code=413, content={"detail": "Request body too large"}) + return await call_next(request) + +# Rate limiting (in-memory, per-IP, resets each window) +_RATE_LIMIT_WINDOW = 60 # seconds +_RATE_LIMIT_SEARCH = int(os.environ.get("PALINODE_RATE_LIMIT_SEARCH", 100)) +_RATE_LIMIT_WRITE = int(os.environ.get("PALINODE_RATE_LIMIT_WRITE", 30)) +_rate_counters: dict[str, dict[str, Any]] = {} + +def _check_rate_limit(client_ip: str, category: str, limit: int) -> bool: + """Return True if request is within rate limit, False if exceeded.""" + now = time.time() + key = f"{client_ip}:{category}" + entry = _rate_counters.get(key) + if not entry or now - entry["window_start"] > _RATE_LIMIT_WINDOW: + _rate_counters[key] = {"window_start": now, "count": 1} + return True + entry["count"] += 1 + return entry["count"] <= limit + +# Startup warning for unsafe binding +_api_host = os.environ.get("PALINODE_API_HOST", config.services.api.host) +if _api_host == "0.0.0.0": + logger.warning( + "API binding to 0.0.0.0 — accessible from any network. " + "No authentication is configured. Set PALINODE_API_HOST=127.0.0.1 for local-only access." + ) + +# ── Helpers ─────────────────────────────────────────────────────────────────── + + +def _safe_500(e: Exception, context: str = "Internal error") -> HTTPException: + """Log full exception, return sanitized 500 to client.""" + logger.exception(f"{context}: {e}") + return HTTPException(status_code=500, detail=context) def _utc_now() -> datetime: @@ -118,6 +174,84 @@ def _resolve_memory_path(file_path: str) -> tuple[str, str]: raise HTTPException(status_code=403, detail="Path traversal rejected") return base_dir, resolved +# ── Entity normalization ───────────────────────────────────────────────────── + +# Maps memory category dirs to singular entity-ref prefixes. 
+_CATEGORY_TO_ENTITY_PREFIX: dict[str, str] = { + "people": "person", + "decisions": "decision", + "projects": "project", + "insights": "insight", + "research": "research", + "inbox": "action", +} + + +def _normalize_entities(entities: list[str], category: str) -> list[str]: + """Ensure every entity ref has a category/ prefix. + + Bare strings (no '/') get a prefix inferred from the memory's own + category. Falls back to 'project/' when the category is unknown + (matches MCP context-resolution convention). + """ + prefix = _CATEGORY_TO_ENTITY_PREFIX.get(category, "project") + normalized = [] + for e in entities: + if "/" in e: + normalized.append(e) + else: + logger.info("Entity normalized: %r → %r", e, f"{prefix}/{e}") + normalized.append(f"{prefix}/{e}") + return normalized + + +def _generate_description(content: str) -> str: + """Generate a one-line description for a memory file. + + Tries a cheap Ollama call first; falls back to first-line extraction + if the LLM is unreachable. Never raises — returns empty string on + total failure. + """ + MAX_CHARS = 150 + + # Attempt LLM description + prompt = ( + "Write one sentence (max 150 chars) describing what this memory is about. " + "Be specific and factual. 
Output ONLY the sentence, no preamble.\n\n" + + content[:1500] + ) + url = config.auto_summary.ollama_url or config.embeddings.primary.url + try: + resp = httpx.post( + f"{url}/api/generate", + json={"model": config.auto_summary.model, "prompt": prompt, "stream": False}, + timeout=15.0, + ) + resp.raise_for_status() + raw = resp.json().get("response", "").strip().strip('"\'').strip() + if raw: + return raw[:MAX_CHARS] + except Exception as e: + logger.info(f"Ollama description call failed, using fallback: {e}") + + # Fallback: first meaningful line of content + return _extract_first_line(content, MAX_CHARS) + + +def _extract_first_line(content: str, max_chars: int = 150) -> str: + """Extract the first non-empty, non-header line from markdown content.""" + for line in content.split("\n"): + line = line.strip() + if not line: + continue + # Strip markdown headers + line = re.sub(r'^#+\s*', '', line) + line = line.strip() + if line: + return line[:max_chars] + return "" + + def _generate_summary(content: str) -> str: """Invokes Ollama to produce a single-sentence logical summary of file memory. 
@@ -189,7 +323,8 @@ class SearchRequest(BaseModel): hybrid: bool | None = None date_after: str | None = None date_before: str | None = None - context: list[str] | None = None # Entity refs for ambient context boost (ADR-008) + context: list[str] | None = None # Entity refs for ambient context boost + include_daily: bool | None = False # Skip daily/ penalty when True (#93) class SearchAssociativeRequest(BaseModel): query: str @@ -215,6 +350,7 @@ class SaveRequest(BaseModel): metadata: Any | None = None core: bool | None = None source: str | None = None + confidence: float | None = None @app.get("/list") @@ -303,18 +439,22 @@ def read_api(file_path: str, meta: bool = False) -> dict[str, Any]: return result except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise _safe_500(e, "File read failed") @app.post("/search") -def search_api(req: SearchRequest) -> list[dict[str, Any]]: +def search_api(req: SearchRequest, request: Request = None) -> list[dict[str, Any]]: """Semantic vector search against cached `.palinode.db` chunks. Returns: list[dict[str, Any]]: List payload sequence matching the criteria boundaries. 
""" + if request: + client_ip = request.client.host if request.client else "unknown" + if not _check_rate_limit(client_ip, "search", _RATE_LIMIT_SEARCH): + raise HTTPException(status_code=429, detail="Rate limit exceeded") try: - # ADR-008: Augment query with project context before embedding + # Augment query with project context before embedding embed_query = req.query if req.context and config.context.enabled and config.context.embed_augment: # Extract project name from entity ref (e.g., "project/palinode" → "palinode") @@ -339,6 +479,7 @@ def search_api(req: SearchRequest) -> list[dict[str, Any]]: date_after=req.date_after, date_before=req.date_before, context_entities=req.context, + include_daily=bool(req.include_daily), ) else: results = store.search( @@ -348,10 +489,12 @@ def search_api(req: SearchRequest) -> list[dict[str, Any]]: threshold=req.threshold or config.search.api_threshold, date_after=req.date_after, date_before=req.date_before, + context_entities=req.context, + include_daily=bool(req.include_daily), ) return results except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise _safe_500(e, "Search failed") @app.post("/search-associative") @@ -369,7 +512,7 @@ def search_associative_api(req: SearchAssociativeRequest) -> list[dict[str, Any] ) return results except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise _safe_500(e, "Associative search failed") @app.post("/triggers") @@ -392,7 +535,7 @@ def create_trigger_api(req: TriggerRequest) -> dict[str, Any]: ) return {"id": trigger_id, "status": "created"} except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise _safe_500(e, "Trigger creation failed") @app.get("/triggers") @@ -421,19 +564,25 @@ def check_triggers_api(req: CheckTriggersRequest) -> list[dict[str, Any]]: ) return results except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise _safe_500(e, "Trigger check failed") 
@app.post("/save") -def save_api(req: SaveRequest, sync: bool = False) -> dict[str, Any]: +def save_api(req: SaveRequest, request: Request = None, sync: bool = False) -> dict[str, Any]: """Persists a new memory instance chunk locally and initiates git backup sequences. Query params: - sync: If True, runs the write-time contradiction check (tier 2a, ADR-004) + sync: If True, runs the write-time contradiction check (tier 2a) inline and returns its result. If False (default), the check is enqueued for background processing and the response returns as soon as the file is written and git-committed. """ + if request: + client_ip = request.client.host if request.client else "unknown" + if not _check_rate_limit(client_ip, "write", _RATE_LIMIT_WRITE): + raise HTTPException(status_code=429, detail="Rate limit exceeded") + if len(req.content) > _MAX_REQUEST_BYTES: + raise HTTPException(status_code=413, detail="Content too large") slug = req.slug if slug: # Prevent any potential JSON escape or traversal exploits if user defines slug @@ -462,13 +611,18 @@ def save_api(req: SaveRequest, sync: bool = False) -> dict[str, Any]: file_path = os.path.join(config.palinode_dir, category, f"{slug}.md") os.makedirs(os.path.dirname(file_path), exist_ok=True) - content_hash = hashlib.sha256(req.content.encode()).hexdigest()[:16] - + content_hash = hashlib.sha256(req.content.encode()).hexdigest() + + # Normalize entity refs: bare strings get a category prefix. + # e.g. 
"palinode" → "project/palinode", "alice" → "person/alice" + raw_entities = req.entities or [] + normalized_entities = _normalize_entities(raw_entities, category) + frontmatter_dict = { "id": f"{category}-{slug}", "category": category, "type": req.type, - "entities": req.entities or [], + "entities": normalized_entities, "content_hash": content_hash, "created_at": time.strftime("%Y-%m-%dT%H:%M:%SZ") } @@ -476,10 +630,21 @@ def save_api(req: SaveRequest, sync: bool = False) -> dict[str, Any]: frontmatter_dict.update(req.metadata) if req.core is not None: frontmatter_dict["core"] = req.core - + if req.confidence is not None: + frontmatter_dict["confidence"] = req.confidence + frontmatter_dict["source"] = req.source or os.environ.get("PALINODE_SOURCE", "api") - - doc = f"---\n{yaml.dump(frontmatter_dict)}---\n\n{req.content}\n" + + # Auto-generate description if not already provided via metadata + if not frontmatter_dict.get("description"): + try: + desc = _generate_description(req.content) + if desc: + frontmatter_dict["description"] = desc + except Exception as e: + logger.warning(f"Description generation failed (non-fatal): {e}") + + doc = f"---\n{yaml.safe_dump(frontmatter_dict, default_flow_style=False, allow_unicode=True)}---\n\n{req.content}\n" with open(file_path, "w") as f: f.write(doc) @@ -513,7 +678,7 @@ def save_api(req: SaveRequest, sync: bool = False) -> dict[str, Any]: result: dict[str, Any] = {"file_path": file_path, "id": frontmatter_dict["id"]} - # Tier 2a (ADR-004): schedule write-time contradiction check. + # Tier 2a: schedule write-time contradiction check. # Always safe to call — returns None immediately if disabled in config. # Errors inside the scheduler are logged and swallowed; never propagate. 
if config.consolidation.write_time.enabled: @@ -613,7 +778,7 @@ def status_api() -> dict[str, Any]: stats["ollama_reachable"] = ollama_reachable - # Tier 2a (ADR-004) observability + # Tier 2a observability stats["write_time_enabled"] = config.consolidation.write_time.enabled if config.consolidation.write_time.enabled: try: @@ -635,6 +800,97 @@ def status_api() -> dict[str, Any]: return stats +@app.get("/health") +def health_api() -> dict[str, Any]: + """Lightweight liveness check — no side effects, <100ms.""" + result: dict[str, Any] = {"status": "ok"} + + # DB accessible + basic stats + try: + db = store.get_db() + chunks = db.execute("SELECT count(*) FROM chunks").fetchone()[0] + last_row = db.execute( + "SELECT last_updated FROM chunks ORDER BY last_updated DESC LIMIT 1" + ).fetchone() + result["chunks"] = chunks + result["last_indexed"] = last_row["last_updated"] if last_row else None + entities = db.execute("SELECT count(DISTINCT entity_ref) FROM entities").fetchone()[0] + result["entities"] = entities + db.close() + except Exception as e: + result["status"] = "degraded" + result["db_error"] = str(e) + + # Ollama reachable + try: + httpx.get(config.embeddings.primary.url, timeout=2.0) + result["ollama"] = True + except Exception: + result["ollama"] = False + + return result + + +@app.get("/health/watcher") +def watcher_health_api() -> dict[str, Any]: + """Canary check: write a temp file, verify it gets indexed, clean up. + + Returns watcher_alive=True if the file was indexed within the timeout. + Also checks systemd journal for recent watcher errors. 
+ """ + import uuid as _uuid + canary_id = f"_canary-{_uuid.uuid4().hex[:8]}" + canary_dir = os.path.join(config.palinode_dir, "insights") + os.makedirs(canary_dir, exist_ok=True) + canary_path = os.path.join(canary_dir, f"{canary_id}.md") + canary_content = f"---\nid: {canary_id}\ncategory: insights\ntype: Insight\n---\nCanary check {canary_id}\n" + + result: dict[str, Any] = {"watcher_alive": False, "canary_id": canary_id} + + try: + # Write canary file + with open(canary_path, "w") as f: + f.write(canary_content) + + # Wait for watcher to pick it up (check every 0.5s, up to 8s) + import time as _time + for _ in range(16): + _time.sleep(0.5) + db = store.get_db() + row = db.execute( + "SELECT id FROM chunks WHERE file_path = ?", (canary_path,) + ).fetchone() + db.close() + if row: + result["watcher_alive"] = True + break + + # Check journal for recent watcher errors (last hour) + try: + import subprocess + journal = subprocess.run( + ["journalctl", "--user", "-u", "palinode-watcher", + "--since", "1 hour ago", "--no-pager", "-p", "err"], + capture_output=True, text=True, timeout=5 + ) + errors = [l for l in journal.stdout.strip().split("\n") if l.strip() and "-- No entries --" not in l] + result["recent_errors"] = len(errors) + if errors: + result["last_error"] = errors[-1][:200] + except Exception: + result["recent_errors"] = -1 # couldn't check + + finally: + # Clean up canary file and any indexed chunks + try: + os.remove(canary_path) + store.delete_file_chunks(canary_path) + except Exception: + pass + + return result + + @app.post("/ingest") def ingest_api() -> dict[str, str]: """Invoke document drop-box scanning routine.""" @@ -643,8 +899,7 @@ def ingest_api() -> dict[str, str]: process_inbox() return {"status": "success"} except Exception as e: - logger.error(f"Ingestion failed: {e}") - raise HTTPException(status_code=500, detail=str(e)) + raise _safe_500(e, "Ingestion failed") @app.post("/ingest-url") @@ -667,7 +922,7 @@ def ingest_url_api(req: dict[str, 
str]) -> dict[str, str]: return {"status": "success", "file_path": result} return {"status": "no_content"} except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) + raise _safe_500(e, "URL ingestion failed") @app.post("/rebuild-fts") @@ -684,25 +939,54 @@ def rebuild_fts_api() -> dict[str, Any]: @app.post("/reindex") -def reindex_api() -> dict[str, Any]: - """Resets memory boundaries enforcing a holistic index cycle across DB instances.""" - logger.info("Starting full reindex...") +def reindex_api(since: str | None = None) -> dict[str, Any]: + """Reindex memory files. Idempotent — unchanged files are skipped. + + Query params: + since: ISO timestamp (e.g. '2026-04-09T00:00:00Z'). If provided, + only files whose mtime is newer than this are processed. + Without it, all files are visited (but content-hash dedup + still skips unchanged content). + """ from palinode.indexer.watcher import PalinodeHandler handler = PalinodeHandler() + + since_ts: float | None = None + if since: + try: + from datetime import datetime, timezone + dt = datetime.fromisoformat(since.replace("Z", "+00:00")) + since_ts = dt.timestamp() + except ValueError: + raise HTTPException(status_code=400, detail=f"Invalid ISO timestamp: {since}") + + logger.info("Starting %s reindex...", "incremental" if since_ts else "full") count = 0 + skipped_mtime = 0 errors = 0 for filepath in glob.glob(os.path.join(config.palinode_dir, "**/*.md"), recursive=True): - if handler.is_valid_file(filepath): - try: - handler._process_file(filepath) - count += 1 - except Exception as e: - errors += 1 - logger.warning(f"Reindex failed for {filepath}: {e}") + if not handler.is_valid_file(filepath): + continue + if since_ts and os.path.getmtime(filepath) < since_ts: + skipped_mtime += 1 + continue + try: + handler._process_file(filepath) + count += 1 + except Exception as e: + errors += 1 + logger.warning(f"Reindex failed for {filepath}: {e}") + # Rebuild FTS5 after bulk reindex to ensure consistency 
fts_count = store.rebuild_fts() - logger.info(f"Reindex complete: {count} files processed, {errors} errors, FTS5: {fts_count}") - return {"status": "success", "files_reindexed": count, "errors": errors, "fts_chunks": fts_count} + logger.info(f"Reindex complete: {count} processed, {skipped_mtime} skipped (mtime), {errors} errors, FTS5: {fts_count}") + return { + "status": "success", + "files_reindexed": count, + "skipped_not_modified": skipped_mtime, + "errors": errors, + "fts_chunks": fts_count, + } @app.get("/entities/{entity_ref:path}") @@ -777,8 +1061,7 @@ def consolidate_api(req: ConsolidateRequest = None) -> dict[str, Any]: result = run_consolidation() return result except Exception as e: - logger.error(f"Consolidation failed: {e}") - raise HTTPException(status_code=500, detail=str(e)) + raise _safe_500(e, "Consolidation failed") @app.post("/split-layers") @@ -874,7 +1157,25 @@ def session_end_api(req: SessionEndRequest) -> dict[str, Any]: f.write(f"\n- [{today}] {one_liner}\n") status_file = f"projects/{req.project}-status.md" - # Git commit + # Also save as an individual indexed memory file (M0: dual-write). + # This gives each session-end its own frontmatter, entities, description, + # and embedding — searchable and retractable independently. 
+ individual_file = None + try: + short_hash = hashlib.sha256(req.summary.encode()).hexdigest()[:8] + save_req = SaveRequest( + content=session_entry, + type="ProjectSnapshot" if req.project else "Insight", + slug=f"session-end-{today}-{req.project}-{short_hash}" if req.project else f"session-end-{today}-{short_hash}", + entities=[f"project/{req.project}"] if req.project else [], + source=source, + ) + save_result = save_api(save_req) + individual_file = save_result.get("file_path") + except Exception as e: + logger.error(f"Individual session-end file save failed (non-fatal): {e}") + + # Git commit (covers daily + status + individual file if save_api didn't commit) if config.git.auto_commit: try: files_to_add = [daily_path] @@ -892,6 +1193,7 @@ def session_end_api(req: SessionEndRequest) -> dict[str, Any]: return { "daily_file": f"daily/{today}.md", "status_file": status_file, + "individual_file": individual_file, "entry": session_entry, } @@ -1054,6 +1356,73 @@ def _set_active(file_path: str, active: bool) -> None: return {"activated": name, "task": task} +class MigrateOpenClawRequest(BaseModel): + path: str + dry_run: bool = False + + +@app.post("/migrate/openclaw") +def migrate_openclaw_api(req: MigrateOpenClawRequest) -> dict: + """Import a MEMORY.md from OpenClaw into Palinode. + + Parses each ## section into a separate memory file with heuristic + type detection (person / decision / project / insight). + + Args: + req: Request body with ``path`` (absolute or relative to memory_dir) + and optional ``dry_run`` flag. + + Returns: + dict with sections_found, files_created, files_skipped, log_file, dry_run. + """ + from palinode.migration.openclaw import run_migration + + path = req.path + if "\x00" in path: + raise HTTPException(status_code=400, detail="Null bytes are not allowed in path") + + # Resolve against memory_dir; reject paths that escape it. 
+ base = _memory_base_dir() + if os.path.isabs(path): + resolved_path = os.path.realpath(path) + else: + resolved_path = os.path.realpath(os.path.join(base, path)) + try: + within = os.path.commonpath([base, resolved_path]) == base + except ValueError: + within = False + if not within: + raise HTTPException(status_code=403, detail="Path traversal rejected") + path = resolved_path + + if not os.path.isfile(path): + raise HTTPException(status_code=404, detail=f"File not found: {path}") + + try: + result = run_migration(source_path=path, dry_run=req.dry_run) + return result + except ValueError as exc: + raise HTTPException(status_code=400, detail=str(exc)) from exc + except Exception as exc: + logger.error(f"OpenClaw migration failed: {exc}") + raise HTTPException(status_code=500, detail=str(exc)) from exc + + +@app.post("/migrate/mem0") +def migrate_mem0_api() -> dict[str, str]: + """Run the Mem0 backfill pipeline. + + One-time migration: exports from Qdrant, deduplicates, classifies, + and generates Palinode markdown files. + """ + from palinode.migration.run_mem0_backfill import main as run_backfill + try: + run_backfill() + return {"status": "success", "message": "Mem0 backfill complete. 
Review files and reindex."} + except Exception as e: + raise _safe_500(e, "Backfill failed") + + def main() -> None: """Invokes Uvicorn CLI runner.""" import uvicorn diff --git a/palinode/cli/__init__.py b/palinode/cli/__init__.py index f616696..43babf0 100644 --- a/palinode/cli/__init__.py +++ b/palinode/cli/__init__.py @@ -1,4 +1,6 @@ import click +from palinode import __version__ +from palinode.core.brand import BANNER from palinode.core.config import config from palinode.cli.search import search from palinode.cli.save import save @@ -7,7 +9,7 @@ from palinode.cli.consolidate import consolidate from palinode.cli.trigger import trigger from palinode.cli.doctor import doctor -from palinode.cli.manage import reindex, rebuild_fts, split_layers, bootstrap_ids +from palinode.cli.manage import reindex, rebuild_fts, split_layers, bootstrap_ids, migrate_mem0 from palinode.cli.git import blame, history, rollback, push from palinode.cli.query import entities from palinode.cli.session_end import session_end @@ -16,13 +18,35 @@ from palinode.cli.lint import lint from palinode.cli.ingest import ingest from palinode.cli.prompt import prompt +from palinode.cli.migrate import migrate +from palinode.cli.init import init + +def _print_version(ctx: click.Context, param: click.Parameter, value: bool) -> None: + if not value or ctx.resilient_parsing: + return + click.echo(f"{BANNER}\n\npalinode {__version__}") + ctx.exit() @click.group() +@click.option( + "--version", + is_flag=True, + callback=_print_version, + expose_value=False, + is_eager=True, + help="Show the version banner and exit.", +) def main(): """Palinode — persistent agent memory.""" pass + +@main.command() +def banner() -> None: + """Print the Palinode ASCII brand mark.""" + click.echo(BANNER) + # Registration main.add_command(search) main.add_command(save) @@ -37,7 +61,7 @@ def main(): main.add_command(rebuild_fts) main.add_command(split_layers) main.add_command(bootstrap_ids) - +main.add_command(migrate_mem0) # Git 
main.add_command(blame) @@ -51,6 +75,7 @@ def main(): main.add_command(list_cmd, name="list") main.add_command(lint) main.add_command(ingest) +main.add_command(migrate) # Prompts main.add_command(prompt) @@ -58,6 +83,9 @@ def main(): # Session main.add_command(session_end) +# Project scaffolding +main.add_command(init) + @main.command() @click.option("--watcher/--no-watcher", default=True, help="Run memory watcher") @click.option("--api/--no-api", default=True, help="Run API server") diff --git a/palinode/cli/_api.py b/palinode/cli/_api.py index df85ec1..a16e017 100644 --- a/palinode/cli/_api.py +++ b/palinode/cli/_api.py @@ -107,6 +107,20 @@ def get_entities(self, entity: str = None): response.raise_for_status() return response.json() + def migrate_openclaw(self, path: str, dry_run: bool = False): + response = self.client.post( + "/migrate/openclaw", + json={"path": path, "dry_run": dry_run}, + timeout=120.0, + ) + response.raise_for_status() + return response.json() + + def migrate_mem0(self): + response = self.client.post("/migrate/mem0", timeout=600.0) + response.raise_for_status() + return response.json() + def blame(self, file_path: str, search: str = None): params: dict = {} if search: diff --git a/palinode/cli/init.py b/palinode/cli/init.py new file mode 100644 index 0000000..f2df942 --- /dev/null +++ b/palinode/cli/init.py @@ -0,0 +1,390 @@ +"""`palinode init` — scaffold Palinode into a project for zero-friction adoption. + +Creates: + - .claude/CLAUDE.md (memory section, appended if file exists) + - .claude/settings.json (SessionEnd hook for /clear auto-capture) + - .claude/hooks/palinode-session-end.sh (hook script) + - .mcp.json (MCP server block for palinode, if --mcp given) + +All writes are opt-out via flags. Existing files are preserved — we append +or skip, never overwrite without --force. 
+""" +import json +import os +import re +import stat +from pathlib import Path + +import click + + +CLAUDE_MD_BLOCK = """\ +## Memory (Palinode) + +This project uses Palinode for persistent memory via MCP (server name: `palinode`). + +### At session start +- Call `palinode_search` with the current task or project name to pull prior context. +- If the MCP server is down, fall back to the CLI: `palinode search ""`. + +### During work +- After a milestone (tests pass, feature shipped, bug root-caused), call + `palinode_save` with the outcome. Include *why*, not just *what*. +- When making an architectural or design decision, save the decision AND the + rationale as `type="Decision"`. +- Save surprising reusable findings as `type="Insight"`. +- Every ~30 minutes of active work, save a one-line progress note. + +### At session end — including `/clear` +- Call `palinode_session_end` with `summary`, `decisions`, `blockers`, and + `project="{project_slug}"` before the session terminates. +- `/clear` counts as a session end. The SessionEnd hook installed by + `palinode init` captures a fallback snapshot automatically, but calling + `palinode_session_end` from the agent first produces a far better record. +- The user may type `/ps` (Palinode Save) or `/wrap` (session wrap-up) as + shortcuts. These are **deterministic** — each maps to exactly one tool: + - `/ps` → always `palinode_save` with `type="ProjectSnapshot"`. Use for + mid-session checkpoints. + - `/wrap` → always `palinode_session_end` with summary/decisions/blockers. + Use before `/clear`. + Never dispatch one to the other's tool. See `.claude/commands/ps.md` and + `.claude/commands/wrap.md` for the exact prompts. + +### What NOT to save +- Raw code (git handles that). +- Step-by-step debug logs — save the resolution, not the journey. +- Trivial changes ("fixed typo" is not worth a memory). + +### Project slug +This project's slug is `{project_slug}`. 
Pass it as the `project` argument to +`palinode_save` and `palinode_session_end` so status rolls up correctly. +""" + + +HOOK_SCRIPT = """\ +#!/bin/bash +# palinode-session-end.sh — Auto-capture Claude Code sessions to Palinode. +# +# Fires on SessionEnd (including /clear, logout, exit). Reads the transcript +# from stdin JSON, extracts a minimal summary, and POSTs to palinode-api. +# +# Fail-silent by design — never block Claude Code exit. If the API is down +# we drop the capture and move on. + +set -euo pipefail + +PALINODE_API="${PALINODE_API_URL:-http://localhost:6340}" +MIN_MESSAGES="${PALINODE_HOOK_MIN_MESSAGES:-3}" + +INPUT=$(cat) +TRANSCRIPT_PATH=$(echo "$INPUT" | jq -r '.transcript_path // empty') +CWD=$(echo "$INPUT" | jq -r '.cwd // empty') +SOURCE_REASON=$(echo "$INPUT" | jq -r '.source // .reason // "other"') + +# No transcript → nothing to capture +if [ -z "$TRANSCRIPT_PATH" ] || [ ! -f "$TRANSCRIPT_PATH" ]; then + exit 0 +fi + +# Claude Code transcript format: +# user: {type: "user", message: {role: "user", content: "text"}} +# assistant: {type: "assistant", message: {content: [{type: "text", text: "..."}]}} +MSG_COUNT=$(jq -r 'select(.type == "user") | .message.content // empty' \\ + "$TRANSCRIPT_PATH" 2>/dev/null | grep -c '.' 2>/dev/null || true) + +# Skip trivial sessions +if [ "$MSG_COUNT" -lt "$MIN_MESSAGES" ]; then + exit 0 +fi + +PROJECT=$(basename "$CWD" 2>/dev/null || echo "unknown") +FIRST_PROMPT=$(jq -r 'select(.type == "user") | .message.content // empty' \\ + "$TRANSCRIPT_PATH" 2>/dev/null | head -1 | cut -c1-200) + +SUMMARY="Auto-captured (${SOURCE_REASON}, ${MSG_COUNT} messages). 
Topic: ${FIRST_PROMPT}" + +curl -sS -o /dev/null \\ + -X POST "${PALINODE_API}/session-end" \\ + -H "Content-Type: application/json" \\ + -d "$(jq -n \\ + --arg summary "$SUMMARY" \\ + --arg project "$PROJECT" \\ + --arg source "claude-code-hook" \\ + '{summary: $summary, project: $project, source: $source, decisions: [], blockers: []}' + )" \\ + --connect-timeout 5 \\ + --max-time 10 || true + +exit 0 +""" + + +PS_COMMAND_BODY = """\ +--- +description: Palinode Save — drop a mid-session ProjectSnapshot to persistent memory. +--- + +Call `palinode_save` with: +- `type` — **always** `"ProjectSnapshot"` (this command is exclusively for + progress snapshots; use `/wrap` for end-of-session wrap-ups) +- `content` — a one-to-three sentence summary of what's been done since the + last save and what's next. Written in past/present tense, specific enough + that a future session could pick up where this one left off. +- `project` — the project slug from `.claude/CLAUDE.md` (or the directory + name if no slug is set) + +After saving, print one line: the file path and slug from the tool result. +Do not editorialise. Do not call any other tool. + +**This command is deterministic.** Always `palinode_save`, always +`ProjectSnapshot`. If the user is wrapping up for the day, they should use +`/wrap` instead — that calls `palinode_session_end` with a structured +summary, decisions, and blockers. +""" + + +WRAP_COMMAND_BODY = """\ +--- +description: Wrap up this session — structured session_end save before /clear. 
+--- + +Call `palinode_session_end` with: +- `summary` — 1-2 sentences on what was accomplished this session +- `decisions` — array of key decisions made, each with its rationale (the + *why*, not just the *what*) +- `blockers` — array of open questions, unfinished work, or next steps the + next session needs to pick up +- `project` — the project slug from `.claude/CLAUDE.md` (or the directory + name if no slug is set) + +After the tool returns, print exactly: `✓ session saved — safe to /clear now.` +followed by the daily-note path from the tool result. + +Do not call any other tool. Do not save as a ProjectSnapshot first — this +command is exclusively for structured session wrap-ups. + +**This command is deterministic.** Always `palinode_session_end`. For a +quick mid-session checkpoint, use `/ps` instead. +""" + + +SETTINGS_HOOK_BLOCK = { + "hooks": { + "SessionEnd": [ + { + "hooks": [ + { + "type": "command", + "command": "${CLAUDE_PROJECT_DIR}/.claude/hooks/palinode-session-end.sh", + "timeout": 15, + } + ] + } + ] + } +} + + +MCP_JSON_BLOCK = { + "mcpServers": { + "palinode": { + "command": "palinode-mcp", + "env": {}, + } + } +} + + +def _slugify(name: str) -> str: + """Turn a directory name into a safe project slug.""" + s = re.sub(r"[^a-zA-Z0-9_-]+", "-", name.strip().lower()) + s = re.sub(r"-+", "-", s).strip("-") + return s or "project" + + +def _ensure_parent(path: Path) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + + +def _write_claude_md(path: Path, project_slug: str, force: bool) -> str: + block = CLAUDE_MD_BLOCK.format(project_slug=project_slug) + _ensure_parent(path) + if not path.exists(): + path.write_text(block) + return "created" + existing = path.read_text() + if "## Memory (Palinode)" in existing and not force: + return "skipped (already has Palinode section)" + with path.open("a") as f: + if not existing.endswith("\n"): + f.write("\n") + f.write("\n" + block) + return "appended" + + +def _write_hook_script(path: Path, force: 
bool) -> str: + _ensure_parent(path) + if path.exists() and not force: + return "skipped (exists)" + path.write_text(HOOK_SCRIPT) + # chmod +x + mode = path.stat().st_mode + path.chmod(mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) + return "created" + + +def _merge_settings(path: Path, force: bool) -> str: + _ensure_parent(path) + if not path.exists(): + path.write_text(json.dumps(SETTINGS_HOOK_BLOCK, indent=2) + "\n") + return "created" + try: + existing = json.loads(path.read_text()) + except json.JSONDecodeError: + if not force: + return "skipped (existing settings.json is not valid JSON — re-run with --force to overwrite)" + existing = {} + hooks = existing.setdefault("hooks", {}) + session_end_hooks = hooks.setdefault("SessionEnd", []) + # Check for an existing palinode hook + for entry in session_end_hooks: + for h in entry.get("hooks", []): + if "palinode-session-end.sh" in h.get("command", ""): + return "skipped (palinode hook already registered)" + session_end_hooks.append(SETTINGS_HOOK_BLOCK["hooks"]["SessionEnd"][0]) + path.write_text(json.dumps(existing, indent=2) + "\n") + return "merged" + + +def _write_slash_command(path: Path, body: str, force: bool) -> str: + _ensure_parent(path) + if path.exists() and not force: + return "skipped (exists)" + path.write_text(body) + return "created" + + +def _merge_mcp_json(path: Path, force: bool) -> str: + _ensure_parent(path) + if not path.exists(): + path.write_text(json.dumps(MCP_JSON_BLOCK, indent=2) + "\n") + return "created" + try: + existing = json.loads(path.read_text()) + except json.JSONDecodeError: + if not force: + return "skipped (existing .mcp.json is not valid JSON — re-run with --force to overwrite)" + existing = {} + servers = existing.setdefault("mcpServers", {}) + if "palinode" in servers and not force: + return "skipped (palinode MCP server already configured)" + servers["palinode"] = MCP_JSON_BLOCK["mcpServers"]["palinode"] + path.write_text(json.dumps(existing, indent=2) + "\n") + 
return "merged" + + +@click.command("init") +@click.option( + "--dir", "target_dir", + default=".", + type=click.Path(file_okay=False), + help="Project directory to scaffold (default: current)", +) +@click.option( + "--project", "project_slug", + default=None, + help="Project slug (default: inferred from directory name)", +) +@click.option( + "--mcp/--no-mcp", + default=True, + help="Write .mcp.json with the palinode MCP server block", +) +@click.option( + "--claudemd/--no-claudemd", + default=True, + help="Write the Palinode memory block to .claude/CLAUDE.md", +) +@click.option( + "--hook/--no-hook", + default=True, + help="Install the SessionEnd hook script + .claude/settings.json", +) +@click.option( + "--slash/--no-slash", + default=True, + help="Install /ps and /wrap slash commands for save-before-clear reflex", +) +@click.option( + "--force", + is_flag=True, + help="Overwrite existing files (default: preserve / append / skip)", +) +@click.option( + "--dry-run", + is_flag=True, + help="Print what would change without writing anything", +) +def init(target_dir, project_slug, mcp, claudemd, hook, slash, force, dry_run): + """Scaffold Palinode into a project for zero-friction adoption. + + Creates (or appends to): + .claude/CLAUDE.md — memory instructions for the agent + .claude/settings.json — SessionEnd hook registration + .claude/hooks/palinode-session-end.sh — hook script (fires on /clear, exit) + .mcp.json — palinode MCP server block + + Re-run with --force to overwrite. --dry-run shows the plan without writing. 
+ """ + target = Path(target_dir).resolve() + if not target.exists(): + raise click.ClickException(f"Directory not found: {target}") + + slug = project_slug or _slugify(target.name) + + claude_md = target / ".claude" / "CLAUDE.md" + settings = target / ".claude" / "settings.json" + hook_script = target / ".claude" / "hooks" / "palinode-session-end.sh" + mcp_json = target / ".mcp.json" + ps_cmd = target / ".claude" / "commands" / "ps.md" + wrap_cmd = target / ".claude" / "commands" / "wrap.md" + + click.echo(f"Palinode init → {target}") + click.echo(f" project slug: {slug}") + click.echo("") + + if dry_run: + click.echo("[dry-run] Would write:") + if claudemd: + click.echo(f" {claude_md.relative_to(target)} (memory instructions)") + if hook: + click.echo(f" {hook_script.relative_to(target)} (SessionEnd hook script)") + click.echo(f" {settings.relative_to(target)} (hook registration)") + if slash: + click.echo(f" {ps_cmd.relative_to(target)} (/ps slash command)") + click.echo(f" {wrap_cmd.relative_to(target)} (/wrap slash command)") + if mcp: + click.echo(f" {mcp_json.relative_to(target)} (MCP server block)") + return + + results = [] + if claudemd: + results.append(("CLAUDE.md", _write_claude_md(claude_md, slug, force))) + if hook: + results.append(("hook script", _write_hook_script(hook_script, force))) + results.append(("settings.json", _merge_settings(settings, force))) + if slash: + results.append(("/ps command", _write_slash_command(ps_cmd, PS_COMMAND_BODY, force))) + results.append(("/wrap command", _write_slash_command(wrap_cmd, WRAP_COMMAND_BODY, force))) + if mcp: + results.append((".mcp.json", _merge_mcp_json(mcp_json, force))) + + for label, status in results: + mark = "✓" if status in ("created", "appended", "merged") else "·" + click.echo(f" {mark} {label}: {status}") + + click.echo("") + click.echo("Next steps:") + click.echo(" 1. Make sure palinode-api is running (palinode start, or systemd)") + click.echo(" 2. 
Open the project in Claude Code — the MCP server will connect on start") + click.echo(" 3. Try it: \"search palinode for recent decisions on this project\"") diff --git a/palinode/cli/lint.py b/palinode/cli/lint.py index 4afdade..6068961 100644 --- a/palinode/cli/lint.py +++ b/palinode/cli/lint.py @@ -63,5 +63,36 @@ def lint(fmt): console.print(f" - {ct['entity']}: {ct['issue']}") else: console.print("[green]✓ No contradictions detected[/green]") - + + console.print("") + + # M0: new checks + missing_ent = data.get("missing_entities", []) + if missing_ent: + console.print(f"[bold yellow]Missing Entities ({len(missing_ent)})[/bold yellow]") + for me in missing_ent: + console.print(f" - {me}") + else: + console.print("[green]✓ All files have entity refs[/green]") + + console.print("") + + missing_desc = data.get("missing_descriptions", []) + if missing_desc: + console.print(f"[bold yellow]Missing Descriptions ({len(missing_desc)})[/bold yellow]") + for md in missing_desc: + console.print(f" - {md}") + else: + console.print("[green]✓ All files have descriptions[/green]") + + console.print("") + + core_count = data.get("core_count", 0) + if core_count > 10: + console.print(f"[bold red]Core Files: {core_count}[/bold red] (recommended: ≤10 — prune with `palinode list --core-only`)") + elif core_count > 0: + console.print(f"[green]Core Files: {core_count}[/green]") + else: + console.print("[dim]No core files found[/dim]") + console.print("") diff --git a/palinode/cli/manage.py b/palinode/cli/manage.py index 6a78dfc..78348db 100644 --- a/palinode/cli/manage.py +++ b/palinode/cli/manage.py @@ -41,3 +41,13 @@ def bootstrap_ids(fmt): print_result(result, fmt=OutputFormat(fmt) if fmt else get_default_format()) except Exception as e: console.print(f"[red]Error bootstrapping IDs: {str(e)}[/red]") + +@click.command(name="migrate-mem0") +@click.option("--format", "fmt", type=click.Choice(["json", "text"]), help="Output format") +def migrate_mem0(fmt): + """Backfill from 
Mem0/Qdrant.""" + try: + result = api_client.migrate_mem0() + print_result(result, fmt=OutputFormat(fmt) if fmt else get_default_format()) + except Exception as e: + console.print(f"[red]Error migrating: {str(e)}[/red]") diff --git a/palinode/cli/migrate.py b/palinode/cli/migrate.py new file mode 100644 index 0000000..0926e90 --- /dev/null +++ b/palinode/cli/migrate.py @@ -0,0 +1,95 @@ +import click +from palinode.cli._format import console, print_result, get_default_format, OutputFormat + + +@click.group() +def migrate(): + """Migration tools for importing memories from external sources.""" + pass + + +def _interactive_review(sections: list[dict]) -> list[dict]: + """Prompt the user to confirm or change each section's detected type.""" + valid_types = ("person", "decision", "project", "insight") + reviewed: list[dict] = [] + console.print("\n[bold]Review detected types[/bold] (enter to accept, type name to change, 's' to skip)\n") + for i, sec in enumerate(sections, 1): + preview = sec["body"][:80].replace("\n", " ") + console.print(f" [cyan]{i}.[/cyan] [bold]{sec['heading']}[/bold]") + console.print(f" {preview}{'…' if len(sec['body']) > 80 else ''}") + console.print(f" detected: [yellow]{sec['type']}[/yellow]") + answer = click.prompt( + " accept/change/skip", + default=sec["type"], + show_default=False, + ).strip().lower() + if answer == "s": + console.print(" [dim]skipped[/dim]") + continue + if answer in valid_types: + sec["type"] = answer + elif answer: + console.print(f" [red]unknown type '{answer}', keeping {sec['type']}[/red]") + reviewed.append(sec) + console.print() + return reviewed + + +@migrate.command(name="openclaw") +@click.argument("memory_file", type=click.Path(exists=True, dir_okay=False, readable=True)) +@click.option("--dry-run", is_flag=True, default=False, help="Show what would be imported without writing files") +@click.option("--review", is_flag=True, default=False, help="Interactively review and override detected types") 
+@click.option("--format", "fmt", type=click.Choice(["text", "json"]), help="Output format") +def openclaw(memory_file: str, dry_run: bool, review: bool, fmt: str | None) -> None: + """Import a MEMORY.md from OpenClaw into Palinode. + + Parses each ## section into a separate memory file with heuristic + type detection (person / decision / project / insight). + + Use --review to interactively confirm or change each section's type + before writing. + + MEMORY_FILE: Path to the MEMORY.md file to import. + """ + from palinode.migration.openclaw import run_migration + + output_fmt = OutputFormat(fmt) if fmt else get_default_format() + review_cb = _interactive_review if review else None + + try: + result = run_migration(source_path=memory_file, dry_run=dry_run, review_callback=review_cb) + except ValueError as exc: + console.print(f"[red]Error:[/red] {exc}") + raise SystemExit(1) + except Exception as exc: + console.print(f"[red]Migration failed:[/red] {exc}") + raise SystemExit(1) + + if output_fmt == OutputFormat.JSON: + print_result(result, fmt=output_fmt) + return + + # Human-readable output + prefix = "[yellow](dry-run)[/yellow] " if dry_run else "" + console.print(f"\n{prefix}[bold]OpenClaw migration complete[/bold]") + console.print(f" Sections found: {result['sections_found']}") + console.print(f" Files created: {len(result['files_created'])}") + console.print(f" Files skipped: {len(result['files_skipped'])} (duplicate content)") + + if result["files_created"]: + console.print("\n[green]Created:[/green]") + for fp in result["files_created"]: + console.print(f" {fp}") + + if result["files_skipped"]: + console.print("\n[dim]Skipped (identical content already exists):[/dim]") + for fp in result["files_skipped"]: + console.print(f" {fp}") + + if result.get("log_file"): + console.print(f"\n[dim]Migration log:[/dim] {result['log_file']}") + + if dry_run: + console.print( + "\n[yellow]Dry run — no files written. 
Remove --dry-run to import.[/yellow]" + ) diff --git a/palinode/cli/save.py b/palinode/cli/save.py index 7e08c1b..c0e045b 100644 --- a/palinode/cli/save.py +++ b/palinode/cli/save.py @@ -4,7 +4,8 @@ @click.command() @click.argument("content", required=False) -@click.option("--type", "memory_type", required=True, help="Memory type (e.g. PersonMemory, Decision, Insight)") +@click.option("--type", "memory_type", required=False, help="Memory type (e.g. PersonMemory, Decision, Insight, ProjectSnapshot)") +@click.option("--ps", "is_ps", is_flag=True, help="Shorthand for --type ProjectSnapshot (Palinode Save a mid-session snapshot)") @click.option("--entity", "entities", multiple=True, help="Entity tag (e.g. person/X, project/X)") @click.option("--file", "file_path", type=click.Path(exists=True), help="Read content from file instead of argument") @click.option("--title", help="Optional title override") @@ -17,8 +18,13 @@ "consolidation.write_time.enabled in config.", ) @click.option("--format", "fmt", type=click.Choice(["json", "text"]), help="Output format") -def save(content, memory_type, entities, file_path, title, source, sync, fmt): - """Store a new memory.""" +def save(content, memory_type, is_ps, entities, file_path, title, source, sync, fmt): + """Store a new memory. + + Use --ps as shorthand for --type ProjectSnapshot when dropping a quick + mid-session note ("Palinode Save"). For structured session wrap-ups with + decisions and blockers, use `palinode session-end` instead. + """ if file_path: with open(file_path, "r") as f: content = f.read() @@ -28,6 +34,18 @@ def save(content, memory_type, entities, file_path, title, source, sync, fmt): click.Abort() return + # Resolve memory type: --ps is shorthand for ProjectSnapshot + if is_ps and memory_type and memory_type != "ProjectSnapshot": + console.print(f"[red]Error: --ps conflicts with --type {memory_type}. 
Pick one.[/red]") + click.Abort() + return + if is_ps: + memory_type = "ProjectSnapshot" + if not memory_type: + console.print("[red]Error: Must provide --type or --ps.[/red]") + click.Abort() + return + try: source_val = source or "cli" result = api_client.save( diff --git a/palinode/cli/search.py b/palinode/cli/search.py index 7f39272..dfb4722 100644 --- a/palinode/cli/search.py +++ b/palinode/cli/search.py @@ -7,7 +7,7 @@ def _cli_resolve_context() -> list[str] | None: - """Resolve ambient project context from CWD for CLI (ADR-008).""" + """Resolve ambient project context from CWD for CLI.""" if not config.context.enabled: return None explicit = os.environ.get("PALINODE_PROJECT") diff --git a/palinode/cli/session_end.py b/palinode/cli/session_end.py index 2f84afb..eeb179b 100644 --- a/palinode/cli/session_end.py +++ b/palinode/cli/session_end.py @@ -1,4 +1,5 @@ import click +import hashlib import json import os from datetime import datetime, timezone @@ -67,6 +68,25 @@ def session_end(summary, decision, blocker, project, source, fmt): f.write(f"\n- [{today}] {one_liner}\n") status_msg = f" + status → {project}-status.md" + # Also save as an individual indexed memory file (M0: dual-write) + individual_file = None + try: + import httpx + short_hash = hashlib.sha256(summary.encode()).hexdigest()[:8] + api_url = f"http://localhost:{config.services.api.port}/save" + save_payload = { + "content": session_entry, + "type": "ProjectSnapshot" if project else "Insight", + "slug": f"session-end-{today}-{project}-{short_hash}" if project else f"session-end-{today}-{short_hash}", + "entities": [f"project/{project}"] if project else [], + "source": source_val, + } + resp = httpx.post(api_url, json=save_payload, timeout=10.0) + if resp.status_code == 200: + individual_file = resp.json().get("file_path") + except Exception: + pass # Non-fatal — daily append is the primary path + # Git commit try: import subprocess @@ -86,6 +106,7 @@ def session_end(summary, decision, blocker, 
project, source, fmt): result = { "daily_file": f"daily/{today}.md", + "individual_file": individual_file, "project_status": f"projects/{project}-status.md" if status_msg else None, "summary": summary, "decisions": decisions, diff --git a/palinode/consolidation/write_time.py b/palinode/consolidation/write_time.py index 33f5fe4..50648d3 100644 --- a/palinode/consolidation/write_time.py +++ b/palinode/consolidation/write_time.py @@ -1,5 +1,5 @@ """ -Tier 2a: Write-time contradiction check on palinode_save (ADR-004). +Tier 2a: Write-time contradiction check on palinode_save. When enabled, every save schedules a background contradiction check against similar existing memories. Runs asynchronously via an in-process asyncio queue @@ -7,7 +7,7 @@ (when the save comes from a CLI or plugin path without a long-lived worker). Errors in the check are logged but never propagate to the save caller. The -save-never-fails invariant is load-bearing — see ADR-004 for rationale. +save-never-fails invariant is load-bearing. Public API: schedule_contradiction_check(file_path, item, *, sync=False) -> dict | None @@ -86,7 +86,7 @@ def schedule_contradiction_check( Never raises. Errors in the check are logged and swallowed — the save call path must never fail because of a tier 2a problem. This is the - ADR-004 load-bearing invariant. + load-bearing invariant. """ if not config.consolidation.write_time.enabled: return None @@ -119,7 +119,10 @@ def sweep_pending_markers() -> int: if not os.path.isdir(pending_dir): return 0 - markers = sorted(glob.glob(os.path.join(pending_dir, "*.json"))) + markers = sorted( + p for p in glob.glob(os.path.join(pending_dir, "*.json")) + if not p.endswith(".failed.json") + ) recovered = 0 for marker_path in markers: diff --git a/palinode/core/audit.py b/palinode/core/audit.py new file mode 100644 index 0000000..08eb2ac --- /dev/null +++ b/palinode/core/audit.py @@ -0,0 +1,106 @@ +""" +MCP Audit Logger + +Structured JSONL logging for every MCP tool call. 
+Writes to {PALINODE_DIR}/.audit/mcp-calls.jsonl by default. + +Each entry records timestamp, tool name, sanitized arguments, +duration, status, and client identity for compliance and debugging. +""" +from __future__ import annotations + +import json +import logging +import os +import time +from datetime import datetime, timezone +from pathlib import Path +from typing import Any + +from palinode.core.config import AuditConfig + +logger = logging.getLogger("palinode.audit") + +# Fields whose values are truncated in log entries for privacy +_TRUNCATE_FIELDS = {"content", "query", "summary", "prompt", "description", "text"} +_TRUNCATE_MAX = 200 + + +def _sanitize_arguments(arguments: dict[str, Any]) -> dict[str, Any]: + """Return a copy of arguments with long content fields truncated.""" + sanitized: dict[str, Any] = {} + for key, value in arguments.items(): + if key in _TRUNCATE_FIELDS and isinstance(value, str) and len(value) > _TRUNCATE_MAX: + sanitized[key] = value[:_TRUNCATE_MAX] + "..." 
+ else: + sanitized[key] = value + return sanitized + + +def _resolve_client_info() -> dict[str, str | None]: + """Gather available client identity from environment.""" + return { + "harness": os.environ.get("MCP_CLIENT_NAME") or os.environ.get("CLAUDE_CODE") or None, + "project": os.environ.get("PALINODE_PROJECT") or None, + "cwd": os.environ.get("CWD") or None, + } + + +class AuditLogger: + """Append-only JSONL audit logger for MCP tool calls.""" + + def __init__(self, memory_dir: str, audit_config: AuditConfig): + self._enabled = audit_config.enabled + if not self._enabled: + self._path: Path | None = None + return + + log_path = audit_config.log_path + if os.path.isabs(log_path): + self._path = Path(log_path) + else: + self._path = Path(memory_dir) / log_path + + # Create parent directory if needed + try: + self._path.parent.mkdir(parents=True, exist_ok=True) + except OSError as e: + logger.warning("Cannot create audit directory %s: %s", self._path.parent, e) + self._enabled = False + self._path = None + + @property + def enabled(self) -> bool: + return self._enabled + + @property + def log_path(self) -> Path | None: + return self._path + + def log_call( + self, + tool_name: str, + arguments: dict[str, Any], + duration_ms: float, + status: str, + error: str | None = None, + ) -> None: + """Write a single audit entry. 
Never raises — errors are logged and swallowed.""" + if not self._enabled or self._path is None: + return + + entry = { + "timestamp": datetime.now(timezone.utc).isoformat(), + "tool_name": tool_name, + "arguments": _sanitize_arguments(arguments), + "duration_ms": round(duration_ms, 1), + "status": status, + "error": error, + "client_info": _resolve_client_info(), + } + + try: + with open(self._path, "a", encoding="utf-8") as f: + f.write(json.dumps(entry, separators=(",", ":"), default=str) + "\n") + except OSError as e: + logger.warning("Audit write failed: %s", e) diff --git a/palinode/core/brand.py b/palinode/core/brand.py new file mode 100644 index 0000000..670bbfc --- /dev/null +++ b/palinode/core/brand.py @@ -0,0 +1,17 @@ +"""Brand assets for Palinode. + +The ASCII density-gradient mark is the primary brand. PNGs under +artifacts/ are secondary renderings of this same composition for venues +that require an image slot (directory submissions, social previews). + +Iterate on the ASCII if the concept ever needs refinement; everything +else is derived from it. +""" +from __future__ import annotations + +BANNER = """\ +┌─ palinode ─┐ +│ ░░░░░░░░░░ │ +│ ▓▓▓▓▓▓▓▓▓▓ │ +│ ██████████ │ +└────────────┘""" diff --git a/palinode/core/config.py b/palinode/core/config.py index 3977e11..b9c3f90 100644 --- a/palinode/core/config.py +++ b/palinode/core/config.py @@ -149,6 +149,8 @@ class SearchConfig: exclude_status: list[str] = field(default_factory=lambda: ["archived"]) hybrid_weight: float = 0.5 hybrid_enabled: bool = True + dedup_score_gap: float = 0.2 + daily_penalty: float = 0.3 # Multiplier for daily/ files (0.3 = 30% of original score) @dataclass class NightlyConfig: @@ -159,7 +161,7 @@ class NightlyConfig: @dataclass class WriteTimeConfig: - """Tier 2a (ADR-004): write-time contradiction check on palinode_save. + """Tier 2a: write-time contradiction check on palinode_save. 
When enabled, every save schedules a background contradiction check against similar existing memories. The check runs asynchronously @@ -234,6 +236,12 @@ class GitConfig: auto_push: bool = False commit_prefix: str = "palinode" +@dataclass +class AuditConfig: + """MCP tool call audit logging for compliance and debugging.""" + enabled: bool = True + log_path: str = ".audit/mcp-calls.jsonl" + @dataclass class LoggingConfig: """Log formatting and target directories constraints formats.""" @@ -281,6 +289,38 @@ class ContextConfig: project_map: dict[str, str] = field(default_factory=dict) # CWD basename → entity ref embed_augment: bool = True # Prepend project context to query before embedding +@dataclass +class ScopeConfig: + """Layer 1: scope chain for multi-harness, multi-agent, team memory. + + Scopes form an entity-ref hierarchy: org → member → project → harness → agent → session. + Memories inherit DOWN the chain by default. A session's scope is resolved from + env vars and config. + + Layer 1 scope (this slice): resolution only — produces a ScopeChain from + config + env. Later slices wire the chain into store search, the + /context/prime endpoint, and frontmatter `scope` field parsing. + + Env vars: + PALINODE_ORG → scope.org + PALINODE_MEMBER → scope.member + PALINODE_HARNESS → scope.harness (MCP client auto-detection is Layer 2+) + PALINODE_AGENT → scope.agent (multi-agent orchestration only) + + prime_mode: + "classic" — inject all core files regardless of scope (legacy, default + during Layer 1 rollout for backwards compatibility). + "scoped" — filter core files by the session's scope chain. Flip the + default to "scoped" in a follow-up once Slices 2-3 land. 
+ """ + enabled: bool = False + org: str | None = None + member: str | None = None + harness: str | None = None + agent: str | None = None + prime_mode: str = "classic" + + @dataclass class CompactionConfig: """Operations controls algorithms parameters logic models layouts mapping endpoints.""" @@ -306,10 +346,12 @@ class Config: consolidation: ConsolidationConfig = field(default_factory=ConsolidationConfig) compaction: CompactionConfig = field(default_factory=CompactionConfig) context: ContextConfig = field(default_factory=ContextConfig) + scope: ScopeConfig = field(default_factory=ScopeConfig) decay: DecayConfig = field(default_factory=DecayConfig) services: ServicesConfig = field(default_factory=ServicesConfig) security: SecurityConfig = field(default_factory=SecurityConfig) git: GitConfig = field(default_factory=GitConfig) + audit: AuditConfig = field(default_factory=AuditConfig) logging: LoggingConfig = field(default_factory=LoggingConfig) @property @@ -385,6 +427,14 @@ def load_config() -> Config: cfg.services.api.port = int(os.environ["PALINODE_API_PORT"]) except ValueError: pass + if "PALINODE_ORG" in os.environ: + cfg.scope.org = os.environ["PALINODE_ORG"] + if "PALINODE_MEMBER" in os.environ: + cfg.scope.member = os.environ["PALINODE_MEMBER"] + if "PALINODE_HARNESS" in os.environ: + cfg.scope.harness = os.environ["PALINODE_HARNESS"] + if "PALINODE_AGENT" in os.environ: + cfg.scope.agent = os.environ["PALINODE_AGENT"] # Print summary string try: diff --git a/palinode/core/git_tools.py b/palinode/core/git_tools.py index 08da858..0f28c67 100644 --- a/palinode/core/git_tools.py +++ b/palinode/core/git_tools.py @@ -238,10 +238,6 @@ def history(file_path: str, limit: int = 20) -> list[dict[str, str]]: return commits -# Keep as alias for any remaining callers -timeline = history - - def rollback(file_path: str, commit: str | None = None, dry_run: bool = False) -> str: """Revert a memory file to a previous version. 
diff --git a/palinode/core/lint.py b/palinode/core/lint.py index 593cb75..f1b759a 100644 --- a/palinode/core/lint.py +++ b/palinode/core/lint.py @@ -24,9 +24,12 @@ def run_lint_pass() -> dict[str, Any]: stale_files = [] missing_fields = [] contradictions = [] # Heuristic placeholder - + missing_entities: list[str] = [] + missing_descriptions: list[str] = [] + core_count = 0 + now = datetime.now(timezone.utc) - + entity_references: dict[str, int] = {} all_files = [] @@ -82,7 +85,19 @@ def run_lint_pass() -> dict[str, Any]: if not has_entities and not is_referenced: orphaned_files.append(path) - # 3. Stale + # 3. Missing entities (non-daily files with empty entities list) + if not path.startswith("daily/") and not meta.get("entities"): + missing_entities.append(path) + + # 4. Missing description + if not path.startswith("daily/") and not meta.get("description"): + missing_descriptions.append(path) + + # 5. Core count + if meta.get("core"): + core_count += 1 + + # 6. Stale if meta.get("status") == "active": last_updated = meta.get("last_updated") or meta.get("created_at") if last_updated: @@ -127,5 +142,8 @@ def run_lint_pass() -> dict[str, Any]: "orphaned_files": orphaned_files, "stale_files": stale_files, "missing_fields": missing_fields, - "contradictions": unique_contradictions + "contradictions": unique_contradictions, + "missing_entities": missing_entities, + "missing_descriptions": missing_descriptions, + "core_count": core_count, } diff --git a/palinode/core/parser.py b/palinode/core/parser.py index 810ae00..b1896c5 100644 --- a/palinode/core/parser.py +++ b/palinode/core/parser.py @@ -22,6 +22,31 @@ def slugify(text: str) -> str: return text.strip('-') +def _build_canonical_question_prefix(metadata: dict[str, Any]) -> str: + """Build a text prefix from canonical_question frontmatter. + + Accepts a single string or a list of strings. 
Returns a formatted + prefix like ``"Q: …\\n\\n"`` ready to be prepended to chunk content, + or an empty string if the field is absent. + """ + cq = metadata.get("canonical_question") + if not cq: + return "" + + if isinstance(cq, str): + questions = [cq] + elif isinstance(cq, list): + questions = [str(q) for q in cq if q] + else: + return "" + + if not questions: + return "" + + lines = [f"Q: {q}" for q in questions] + return "\n".join(lines) + "\n\n" + + def parse_markdown(content: str) -> tuple[dict[str, Any], list[dict[str, str]]]: """Parses a complete markdown string payload containing YAML frontmatter. @@ -46,10 +71,13 @@ def parse_markdown(content: str) -> tuple[dict[str, Any], list[dict[str, str]]]: metadata = {} body = content - # If document is short (~500 tokens corresponds to roughly 2000 chars), keep it safely bound + # Build canonical question prefix from frontmatter (string or list of strings). + cq_prefix = _build_canonical_question_prefix(metadata) + + # If document is short (~500 tokens corresponds to roughly 2000 chars), keep it safely bound # to a single core chunk to prevent semantic fracturing. if len(body) < 2000: - return metadata, [{"section_id": "root", "content": body}] + return metadata, [{"section_id": "root", "content": cq_prefix + body}] # Split by h2 or h3 natively formatted headers. # regex intentionally matches lines starting exclusively with ## or ### @@ -86,4 +114,9 @@ def parse_markdown(content: str) -> tuple[dict[str, Any], list[dict[str, str]]]: if not sections: sections = [{"section_id": "root", "content": body}] + # Prepend canonical question prefix to the first chunk so the + # embedding captures the question semantics the file answers. 
+ if cq_prefix and sections: + sections[0]["content"] = cq_prefix + sections[0]["content"] + return metadata, sections diff --git a/palinode/core/scope.py b/palinode/core/scope.py new file mode 100644 index 0000000..e68c82c --- /dev/null +++ b/palinode/core/scope.py @@ -0,0 +1,72 @@ +"""Scope chain resolution (Layer 1 of scoped memory). + +Build a ScopeChain from config + env + caller-supplied project/session, to be +consumed by scope-filtered search and the context prime endpoint. This module +is pure resolution — no I/O, no DB, no filtering. Isolating it here keeps +subsequent layers easy to test. +""" +from __future__ import annotations + +from dataclasses import dataclass + +from palinode.core.config import Config + + +@dataclass(frozen=True) +class ScopeChain: + """Ordered scope chain from narrowest (session) to broadest (org). + + Each level is an entity ref string (e.g. ``project/palinode``). + Unset levels are dropped when serialized via :meth:`as_list`. + The order of :meth:`as_list` is the search-priority order: earlier + entries are more specific and take precedence over later ones. + """ + session: str | None = None + agent: str | None = None + harness: str | None = None + project: str | None = None + member: str | None = None + org: str | None = None + + def as_list(self) -> list[str]: + """Return the chain as entity refs, narrow → broad, omitting unset levels.""" + entries: list[tuple[str, str | None]] = [ + ("session", self.session), + ("agent", self.agent), + ("harness", self.harness), + ("project", self.project), + ("member", self.member), + ("org", self.org), + ] + return [f"{kind}/{value}" for kind, value in entries if value] + + def is_empty(self) -> bool: + """True when no levels are set (caller has zero scoping context).""" + return not self.as_list() + + +def resolve_scope_chain( + cfg: Config, + project: str | None = None, + session_id: str | None = None, +) -> ScopeChain: + """Resolve the scope chain for the current session. 
+ + ``project`` should be the caller-resolved project entity name (typically + supplied by the ambient-context detection). Pass ``None`` when the caller + has no project signal. + + ``session_id`` is the caller-generated session identifier. Pass ``None`` + when session-level scoping is not in use. + + Other levels are read from :class:`ScopeConfig` (env vars override YAML). + """ + s = cfg.scope + return ScopeChain( + session=session_id, + agent=s.agent, + harness=s.harness, + project=project, + member=s.member, + org=s.org, + ) diff --git a/palinode/core/store.py b/palinode/core/store.py index afdd41f..c49e871 100644 --- a/palinode/core/store.py +++ b/palinode/core/store.py @@ -352,7 +352,7 @@ def search(query_embedding: list[float], category: str | None = None, top_k (int): Maximum number of results to return. threshold (float): Minimum cosine similarity score (0.0-1.0). context_entities (list[str] | None): Entity refs (e.g. ["project/palinode"]) - for ADR-008 ambient context boost. Matching results get score * config.context.boost. + for ambient context boost. Matching results get score * config.context.boost. include_daily (bool): If True, skip the daily/ penalty (search daily notes at full rank). 
Returns: @@ -424,7 +424,7 @@ def search(query_embedding: list[float], category: str | None = None, db.close() - # ADR-008: Ambient context boost (same logic as search_hybrid) + # Ambient context boost (same logic as search_hybrid) if context_entities and config.context.enabled and config.context.boost != 1.0: context_files: set[str] = set() for entity in context_entities: @@ -689,7 +689,7 @@ def search_hybrid( for key in sorted_keys: result_map[key]["score"] = rrf_scores[key] / max_score - # Ambient context boost (ADR-008): boost results matching caller's project context + # Ambient context boost: boost results matching caller's project context if context_entities and config.context.enabled and config.context.boost != 1.0: context_files: set[str] = set() for entity in context_entities: diff --git a/palinode/mcp.py b/palinode/mcp.py index aa9af30..6455d57 100644 --- a/palinode/mcp.py +++ b/palinode/mcp.py @@ -6,7 +6,7 @@ All tool implementations are thin HTTP wrappers around the Palinode API server. The MCP server itself holds no database connections, embedder state, or git handles. -Set PALINODE_API_HOST to point at a remote API server (e.g. over a VPN). +Set PALINODE_API_HOST to point at a remote API server (e.g. over Tailscale). Tools: palinode_search — semantic search over memory files @@ -30,6 +30,7 @@ import asyncio import logging +import time from typing import Any import httpx @@ -40,15 +41,17 @@ import os from palinode.core.config import config +from palinode.core.audit import AuditLogger logger = logging.getLogger("palinode.mcp") logging.basicConfig(level=logging.WARNING) # quiet — don't pollute stdio server = Server("palinode") +_audit = AuditLogger(config.memory_dir, config.audit) def _resolve_context() -> list[str] | None: - """Resolve ambient project context from environment (ADR-008). + """Resolve ambient project context from environment. Resolution order: 1. PALINODE_PROJECT env var (explicit entity ref, e.g. 
"project/palinode") @@ -164,6 +167,10 @@ async def list_tools() -> list[types.Tool]: }, }, }, + annotations=types.ToolAnnotations( + title="List Memory Files", + readOnlyHint=True, destructiveHint=False, idempotentHint=True, openWorldHint=False, + ), ), types.Tool( name="palinode_read", @@ -181,6 +188,10 @@ async def list_tools() -> list[types.Tool]: }, "required": ["file_path"], }, + annotations=types.ToolAnnotations( + title="Read Memory File", + readOnlyHint=True, destructiveHint=False, idempotentHint=True, openWorldHint=False, + ), ), types.Tool( name="palinode_search", @@ -214,9 +225,18 @@ async def list_tools() -> list[types.Tool]: "type": "string", "description": "Filter results before an ISO date", }, + "include_daily": { + "type": "boolean", + "description": "Include daily session notes at full rank (default: false, daily/ files are penalized)", + "default": False, + }, }, "required": ["query"], }, + annotations=types.ToolAnnotations( + title="Search Memory", + readOnlyHint=True, destructiveHint=False, idempotentHint=True, openWorldHint=False, + ), ), types.Tool( name="palinode_save", @@ -256,6 +276,10 @@ async def list_tools() -> list[types.Tool]: }, "required": ["content", "type"], }, + annotations=types.ToolAnnotations( + title="Save Memory", + readOnlyHint=False, destructiveHint=False, idempotentHint=False, openWorldHint=False, + ), ), types.Tool( name="palinode_ingest", @@ -274,6 +298,10 @@ async def list_tools() -> list[types.Tool]: }, "required": ["url"], }, + annotations=types.ToolAnnotations( + title="Ingest URL", + readOnlyHint=False, destructiveHint=False, idempotentHint=False, openWorldHint=True, + ), ), types.Tool( name="palinode_status", @@ -282,6 +310,10 @@ async def list_tools() -> list[types.Tool]: "type": "object", "properties": {}, }, + annotations=types.ToolAnnotations( + title="Health Status", + readOnlyHint=True, destructiveHint=False, idempotentHint=True, openWorldHint=False, + ), ), types.Tool( name="palinode_history", @@ -304,6 
+336,10 @@ async def list_tools() -> list[types.Tool]: }, "required": ["file_path"], }, + annotations=types.ToolAnnotations( + title="File History", + readOnlyHint=True, destructiveHint=False, idempotentHint=True, openWorldHint=False, + ), ), types.Tool( name="palinode_entities", @@ -317,6 +353,10 @@ async def list_tools() -> list[types.Tool]: } }, }, + annotations=types.ToolAnnotations( + title="Entity Graph", + readOnlyHint=True, destructiveHint=False, idempotentHint=True, openWorldHint=False, + ), ), types.Tool( name="palinode_consolidate", @@ -325,6 +365,10 @@ async def list_tools() -> list[types.Tool]: "type": "object", "properties": {}, }, + annotations=types.ToolAnnotations( + title="Run Consolidation", + readOnlyHint=False, destructiveHint=True, idempotentHint=False, openWorldHint=False, + ), ), types.Tool( name="palinode_diff", @@ -347,6 +391,10 @@ async def list_tools() -> list[types.Tool]: }, }, }, + annotations=types.ToolAnnotations( + title="Recent Changes", + readOnlyHint=True, destructiveHint=False, idempotentHint=True, openWorldHint=False, + ), ), types.Tool( name="palinode_blame", @@ -368,6 +416,10 @@ async def list_tools() -> list[types.Tool]: }, "required": ["file"], }, + annotations=types.ToolAnnotations( + title="Blame / Provenance", + readOnlyHint=True, destructiveHint=False, idempotentHint=True, openWorldHint=False, + ), ), types.Tool( name="palinode_rollback", @@ -394,11 +446,19 @@ async def list_tools() -> list[types.Tool]: }, "required": ["file"], }, + annotations=types.ToolAnnotations( + title="Rollback File", + readOnlyHint=False, destructiveHint=True, idempotentHint=False, openWorldHint=False, + ), ), types.Tool( name="palinode_push", description="Sync memory changes to GitHub for backup and cross-machine access.", inputSchema={"type": "object", "properties": {}}, + annotations=types.ToolAnnotations( + title="Push to Remote", + readOnlyHint=False, destructiveHint=False, idempotentHint=False, openWorldHint=True, + ), ), types.Tool( 
name="palinode_trigger", @@ -430,6 +490,10 @@ async def list_tools() -> list[types.Tool]: }, "required": ["action"], }, + annotations=types.ToolAnnotations( + title="Manage Triggers", + readOnlyHint=False, destructiveHint=False, idempotentHint=False, openWorldHint=False, + ), ), types.Tool( name="palinode_session_end", @@ -466,6 +530,10 @@ async def list_tools() -> list[types.Tool]: }, "required": ["summary"], }, + annotations=types.ToolAnnotations( + title="End Session", + readOnlyHint=False, destructiveHint=False, idempotentHint=False, openWorldHint=False, + ), ), types.Tool( name="palinode_lint", @@ -477,6 +545,10 @@ async def list_tools() -> list[types.Tool]: "type": "object", "properties": {}, }, + annotations=types.ToolAnnotations( + title="Lint Memory", + readOnlyHint=True, destructiveHint=False, idempotentHint=True, openWorldHint=False, + ), ), types.Tool( name="palinode_prompt", @@ -507,6 +579,10 @@ async def list_tools() -> list[types.Tool]: }, "required": ["action"], }, + annotations=types.ToolAnnotations( + title="Manage Prompts", + readOnlyHint=False, destructiveHint=False, idempotentHint=False, openWorldHint=False, + ), ), ] @@ -515,6 +591,24 @@ async def list_tools() -> list[types.Tool]: @server.call_tool() async def call_tool(name: str, arguments: dict[str, Any]) -> list[types.TextContent]: + start_time = time.monotonic() + result = await _dispatch_tool(name, arguments) + duration_ms = (time.monotonic() - start_time) * 1000 + + # Detect error responses (the dispatch handler returns error text rather than raising) + first_text = result[0].text if result else "" + is_error = first_text.startswith(("Error", "API Error", "Search failed", "Save failed", + "Ingest failed", "Push failed", "Consolidation failed", + "Session-end failed", "Lint failed")) + _audit.log_call( + name, arguments, duration_ms, + status="error" if is_error else "success", + error=first_text if is_error else None, + ) + return result + + +async def _dispatch_tool(name: str, 
arguments: dict[str, Any]) -> list[types.TextContent]: try: # ── list ────────────────────────────────────────────────────────── if name == "palinode_list": @@ -554,9 +648,11 @@ async def call_tool(name: str, arguments: dict[str, Any]) -> list[types.TextCont body["date_after"] = arguments["date_after"] if arguments.get("date_before"): body["date_before"] = arguments["date_before"] + if arguments.get("include_daily"): + body["include_daily"] = True # Use MCP threshold, not API default body["threshold"] = config.search.mcp_threshold - # ADR-008: ambient context boost + # ambient context boost context = _resolve_context() if context: body["context"] = context diff --git a/plugin/index.ts b/plugin/index.ts index e32a290..4325de0 100644 --- a/plugin/index.ts +++ b/plugin/index.ts @@ -387,21 +387,6 @@ const palinodePlugin = { }, }, { name: "palinode_blame" }); - api.registerTool({ - name: "palinode_history", - label: "Palinode History", - description: "Show git history of a memory file with diff stats and rename tracking.", - parameters: Type.Object({ - file: Type.String({ description: "Memory file path" }), - limit: Type.Optional(Type.Number({ description: "Max entries (default 20)" })), - }), - async execute(_id: string, params: any) { - const res = await palinodeFetch(cfg.palinodeApiUrl, `/history/${params.file}?limit=${params.limit || 20}`); - const lines = (res.history || []).map((h: any) => `${h.hash} | ${h.date} | ${h.message}${h.stats ? ' | ' + h.stats : ''}`); - return { content: [{ type: "text", text: lines.join("\n") || "No history found." }] }; - }, - }, { name: "palinode_history" }); - // ======================================================================== // Quick-save flag: -es at end of message → save to Palinode // Works from any channel (Telegram, webchat, Discord, etc.) 
diff --git a/pyproject.toml b/pyproject.toml index a3fecb2..1a004d0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,7 @@ dependencies = [ "Repository" = "https://github.com/phasespace-labs/palinode" "Issues" = "https://github.com/phasespace-labs/palinode/issues" "Changelog" = "https://github.com/phasespace-labs/palinode/blob/main/docs/CHANGELOG.md" +"Privacy" = "https://github.com/phasespace-labs/palinode/blob/main/PRIVACY.md" [project.scripts] palinode-watcher = "palinode.indexer.watcher:main" @@ -57,8 +58,17 @@ palinode-mcp-sse = "palinode.mcp:main_sse" # deprecated alias for main_http [project.optional-dependencies] dev = [ "pytest", - "pytest-asyncio", + "pytest-asyncio" ] +[tool.pytest.ini_options] +testpaths = ["tests"] +# Exclude live tests from default runs (they need a running server) +addopts = "--ignore=tests/live" +markers = [ + "slow: marks tests that need Ollama or take >5s", +] +asyncio_mode = "strict" + [tool.setuptools] -packages = ["palinode", "palinode.core", "palinode.indexer", "palinode.api", "palinode.ingest", "palinode.consolidation", "palinode.cli"] +packages = ["palinode", "palinode.core", "palinode.indexer", "palinode.api", "palinode.ingest", "palinode.consolidation", "palinode.migration", "palinode.cli"] diff --git a/scripts/scrub-check.sh b/scripts/scrub-check.sh new file mode 100755 index 0000000..e5ca865 --- /dev/null +++ b/scripts/scrub-check.sh @@ -0,0 +1,106 @@ +#!/bin/bash +# Scan the public repo (or any directory) for leaked secrets/PII. +# Run against the public-push branch or the public repo clone. 
+# +# Usage: +# ./scripts/scrub-check.sh # scan current directory +# ./scripts/scrub-check.sh /path/to/public # scan specific path +# +# Exit code: 0 = clean, 1 = leaks found + +set -euo pipefail + +TARGET="${1:-.}" +FAILED=0 + +# Patterns that must NEVER appear in public code +PATTERNS=( + # Private IPs and infrastructure + '10\.2\.1\.(61|65|69)' + '100\.83\.166' + '100\.108\.11' + '\.ts\.net' + 'tailscale' + 'clawdbot' + + # Personal paths and usernames + '/home/clawd' + '~/clawd/' + 'clawd@' + 'paul-kyle-pedro' + + # Private repo references + 'palinode-dev' + + # Email addresses + 'grue\.lurker' + 'grue\.lurker@gmail' + + # Infrastructure specifics + 'deploy_5060' + 'engram-data' + 'engram-api' + 'engram-watcher' + + # Internal planning docs that shouldn't be in file content + 'POST-STRATEGY' + 'PUBLIC-LAUNCH-PLAN' + 'AGENT-ROADMAP' + 'CODEX-REVIEW-PROMPT' + 'GOVERNOR-MEM0-DISABLE' +) + +# Files/dirs to skip (binary, git internals, this script itself) +SKIP="--exclude-dir=.git --exclude-dir=node_modules --exclude-dir=__pycache__ --exclude=scrub-check.sh --exclude=*.db --exclude=*.pyc" + +echo "=== Palinode Public Repo Scrub Check ===" +echo "Target: $TARGET" +echo "" + +for pattern in "${PATTERNS[@]}"; do + matches=$(/usr/bin/grep -riE $SKIP "$pattern" "$TARGET" 2>/dev/null || true) + if [ -n "$matches" ]; then + echo "LEAK FOUND — pattern: $pattern" + echo "$matches" | head -5 + echo "" + FAILED=1 + fi +done + +# Check for files that should never exist in public +BAD_FILES=( + "CLAUDE.md" + "AGENTS.md" + ".roorules" + ".roomodes" + ".roo/mcp.json" + "deploy_5060.sh" + "HANDOFF.md" + "PLAN.md" + "PRD.md" + "FEATURES.md" + "specs/5060-usage-plane.md" + "specs/task-prompts" + "docs/POST-STRATEGY.md" + "docs/PUBLIC-LAUNCH-PLAN.md" + "docs/AGENT-ROADMAP.md" + "docs/CODEX-REVIEW-PROMPT.md" + "docs/GOVERNOR-MEM0-DISABLE.md" + "scripts/sync-ag-artifacts.sh" + "uv.lock" +) + +for f in "${BAD_FILES[@]}"; do + if [ -e "$TARGET/$f" ]; then + echo "FORBIDDEN FILE: $f exists 
in public tree" + FAILED=1 + fi +done + +echo "" +if [ $FAILED -eq 0 ]; then + echo "ALL CLEAN — no secrets or forbidden files found." +else + echo "LEAKS DETECTED — fix before pushing to public." + exit 1 +fi diff --git a/skill/palinode-claude-code/SKILL.md b/skill/palinode-claude-code/SKILL.md index e7cb81f..6b4f96f 100644 --- a/skill/palinode-claude-code/SKILL.md +++ b/skill/palinode-claude-code/SKILL.md @@ -7,7 +7,7 @@ metadata: mcp-server: palinode category: memory tags: [memory, persistence, claude-code, mcp, knowledge-management] - documentation: https://github.com/Paul-Kyle/palinode/blob/main/docs/INSTALL-CLAUDE-CODE.md + documentation: https://github.com/phasespace-labs/palinode/blob/main/docs/INSTALL-CLAUDE-CODE.md --- # Palinode — Claude Code Integration diff --git a/skill/palinode-claude-code/references/setup.md b/skill/palinode-claude-code/references/setup.md index 19cd495..59bd076 100644 --- a/skill/palinode-claude-code/references/setup.md +++ b/skill/palinode-claude-code/references/setup.md @@ -17,7 +17,7 @@ Best if you run Claude Code on the same machine as Palinode. ### 1. Install Palinode ```bash -git clone https://github.com/Paul-Kyle/palinode.git ~/palinode +git clone https://github.com/phasespace-labs/palinode.git ~/palinode cd ~/palinode python3 -m venv venv && source venv/bin/activate pip install -e . 
@@ -151,12 +151,11 @@ Search palinode for "recent project decisions" | `palinode_save` | Store a typed memory (person, decision, insight, project) | | `palinode_ingest` | Fetch a URL and save as research reference | | `palinode_status` | File counts, index health, entity graph size | -| `palinode_history` | Recent daily notes / capture history | +| `palinode_history` | File change history with diff stats and rename tracking | | `palinode_entities` | List known entities and their relationships | | `palinode_consolidate` | Run or preview the weekly compaction job | | `palinode_diff` | See what changed in a memory file (git diff) | | `palinode_blame` | Who/when each section was written | -| `palinode_history` | Git history with diff stats and rename tracking | | `palinode_rollback` | Revert a file to a previous state | | `palinode_push` | Push memory changes to remote git | | `palinode_trigger` | Register a prospective recall intention | diff --git a/skill/palinode-memory/SKILL.md b/skill/palinode-memory/SKILL.md index 68630ef..6fc5201 100644 --- a/skill/palinode-memory/SKILL.md +++ b/skill/palinode-memory/SKILL.md @@ -7,7 +7,7 @@ metadata: mcp-server: palinode category: memory tags: [memory, persistence, knowledge-management, agents] - documentation: https://github.com/Paul-Kyle/palinode + documentation: https://github.com/phasespace-labs/palinode --- # Palinode Memory @@ -86,7 +86,7 @@ Triggers fire when the user's message semantically matches `description`. 
Good f Use these when the user asks about memory history: - `palinode_diff` — what changed in the last N commits for a file - `palinode_blame` — who/when each section was written -- `palinode_history` — git history with diff stats and rename tracking +- `palinode_history` — file change history with diff stats and rename tracking - `palinode_rollback` — revert a file to a previous state ## Consolidation diff --git a/skill/palinode-memory/references/setup.md b/skill/palinode-memory/references/setup.md index ab6d8c2..f606a8c 100644 --- a/skill/palinode-memory/references/setup.md +++ b/skill/palinode-memory/references/setup.md @@ -4,7 +4,7 @@ ```bash # 1. Clone and install -git clone https://github.com/Paul-Kyle/palinode.git ~/palinode +git clone https://github.com/phasespace-labs/palinode.git ~/palinode cd ~/palinode && pip install -e . # 2. Memory directory @@ -15,10 +15,13 @@ cp palinode.config.yaml.example ~/.palinode/palinode.config.yaml PALINODE_DIR=~/.palinode python -m palinode.api.server & PALINODE_DIR=~/.palinode python -m palinode.indexer.watcher & -# 4. (Optional) Configure Claude Code MCP -# See docs/claude-code-setup.md in the repo. +# 4. Install plugin +cp -r ~/palinode/plugin ~/.openclaw/extensions/openclaw-palinode +openclaw gateway restart ``` +Full guide: `docs/INSTALL-OPENCLAW.md` in the repo. + ## Key Config Options (palinode.config.yaml) | Key | Default | Notes | diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/integration/test_api_roundtrip.py b/tests/integration/test_api_roundtrip.py new file mode 100644 index 0000000..c0078fe --- /dev/null +++ b/tests/integration/test_api_roundtrip.py @@ -0,0 +1,356 @@ +"""Integration tests for Palinode API endpoints. + +Tests the API via FastAPI TestClient against a real (temp) SQLite database +and real filesystem. Only the embedder is mocked (returns fixed 1024-dim +vectors) so no Ollama or external services are required. 
+""" + +import os +import time +import yaml +from unittest import mock + +import pytest +from fastapi.testclient import TestClient + +from palinode.core.config import config + + +# --------------------------------------------------------------------------- +# Fixtures +# --------------------------------------------------------------------------- + +EMBED_DIM = 1024 + + +def _fake_embed(text: str, backend: str = "local") -> list[float]: + """Deterministic fake embedder -- no Ollama needed.""" + return [0.1] * EMBED_DIM + + +@pytest.fixture(autouse=True) +def _isolated_env(tmp_path, monkeypatch): + """Point config at a fresh tmp_path directory and init a real DB. + + Patches: + - config.memory_dir / config.db_path -> tmp_path + - config.git.auto_commit -> False (no real git in tmp) + - embedder.embed -> deterministic fixed vector + - _generate_description -> static fallback (no Ollama) + - _generate_summary -> empty string (no Ollama) + """ + memory_dir = str(tmp_path) + db_path = os.path.join(memory_dir, ".palinode.db") + + monkeypatch.setattr(config, "memory_dir", memory_dir) + monkeypatch.setattr(config, "db_path", db_path) + monkeypatch.setattr(config.git, "auto_commit", False) + + # Create standard category dirs + for d in ("people", "projects", "decisions", "insights", "research", "inbox", "daily"): + os.makedirs(os.path.join(memory_dir, d), exist_ok=True) + + # Init real SQLite + vec tables + from palinode.core import store + store.init_db() + + # Patch embedder and LLM helpers globally for the test + with ( + mock.patch("palinode.core.embedder.embed", side_effect=_fake_embed), + mock.patch("palinode.api.server._generate_description", return_value="Test description"), + mock.patch("palinode.api.server._generate_summary", return_value=""), + ): + yield memory_dir + + +@pytest.fixture() +def client(): + """Fresh TestClient wrapping the FastAPI app (no lifespan -- DB already init'd). 
+ + Also clears the in-memory rate-limit counters so tests don't interfere + with each other. + """ + from palinode.api.server import app, _rate_counters + _rate_counters.clear() + # raise_server_exceptions=False so we can inspect 4xx/5xx responses + return TestClient(app, raise_server_exceptions=False) + + +# --------------------------------------------------------------------------- +# /save tests +# --------------------------------------------------------------------------- + + +def test_save_creates_file(client, tmp_path): + """POST /save should create a markdown file on disk with frontmatter.""" + resp = client.post("/save", json={ + "content": "Alice prefers dark mode.", + "type": "PersonMemory", + "slug": "alice-pref", + "entities": ["person/alice"], + }) + assert resp.status_code == 200 + data = resp.json() + fp = data["file_path"] + assert os.path.exists(fp) + + with open(fp) as f: + text = f.read() + assert "---" in text + assert "Alice prefers dark mode." in text + + # Parse frontmatter + parts = text.split("---", 2) + fm = yaml.safe_load(parts[1]) + assert fm["category"] == "people" + assert fm["type"] == "PersonMemory" + assert "person/alice" in fm["entities"] + + +def test_save_includes_content_hash(client, tmp_path): + """Saved file frontmatter must contain a SHA-256 content_hash.""" + resp = client.post("/save", json={ + "content": "Hash me please.", + "type": "Insight", + "slug": "hash-test", + }) + assert resp.status_code == 200 + fp = resp.json()["file_path"] + with open(fp) as f: + text = f.read() + + fm = yaml.safe_load(text.split("---", 2)[1]) + assert "content_hash" in fm + assert len(fm["content_hash"]) == 64 # SHA-256 hex + + +def test_save_with_confidence(client, tmp_path): + """confidence field should round-trip into frontmatter.""" + resp = client.post("/save", json={ + "content": "High-confidence fact.", + "type": "Decision", + "slug": "conf-test", + "confidence": 0.95, + }) + assert resp.status_code == 200 + fp = 
resp.json()["file_path"] + with open(fp) as f: + text = f.read() + + fm = yaml.safe_load(text.split("---", 2)[1]) + assert fm["confidence"] == 0.95 + + +def test_save_rate_limit(client): + """Exceeding write rate limit (30/min) should return 429.""" + # Clear any prior rate-limit state + from palinode.api.server import _rate_counters + _rate_counters.clear() + + for i in range(31): + resp = client.post("/save", json={ + "content": f"Item {i}", + "type": "Insight", + "slug": f"rate-{i}", + }) + if resp.status_code == 429: + assert i >= 30 # should only trigger after 30 + return + + # If we got here, 31 all succeeded -- the rate limit is per-IP and + # TestClient may use testclient; verify the 31st was blocked. + pytest.fail("Expected 429 after 30 rapid writes") + + +# --------------------------------------------------------------------------- +# /search tests +# --------------------------------------------------------------------------- + + +def test_search_returns_results(client, tmp_path): + """Save a file, manually index it, then search should find it.""" + # 1. Save via API + resp = client.post("/save", json={ + "content": "Palinode uses SQLite-vec for hybrid search.", + "type": "ProjectSnapshot", + "slug": "search-target", + "entities": ["project/palinode"], + }) + assert resp.status_code == 200 + fp = resp.json()["file_path"] + + # 2. Manually index the file into the DB (watcher isn't running) + from palinode.core import store + chunks = [{ + "id": "search-target-1", + "file_path": fp, + "section_id": None, + "category": "projects", + "content": "Palinode uses SQLite-vec for hybrid search.", + "metadata": {}, + "created_at": time.strftime("%Y-%m-%dT%H:%M:%SZ"), + "last_updated": time.strftime("%Y-%m-%dT%H:%M:%SZ"), + "embedding": _fake_embed("x"), + }] + store.upsert_chunks(chunks) + + # 3. 
Search + resp = client.post("/search", json={ + "query": "hybrid search", + "threshold": 0.0, + "limit": 5, + }) + assert resp.status_code == 200 + results = resp.json() + assert len(results) >= 1 + assert any("search-target" in r.get("file_path", r.get("file", "")) for r in results) + + +def test_search_rate_limit(client): + """Exceeding search rate limit (100/min) should return 429.""" + from palinode.api.server import _rate_counters + _rate_counters.clear() + + for i in range(101): + resp = client.post("/search", json={"query": f"test query {i}", "threshold": 0.0}) + if resp.status_code == 429: + assert i >= 100 + return + + pytest.fail("Expected 429 after 100 rapid searches") + + +# --------------------------------------------------------------------------- +# /read tests +# --------------------------------------------------------------------------- + + +def test_read_file(client, tmp_path): + """Save a file then read it back via /read.""" + resp = client.post("/save", json={ + "content": "Readable content here.", + "type": "Insight", + "slug": "read-me", + }) + assert resp.status_code == 200 + + resp = client.get("/read?file_path=insights/read-me.md") + assert resp.status_code == 200 + data = resp.json() + assert "Readable content here." 
in data["content"] + assert data["file"] == "insights/read-me.md" + + +def test_read_path_traversal_blocked(client): + """Path traversal attempts must be rejected with 403.""" + resp = client.get("/read?file_path=../../../etc/passwd") + assert resp.status_code == 403 + + +# --------------------------------------------------------------------------- +# /list tests +# --------------------------------------------------------------------------- + + +def test_list_files(client, tmp_path): + """Saved files should appear in /list results.""" + for slug in ("list-a", "list-b"): + resp = client.post("/save", json={ + "content": f"Content for {slug}.", + "type": "Insight", + "slug": slug, + }) + assert resp.status_code == 200 + + resp = client.get("/list") + assert resp.status_code == 200 + files = resp.json() + slugs_found = [f["file"] for f in files] + assert any("list-a" in f for f in slugs_found) + assert any("list-b" in f for f in slugs_found) + + +# --------------------------------------------------------------------------- +# /status and /health tests +# --------------------------------------------------------------------------- + + +def test_status_endpoint(client): + """GET /status should return 200 with expected stat keys.""" + resp = client.get("/status") + assert resp.status_code == 200 + data = resp.json() + assert "total_chunks" in data or "chunks" in data or isinstance(data, dict) + + +def test_health_endpoint(client): + """GET /health should return 200 with status=ok.""" + resp = client.get("/health") + assert resp.status_code == 200 + data = resp.json() + assert data["status"] in ("ok", "degraded") + assert "chunks" in data + + +# --------------------------------------------------------------------------- +# Security / edge-case tests +# --------------------------------------------------------------------------- + + +def test_save_oversized_rejected(client): + """POST /save with content exceeding 5MB should return 413.""" + big_content = "x" * (6 * 1024 * 
1024) + resp = client.post("/save", json={ + "content": big_content, + "type": "Insight", + "slug": "too-big", + }) + assert resp.status_code == 413 + + +def test_error_no_stacktrace(client): + """Server errors should not leak Python tracebacks to the client.""" + # Force an internal error by requesting a read on a path that will + # trigger an exception inside the handler (null byte = 400, not 500, + # so we use a different approach: patch read_api to raise). + with mock.patch("palinode.api.server.read_api", side_effect=Exception("boom")): + # The patched function replaces the endpoint handler, but FastAPI + # already bound the original. Instead, trigger a real 500 by + # monkeypatching _memory_base_dir to raise. + pass + + # Simpler approach: try a search with a broken store + with mock.patch("palinode.core.store.get_db", side_effect=RuntimeError("db gone")): + resp = client.post("/search", json={"query": "anything"}) + # Should be 500 but without a traceback + assert resp.status_code == 500 + body = resp.text + assert "Traceback" not in body + assert "File " not in body or "server.py" not in body + + +# --------------------------------------------------------------------------- +# /session-end test +# --------------------------------------------------------------------------- + + +def test_session_end_creates_daily(client, tmp_path): + """POST /session-end should create a daily file.""" + resp = client.post("/session-end", json={ + "summary": "Finished integration tests.", + "decisions": ["Use TestClient for HTTP-level tests"], + "project": "palinode", + "source": "test", + }) + assert resp.status_code == 200 + data = resp.json() + + # Daily file should exist + daily_file = data.get("daily_file") + assert daily_file is not None + daily_path = os.path.join(str(tmp_path), daily_file) + assert os.path.exists(daily_path) + + with open(daily_path) as f: + content = f.read() + assert "Finished integration tests." 
in content diff --git a/tests/live/__init__.py b/tests/live/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/live/test_live_instance.py b/tests/live/test_live_instance.py new file mode 100644 index 0000000..84c7bb2 --- /dev/null +++ b/tests/live/test_live_instance.py @@ -0,0 +1,313 @@ +""" +Live Instance Test Suite for Palinode + +Tests a running Palinode API server with real Ollama embeddings and real +memory files. NOT part of CI — run manually to validate a deployment. + +Usage: + # Against localhost (default) + pytest tests/live/test_live_instance.py -v + + # Against a remote server + PALINODE_TEST_HOST=http://palinode.example.com:6340 pytest tests/live/test_live_instance.py -v + + # Skip slow tests (embedding/search) + pytest tests/live/test_live_instance.py -v -k "not slow" + +Requires: a running palinode-api with Ollama reachable. +""" +import os +import time +import json +import hashlib +import pytest +import httpx + +BASE_URL = os.environ.get("PALINODE_TEST_HOST", "http://localhost:6340") +client = httpx.Client(base_url=BASE_URL, timeout=60.0) + +# ── Unique test slug to avoid polluting real memory ────────────────────────── +_TEST_PREFIX = f"livetest-{int(time.time())}" + + +def _cleanup_test_files(): + """Remove any test files we created.""" + try: + resp = client.get("/list") + if resp.status_code == 200: + for f in resp.json(): + if _TEST_PREFIX in f.get("file", ""): + # No delete endpoint — files stay. They're small and harmless. 
+ pass + except Exception: + pass + + +# ── Health & Connectivity ──────────────────────────────────────────────────── + +class TestHealth: + """Basic server health — run these first.""" + + def test_api_reachable(self): + resp = client.get("/status") + assert resp.status_code == 200 + + def test_status_fields(self): + data = client.get("/status").json() + assert "total_files" in data + assert "total_chunks" in data + assert "hybrid_search" in data + assert "ollama_reachable" in data + + def test_ollama_reachable(self): + data = client.get("/status").json() + assert data["ollama_reachable"] is True, "Ollama not reachable — search and save won't work" + + def test_health_endpoint(self): + resp = client.get("/health") + assert resp.status_code == 200 + data = resp.json() + assert data.get("status") in ("healthy", "ok", True) + + def test_chunks_indexed(self): + data = client.get("/status").json() + assert data["total_chunks"] > 0, "No chunks indexed — run palinode reindex first" + + def test_fts_in_sync(self): + data = client.get("/status").json() + assert data.get("fts_chunks", 0) > 0, "FTS index empty — run palinode rebuild-fts" + + +# ── Save & Retrieve ───────────────────────────────────────────────────────── + +class TestSaveRetrieve: + """Save a memory and read it back.""" + + def test_save_creates_file(self): + slug = f"{_TEST_PREFIX}-save" + resp = client.post("/save", json={ + "content": "Live test: verifying save creates a file.", + "type": "Insight", + "slug": slug, + }) + assert resp.status_code == 200 + data = resp.json() + assert "file_path" in data + assert slug in data["file_path"] + + def test_save_includes_content_hash(self): + slug = f"{_TEST_PREFIX}-hash" + resp = client.post("/save", json={ + "content": "Live test: content hash verification.", + "type": "Insight", + "slug": slug, + }) + data = resp.json() + file_path = data["file_path"] + + # Read back and check frontmatter has content_hash + # Extract relative path from absolute + rel_path = 
file_path.split("/")[-2] + "/" + file_path.split("/")[-1] + read_resp = client.get("/read", params={"file_path": rel_path, "meta": True}) + assert read_resp.status_code == 200 + read_data = read_resp.json() + assert "content_hash" in read_data.get("frontmatter", {}), "content_hash missing from frontmatter" + + def test_save_with_confidence(self): + slug = f"{_TEST_PREFIX}-conf" + resp = client.post("/save", json={ + "content": "Live test: confidence field.", + "type": "Decision", + "slug": slug, + "confidence": 0.85, + }) + data = resp.json() + rel_path = data["file_path"].split("/")[-2] + "/" + data["file_path"].split("/")[-1] + read_resp = client.get("/read", params={"file_path": rel_path, "meta": True}) + fm = read_resp.json().get("frontmatter", {}) + assert fm.get("confidence") == 0.85 + + def test_read_file(self): + slug = f"{_TEST_PREFIX}-read" + save_resp = client.post("/save", json={ + "content": "Live test: read back verification.", + "type": "Insight", + "slug": slug, + }) + file_path = save_resp.json()["file_path"] + rel_path = file_path.split("/")[-2] + "/" + file_path.split("/")[-1] + + read_resp = client.get("/read", params={"file_path": rel_path}) + assert read_resp.status_code == 200 + assert "read back verification" in read_resp.json()["content"] + + def test_list_includes_saved_file(self): + slug = f"{_TEST_PREFIX}-list" + client.post("/save", json={ + "content": "Live test: list verification.", + "type": "Insight", + "slug": slug, + }) + resp = client.get("/list") + assert resp.status_code == 200 + files = [f["file"] for f in resp.json()] + assert any(slug in f for f in files), f"Saved file {slug} not in list" + + +# ── Search (requires Ollama) ──────────────────────────────────────────────── + +class TestSearch: + """Search tests — these need Ollama for embeddings.""" + + @pytest.mark.slow + def test_search_returns_results(self): + """Search for something that should exist in any palinode instance.""" + resp = client.post("/search", 
json={"query": "palinode memory", "limit": 3}) + assert resp.status_code == 200 + results = resp.json() + assert len(results) > 0, "Search returned no results" + + @pytest.mark.slow + def test_search_has_score_fields(self): + resp = client.post("/search", json={"query": "architecture decision", "limit": 1}) + results = resp.json() + if results: + r = results[0] + assert "score" in r + assert "raw_score" in r + assert "freshness" in r + + @pytest.mark.slow + def test_search_freshness_not_unknown(self): + """After reindex, freshness should be valid or stale, not unknown.""" + resp = client.post("/search", json={"query": "project status", "limit": 5}) + results = resp.json() + for r in results: + assert r.get("freshness") != "unknown", ( + f"Freshness is 'unknown' for {r['file_path']} — " + f"content_hash may be missing from results" + ) + + @pytest.mark.slow + def test_search_daily_penalty(self): + """daily/ files should score lower than equivalent non-daily files.""" + resp = client.post("/search", json={"query": "session summary", "limit": 10}) + results = resp.json() + daily = [r for r in results if "/daily/" in r["file_path"]] + non_daily = [r for r in results if "/daily/" not in r["file_path"]] + if daily and non_daily: + # Daily files should generally score lower + avg_daily = sum(r["score"] for r in daily) / len(daily) + avg_other = sum(r["score"] for r in non_daily) / len(non_daily) + assert avg_daily < avg_other, ( + f"Daily avg ({avg_daily:.3f}) >= non-daily avg ({avg_other:.3f}) — " + f"daily penalty may not be working" + ) + + @pytest.mark.slow + def test_save_then_search(self): + """Save a unique memory, wait for indexing, search for it.""" + unique = f"xyzzy-{_TEST_PREFIX}-unique-marker" + client.post("/save", json={ + "content": f"Live test: {unique}. 
This is a unique searchable phrase.", + "type": "Insight", + "slug": f"{_TEST_PREFIX}-searchable", + }) + # Poll for up to 30s — watcher needs to detect file, call Ollama + # for embedding, and upsert into the index + found = False + for attempt in range(6): + time.sleep(5) + resp = client.post("/search", json={"query": unique, "limit": 3}) + results = resp.json() + if any(unique in r.get("content", "") for r in results): + found = True + break + assert found, ( + f"Saved memory with '{unique}' not found in search after 30s — " + f"watcher may not be running or Ollama unreachable" + ) + + +# ── Security ──────────────────────────────────────────────────────────────── + +class TestSecurity: + """Security checks against the live instance.""" + + def test_path_traversal_blocked(self): + resp = client.get("/read", params={"file_path": "../../../etc/passwd"}) + assert resp.status_code in (400, 403, 404), f"Path traversal not blocked: {resp.status_code}" + + def test_null_byte_blocked(self): + resp = client.get("/read", params={"file_path": "test\x00.md"}) + assert resp.status_code in (400, 403, 404, 422) + + def test_oversized_request(self): + huge = "x" * (6 * 1024 * 1024) # 6MB + resp = client.post("/save", json={"content": huge, "type": "Insight"}) + assert resp.status_code in (413, 422), f"Oversized request not rejected: {resp.status_code}" + + def test_error_no_stacktrace(self): + """Force an error and verify no Python traceback in response.""" + resp = client.get("/read", params={"file_path": "nonexistent/fakefile.md"}) + if resp.status_code >= 400: + body = resp.text + assert "Traceback" not in body, "Stack trace leaked in error response" + assert "File \"" not in body, "File path leaked in error response" + + +# ── Git Tools ──────────────────────────────────────────────────────────────── + +class TestGitTools: + """Git-backed provenance tools.""" + + def test_diff_returns(self): + resp = client.get("/diff", params={"days": 7}) + assert resp.status_code == 200 + 
+ def test_lint_returns(self): + resp = client.post("/lint") + assert resp.status_code == 200 + + +# ── MCP Audit Log ─────────────────────────────────────────────────────────── + +class TestAuditLog: + """Verify MCP audit logging is working (requires MCP server).""" + + def test_status_check_generates_no_crash(self): + """At minimum, hitting the API shouldn't crash the audit logger.""" + resp = client.get("/status") + assert resp.status_code == 200 + + +# ── Rate Limiting ─────────────────────────────────────────────────────────── + +class TestRateLimiting: + """Verify rate limits are enforced.""" + + @pytest.mark.slow + def test_search_rate_limit(self): + """Exceed search rate limit (default 100/min).""" + blocked = False + for i in range(105): + resp = client.post("/search", json={"query": "rate limit test", "limit": 1}) + if resp.status_code == 429: + blocked = True + break + assert blocked, "Rate limit not triggered after 105 search requests" + + @pytest.mark.slow + def test_save_rate_limit(self): + """Exceed save rate limit (default 30/min).""" + blocked = False + for i in range(35): + resp = client.post("/save", json={ + "content": f"Rate limit test {i}", + "type": "Insight", + "slug": f"{_TEST_PREFIX}-rate-{i}", + }) + if resp.status_code == 429: + blocked = True + break + assert blocked, "Rate limit not triggered after 35 save requests" diff --git a/tests/test_audit.py b/tests/test_audit.py new file mode 100644 index 0000000..5555936 --- /dev/null +++ b/tests/test_audit.py @@ -0,0 +1,184 @@ +"""Tests for #116: MCP audit log (structured JSON tool call logging).""" +import json +import os +import time + +import pytest + +from palinode.core.audit import AuditLogger, _sanitize_arguments +from palinode.core.config import AuditConfig + + +@pytest.fixture +def audit_dir(tmp_path): + """Provide a temp directory for audit logs.""" + return tmp_path + + +@pytest.fixture +def audit_logger(audit_dir): + """Create an AuditLogger writing to a temp directory.""" + cfg = 
AuditConfig(enabled=True, log_path=".audit/mcp-calls.jsonl") + return AuditLogger(str(audit_dir), cfg) + + +def _read_entries(logger: AuditLogger) -> list[dict]: + """Read all JSONL entries from the audit log.""" + path = logger.log_path + assert path is not None + if not path.exists(): + return [] + entries = [] + with open(path) as f: + for line in f: + line = line.strip() + if line: + entries.append(json.loads(line)) + return entries + + +class TestSanitizeArguments: + def test_short_values_unchanged(self): + args = {"query": "hello", "limit": 5} + result = _sanitize_arguments(args) + assert result == args + + def test_long_content_truncated(self): + long_text = "x" * 300 + result = _sanitize_arguments({"content": long_text}) + assert len(result["content"]) == 203 # 200 + "..." + assert result["content"].endswith("...") + + def test_long_query_truncated(self): + long_query = "search " * 50 + result = _sanitize_arguments({"query": long_query}) + assert len(result["query"]) <= 204 # 200 + "..." 
+ + def test_non_truncate_fields_left_alone(self): + long_value = "y" * 500 + result = _sanitize_arguments({"file_path": long_value, "category": "people"}) + assert result["file_path"] == long_value + + def test_non_string_values_left_alone(self): + result = _sanitize_arguments({"content": 12345, "limit": 5}) + assert result["content"] == 12345 + + +class TestAuditLogger: + def test_creates_directory(self, audit_dir): + cfg = AuditConfig(enabled=True, log_path=".audit/mcp-calls.jsonl") + logger = AuditLogger(str(audit_dir), cfg) + assert (audit_dir / ".audit").is_dir() + + def test_log_creates_file(self, audit_logger): + audit_logger.log_call("palinode_search", {"query": "test"}, 42.5, "success") + assert audit_logger.log_path.exists() + + def test_log_entry_structure(self, audit_logger): + audit_logger.log_call( + "palinode_save", + {"content": "important decision", "type": "Decision"}, + 123.4, + "success", + ) + entries = _read_entries(audit_logger) + assert len(entries) == 1 + entry = entries[0] + + assert entry["tool_name"] == "palinode_save" + assert entry["arguments"]["content"] == "important decision" + assert entry["arguments"]["type"] == "Decision" + assert entry["duration_ms"] == 123.4 + assert entry["status"] == "success" + assert entry["error"] is None + assert "timestamp" in entry + assert "client_info" in entry + + def test_log_error_entry(self, audit_logger): + audit_logger.log_call( + "palinode_search", + {"query": "fail"}, + 5.0, + "error", + error="Connection refused", + ) + entries = _read_entries(audit_logger) + assert len(entries) == 1 + assert entries[0]["status"] == "error" + assert entries[0]["error"] == "Connection refused" + + def test_multiple_entries_appended(self, audit_logger): + for i in range(3): + audit_logger.log_call(f"tool_{i}", {}, float(i), "success") + entries = _read_entries(audit_logger) + assert len(entries) == 3 + assert [e["tool_name"] for e in entries] == ["tool_0", "tool_1", "tool_2"] + + def 
test_content_truncated_in_log(self, audit_logger): + long_content = "a" * 500 + audit_logger.log_call( + "palinode_save", + {"content": long_content, "type": "Insight"}, + 10.0, + "success", + ) + entries = _read_entries(audit_logger) + logged_content = entries[0]["arguments"]["content"] + assert len(logged_content) == 203 # 200 + "..." + assert logged_content.endswith("...") + + def test_disabled_logger_does_nothing(self, audit_dir): + cfg = AuditConfig(enabled=False) + logger = AuditLogger(str(audit_dir), cfg) + logger.log_call("palinode_search", {"query": "test"}, 10.0, "success") + assert logger.log_path is None + # No file created + assert not (audit_dir / ".audit").exists() + + def test_timestamp_is_iso_format(self, audit_logger): + audit_logger.log_call("palinode_status", {}, 1.0, "success") + entries = _read_entries(audit_logger) + ts = entries[0]["timestamp"] + # Should be parseable as ISO 8601 + from datetime import datetime + dt = datetime.fromisoformat(ts) + assert dt.year >= 2026 + + def test_jsonl_format_one_line_per_entry(self, audit_logger): + audit_logger.log_call("tool_a", {"x": 1}, 1.0, "success") + audit_logger.log_call("tool_b", {"y": 2}, 2.0, "success") + with open(audit_logger.log_path) as f: + lines = f.readlines() + assert len(lines) == 2 + # Each line is valid JSON + for line in lines: + json.loads(line) + + def test_absolute_log_path(self, audit_dir): + abs_path = str(audit_dir / "custom" / "audit.jsonl") + cfg = AuditConfig(enabled=True, log_path=abs_path) + logger = AuditLogger(str(audit_dir), cfg) + logger.log_call("test", {}, 1.0, "success") + assert os.path.exists(abs_path) + + def test_client_info_populated(self, audit_logger, monkeypatch): + monkeypatch.setenv("MCP_CLIENT_NAME", "claude-code") + monkeypatch.setenv("PALINODE_PROJECT", "project/palinode") + audit_logger.log_call("palinode_search", {"query": "test"}, 5.0, "success") + entries = _read_entries(audit_logger) + ci = entries[0]["client_info"] + assert ci["harness"] == 
"claude-code" + assert ci["project"] == "project/palinode" + + +class TestAuditConfig: + def test_defaults(self): + cfg = AuditConfig() + assert cfg.enabled is True + assert cfg.log_path == ".audit/mcp-calls.jsonl" + + def test_config_from_yaml(self): + """AuditConfig can be overridden.""" + cfg = AuditConfig(enabled=False, log_path="custom/audit.jsonl") + assert cfg.enabled is False + assert cfg.log_path == "custom/audit.jsonl" diff --git a/tests/test_cli_init.py b/tests/test_cli_init.py new file mode 100644 index 0000000..ff1da0d --- /dev/null +++ b/tests/test_cli_init.py @@ -0,0 +1,187 @@ +"""Tests for `palinode init` — the zero-friction scaffolding command. + +These are regression guards for two things: + +1. The deterministic slash commands (`/ps` and `/wrap`). If someone refactors + `init.py` and accidentally reintroduces smart-dispatch, these tests fail. +2. The idempotent install flow — re-running init must not corrupt existing + files, and merging into existing JSON must not stomp unrelated keys. 
+""" +import json +from pathlib import Path + +from click.testing import CliRunner + +from palinode.cli import main +from palinode.cli.init import ( + PS_COMMAND_BODY, + WRAP_COMMAND_BODY, + _slugify, +) + + +# ---- Slug --------------------------------------------------------------- + + +def test_slugify_basic(): + assert _slugify("my-project") == "my-project" + assert _slugify("My Project") == "my-project" + assert _slugify("My Project!") == "my-project" + assert _slugify("palinode") == "palinode" + assert _slugify("foo_bar.baz") == "foo_bar-baz" + + +def test_slugify_falls_back_to_project(): + assert _slugify("") == "project" + assert _slugify("!!!") == "project" + + +# ---- Deterministic prompt guards ---------------------------------------- + + +def test_ps_command_is_deterministic(): + """/ps must always call palinode_save with type=ProjectSnapshot, never session_end.""" + body = PS_COMMAND_BODY + assert "palinode_save" in body + assert '"ProjectSnapshot"' in body + assert "This command is deterministic" in body + assert "Do not call any other tool" in body + # Must NOT contain smart dispatch instructions + assert "palinode_session_end" not in body or "use `/wrap`" in body + assert "Pick the right tool" not in body + + +def test_wrap_command_is_deterministic(): + """/wrap must always call palinode_session_end, never palinode_save.""" + body = WRAP_COMMAND_BODY + assert "palinode_session_end" in body + assert "summary" in body + assert "decisions" in body + assert "blockers" in body + assert "This command is deterministic" in body + assert "Do not call any other tool" in body + # Must tell the agent what to say after saving + assert "safe to /clear now" in body + # Must NOT dispatch to palinode_save + assert "palinode_save" not in body or "use `/ps`" in body + + +def test_ps_and_wrap_are_different(): + """The two commands must be distinct operations, not aliases.""" + assert PS_COMMAND_BODY != WRAP_COMMAND_BODY + + +# ---- Scaffolding flow 
--------------------------------------------------- + + +def test_init_creates_all_files(tmp_path: Path): + runner = CliRunner() + result = runner.invoke(main, ["init", "--dir", str(tmp_path)]) + assert result.exit_code == 0, result.output + + assert (tmp_path / ".claude" / "CLAUDE.md").exists() + assert (tmp_path / ".claude" / "settings.json").exists() + assert (tmp_path / ".claude" / "hooks" / "palinode-session-end.sh").exists() + assert (tmp_path / ".claude" / "commands" / "ps.md").exists() + assert (tmp_path / ".claude" / "commands" / "wrap.md").exists() + assert (tmp_path / ".mcp.json").exists() + + +def test_init_uses_directory_name_as_slug(tmp_path: Path): + proj = tmp_path / "my-awesome-project" + proj.mkdir() + runner = CliRunner() + result = runner.invoke(main, ["init", "--dir", str(proj)]) + assert result.exit_code == 0 + + content = (proj / ".claude" / "CLAUDE.md").read_text() + assert "my-awesome-project" in content + + +def test_init_explicit_project_slug_wins(tmp_path: Path): + runner = CliRunner() + result = runner.invoke(main, [ + "init", "--dir", str(tmp_path), "--project", "custom-slug", + ]) + assert result.exit_code == 0 + + content = (tmp_path / ".claude" / "CLAUDE.md").read_text() + assert "custom-slug" in content + + +def test_init_dry_run_writes_nothing(tmp_path: Path): + runner = CliRunner() + result = runner.invoke(main, ["init", "--dir", str(tmp_path), "--dry-run"]) + assert result.exit_code == 0 + assert "dry-run" in result.output + + assert not (tmp_path / ".claude").exists() + assert not (tmp_path / ".mcp.json").exists() + + +def test_init_is_idempotent(tmp_path: Path): + runner = CliRunner() + first = runner.invoke(main, ["init", "--dir", str(tmp_path)]) + assert first.exit_code == 0 + + ps_content = (tmp_path / ".claude" / "commands" / "ps.md").read_text() + settings_content = (tmp_path / ".claude" / "settings.json").read_text() + + second = runner.invoke(main, ["init", "--dir", str(tmp_path)]) + assert second.exit_code == 0 + 
assert "skipped" in second.output + + # Files unchanged + assert (tmp_path / ".claude" / "commands" / "ps.md").read_text() == ps_content + assert (tmp_path / ".claude" / "settings.json").read_text() == settings_content + + +def test_init_appends_to_existing_claude_md(tmp_path: Path): + claude_md = tmp_path / ".claude" / "CLAUDE.md" + claude_md.parent.mkdir(parents=True) + claude_md.write_text("# Pre-existing header\n\nSome project rules here.\n") + + runner = CliRunner() + result = runner.invoke(main, ["init", "--dir", str(tmp_path), "--no-hook", "--no-mcp", "--no-slash"]) + assert result.exit_code == 0 + + content = claude_md.read_text() + assert "# Pre-existing header" in content + assert "Some project rules here." in content + assert "## Memory (Palinode)" in content + + +def test_init_merges_into_existing_settings_json(tmp_path: Path): + settings = tmp_path / ".claude" / "settings.json" + settings.parent.mkdir(parents=True) + settings.write_text(json.dumps({ + "hooks": {"PreToolUse": [{"hooks": [{"type": "command", "command": "other.sh"}]}]}, + "unrelated_key": "should_survive", + }, indent=2)) + + runner = CliRunner() + result = runner.invoke(main, ["init", "--dir", str(tmp_path), "--no-claudemd", "--no-mcp", "--no-slash"]) + assert result.exit_code == 0 + + merged = json.loads(settings.read_text()) + assert merged["unrelated_key"] == "should_survive" + assert "PreToolUse" in merged["hooks"] + assert "SessionEnd" in merged["hooks"] + assert len(merged["hooks"]["SessionEnd"]) == 1 + + +def test_init_scope_flags(tmp_path: Path): + """--no-claudemd --no-hook --no-mcp should only write slash commands.""" + runner = CliRunner() + result = runner.invoke(main, [ + "init", "--dir", str(tmp_path), + "--no-claudemd", "--no-hook", "--no-mcp", + ]) + assert result.exit_code == 0 + + assert not (tmp_path / ".claude" / "CLAUDE.md").exists() + assert not (tmp_path / ".claude" / "settings.json").exists() + assert not (tmp_path / ".claude" / "hooks").exists() + assert not 
(tmp_path / ".mcp.json").exists() + assert (tmp_path / ".claude" / "commands" / "ps.md").exists() + assert (tmp_path / ".claude" / "commands" / "wrap.md").exists() diff --git a/tests/test_content_hash_and_confidence.py b/tests/test_content_hash_and_confidence.py new file mode 100644 index 0000000..7b21851 --- /dev/null +++ b/tests/test_content_hash_and_confidence.py @@ -0,0 +1,153 @@ +"""Tests for #114 (content_hash in frontmatter) and #113 (confidence field).""" +import hashlib +import pytest +import yaml +from fastapi.testclient import TestClient +from palinode.api.server import app +from palinode.core.config import config +from unittest.mock import patch + +client = TestClient(app) + + +@pytest.fixture +def mock_memory_dir(tmp_path): + old_memory_dir = config.memory_dir + config.memory_dir = str(tmp_path) + yield str(tmp_path) + config.memory_dir = old_memory_dir + + +def _read_frontmatter(file_path: str) -> dict: + """Read a memory file and return its parsed frontmatter dict.""" + with open(file_path, "r") as f: + raw = f.read() + # Extract YAML between --- delimiters + parts = raw.split("---", 2) + assert len(parts) >= 3, f"Expected frontmatter delimiters in:\n{raw}" + return yaml.safe_load(parts[1]) + + +# --- #114: content_hash in frontmatter --- + +class TestContentHash: + def test_save_includes_full_sha256_content_hash(self, mock_memory_dir): + """content_hash should be full SHA-256 hex digest of body content.""" + body = "This is the memory body content" + expected_hash = hashlib.sha256(body.encode()).hexdigest() + + with patch("palinode.core.store.scan_memory_content", return_value=(True, "OK")): + res = client.post("/save", json={ + "content": body, + "type": "Insight", + }) + assert res.status_code == 200 + + fm = _read_frontmatter(res.json()["file_path"]) + assert fm["content_hash"] == expected_hash + assert len(fm["content_hash"]) == 64 # full SHA-256 hex = 64 chars + + def test_content_hash_changes_with_content(self, mock_memory_dir): + """Different 
content should produce different hashes.""" + with patch("palinode.core.store.scan_memory_content", return_value=(True, "OK")): + res1 = client.post("/save", json={ + "content": "first content", + "type": "Insight", + "slug": "hash-test-1", + }) + res2 = client.post("/save", json={ + "content": "second content", + "type": "Insight", + "slug": "hash-test-2", + }) + + fm1 = _read_frontmatter(res1.json()["file_path"]) + fm2 = _read_frontmatter(res2.json()["file_path"]) + assert fm1["content_hash"] != fm2["content_hash"] + + def test_content_hash_is_deterministic(self, mock_memory_dir): + """Same content should always produce same hash.""" + body = "deterministic test body" + expected = hashlib.sha256(body.encode()).hexdigest() + + with patch("palinode.core.store.scan_memory_content", return_value=(True, "OK")): + res = client.post("/save", json={ + "content": body, + "type": "Insight", + "slug": "determ-test", + }) + + fm = _read_frontmatter(res.json()["file_path"]) + assert fm["content_hash"] == expected + + +# --- #113: confidence field --- + +class TestConfidence: + def test_save_with_confidence(self, mock_memory_dir): + """When confidence is provided, it appears in frontmatter.""" + with patch("palinode.core.store.scan_memory_content", return_value=(True, "OK")): + res = client.post("/save", json={ + "content": "High confidence fact", + "type": "Insight", + "confidence": 0.95, + }) + assert res.status_code == 200 + + fm = _read_frontmatter(res.json()["file_path"]) + assert fm["confidence"] == 0.95 + + def test_save_without_confidence_omits_field(self, mock_memory_dir): + """When confidence is not provided, it should NOT appear in frontmatter.""" + with patch("palinode.core.store.scan_memory_content", return_value=(True, "OK")): + res = client.post("/save", json={ + "content": "No confidence specified", + "type": "Insight", + }) + assert res.status_code == 200 + + fm = _read_frontmatter(res.json()["file_path"]) + assert "confidence" not in fm + + def 
test_save_confidence_zero(self, mock_memory_dir): + """Confidence of 0.0 should still be written (not treated as falsy).""" + with patch("palinode.core.store.scan_memory_content", return_value=(True, "OK")): + res = client.post("/save", json={ + "content": "Zero confidence fact", + "type": "Insight", + "confidence": 0.0, + }) + assert res.status_code == 200 + + fm = _read_frontmatter(res.json()["file_path"]) + assert fm["confidence"] == 0.0 + + def test_save_confidence_one(self, mock_memory_dir): + """Confidence of 1.0 should be written.""" + with patch("palinode.core.store.scan_memory_content", return_value=(True, "OK")): + res = client.post("/save", json={ + "content": "Full confidence fact", + "type": "Decision", + "confidence": 1.0, + }) + assert res.status_code == 200 + + fm = _read_frontmatter(res.json()["file_path"]) + assert fm["confidence"] == 1.0 + + def test_confidence_roundtrips_through_parser(self, mock_memory_dir): + """Confidence in frontmatter should be readable via the parser.""" + from palinode.core.parser import parse_markdown + + with patch("palinode.core.store.scan_memory_content", return_value=(True, "OK")): + res = client.post("/save", json={ + "content": "Roundtrip test", + "type": "Insight", + "confidence": 0.75, + }) + + with open(res.json()["file_path"], "r") as f: + raw = f.read() + + metadata, _ = parse_markdown(raw) + assert metadata["confidence"] == 0.75 diff --git a/tests/test_context.py b/tests/test_context.py index 5c9029c..ea99d9b 100644 --- a/tests/test_context.py +++ b/tests/test_context.py @@ -11,13 +11,13 @@ def test_search_hybrid_context_boost(): with patch("palinode.core.store.search_fts") as mock_fts: with patch("palinode.core.store.get_db"): with patch("palinode.core.store.get_entity_files") as mock_entities: - # Two results: palinode file and acme-project file, initially ranked equally + # Two results: palinode file and other-project file, initially ranked equally mock_vec.return_value = [ {"file_path": 
"/mem/projects/palinode-adr.md", "content": "ADR-004", "score": 0.85}, - {"file_path": "/mem/projects/acme-adr.md", "content": "ADR-052", "score": 0.86}, + {"file_path": "/mem/projects/other-adr.md", "content": "ADR-052", "score": 0.86}, ] mock_fts.return_value = [ - {"file_path": "/mem/projects/acme-adr.md", "content": "ADR-052", "score": 0.7}, + {"file_path": "/mem/projects/other-adr.md", "content": "ADR-052", "score": 0.7}, {"file_path": "/mem/projects/palinode-adr.md", "content": "ADR-004", "score": 0.6}, ] # Entity lookup: project/palinode maps to the palinode file @@ -25,7 +25,7 @@ def test_search_hybrid_context_boost(): {"file_path": "/mem/projects/palinode-adr.md", "category": "projects", "last_seen": "2026-04-12"} ] - # Without context: acme-project should rank first (higher combined score) + # Without context: other-project should rank first (higher combined score) results_no_ctx = store.search_hybrid( "ADR-004", query_embedding=[0.0]*1024, top_k=2, threshold=0.0, context_entities=None, @@ -111,10 +111,10 @@ def test_search_vector_context_boost(): """Non-hybrid search should also apply context boost (#92).""" with patch("palinode.core.store.get_db") as mock_db: with patch("palinode.core.store.get_entity_files") as mock_entities: - # Simulate two vector results: acme ranks higher by raw cosine + # Simulate two vector results: kmd ranks higher by raw cosine mock_cursor = MagicMock() mock_cursor.fetchall.return_value = [ - {"id": 1, "file_path": "/mem/projects/acme-adr.md", "section_id": "root", + {"id": 1, "file_path": "/mem/projects/kmd-adr.md", "section_id": "root", "content": "ADR-052", "category": "projects", "metadata": "{}", "created_at": "2026-04-12", "distance": 0.3}, {"id": 2, "file_path": "/mem/projects/palinode-adr.md", "section_id": "root", @@ -128,13 +128,13 @@ def test_search_vector_context_boost(): {"file_path": "/mem/projects/palinode-adr.md", "category": "projects", "last_seen": "2026-04-12"} ] - # Without context: acme first (lower 
distance = higher score) + # Without context: kmd first (lower distance = higher score) results_no_ctx = store.search( query_embedding=[0.0]*1024, top_k=2, threshold=0.0, context_entities=None, ) assert len(results_no_ctx) == 2 - assert "acme" in results_no_ctx[0]["file_path"] + assert "kmd" in results_no_ctx[0]["file_path"] # With context: palinode should be boosted to first results_ctx = store.search( @@ -223,7 +223,7 @@ def test_mcp_resolve_context_short_name(): def test_mcp_resolve_context_from_cwd(): """CWD basename should resolve to project entity via auto-detect.""" from palinode.mcp import _resolve_context - with patch.dict("os.environ", {"CWD": "/home/user/projects/palinode"}, clear=False): + with patch.dict("os.environ", {"CWD": "/Users/admin/Code/palinode"}, clear=False): with patch.dict("os.environ", {}, clear=False): # Remove PALINODE_PROJECT if set import os @@ -249,7 +249,7 @@ def test_mcp_resolve_context_disabled(): def test_cli_resolve_context(): """CLI context resolver should work from CWD.""" from palinode.cli.search import _cli_resolve_context - with patch("os.getcwd", return_value="/home/user/projects/palinode"): + with patch("os.getcwd", return_value="/Users/admin/Code/palinode"): with patch.dict("os.environ", {}, clear=False): import os env = os.environ.copy() diff --git a/tests/test_daily_penalty.py b/tests/test_daily_penalty.py new file mode 100644 index 0000000..a8b142c --- /dev/null +++ b/tests/test_daily_penalty.py @@ -0,0 +1,244 @@ +"""Tests for #93: penalize daily/ files in search results.""" +import pytest +from unittest.mock import patch, MagicMock +from palinode.core import store +from palinode.core.config import config + + +def test_is_daily_file(): + """_is_daily_file should match daily/ paths in various forms.""" + assert store._is_daily_file("daily/2026-04-12.md") is True + assert store._is_daily_file("/home/user/palinode/daily/2026-04-12.md") is True + assert store._is_daily_file("projects/daily-standup.md") is False + assert 
store._is_daily_file("decisions/use-daily-builds.md") is False + assert store._is_daily_file("daily/notes/misc.md") is True + + +def test_hybrid_daily_penalty_demotes_daily_files(): + """Daily files should be penalized below real memories in hybrid search.""" + with patch("palinode.core.store.search") as mock_vec: + with patch("palinode.core.store.search_fts") as mock_fts: + with patch("palinode.core.store.get_db"): + original_penalty = config.search.daily_penalty + try: + config.search.daily_penalty = 0.3 + + # daily file ranks #1 in vec, real memory ranks #2 + mock_vec.return_value = [ + {"file_path": "daily/2026-04-12.md", "section_id": "root", + "content": "discussed palinode architecture", "score": 0.95}, + {"file_path": "projects/palinode.md", "section_id": "root", + "content": "palinode architecture overview", "score": 0.85}, + ] + mock_fts.return_value = [ + {"file_path": "daily/2026-04-12.md", "section_id": "root", + "content": "discussed palinode architecture", "score": 0.90}, + {"file_path": "projects/palinode.md", "section_id": "root", + "content": "palinode architecture overview", "score": 0.80}, + ] + + results = store.search_hybrid( + "palinode architecture", + query_embedding=[0.0] * 1024, + top_k=10, + threshold=0.0, + ) + + # Real memory should rank first after penalty + assert len(results) == 2 + assert results[0]["file_path"] == "projects/palinode.md" + assert results[1]["file_path"] == "daily/2026-04-12.md" + # Daily file score should be penalized + assert results[1]["score"] < results[0]["score"] + finally: + config.search.daily_penalty = original_penalty + + +def test_hybrid_daily_penalty_include_daily_skips_penalty(): + """include_daily=True should skip the penalty entirely.""" + with patch("palinode.core.store.search") as mock_vec: + with patch("palinode.core.store.search_fts") as mock_fts: + with patch("palinode.core.store.get_db"): + original_penalty = config.search.daily_penalty + try: + config.search.daily_penalty = 0.3 + + 
mock_vec.return_value = [ + {"file_path": "daily/2026-04-12.md", "section_id": "root", + "content": "discussed palinode architecture", "score": 0.95}, + {"file_path": "projects/palinode.md", "section_id": "root", + "content": "palinode architecture overview", "score": 0.85}, + ] + mock_fts.return_value = [] + + results = store.search_hybrid( + "palinode architecture", + query_embedding=[0.0] * 1024, + top_k=10, + threshold=0.0, + include_daily=True, + ) + + # Daily file should still rank first (no penalty applied) + assert len(results) == 2 + assert results[0]["file_path"] == "daily/2026-04-12.md" + finally: + config.search.daily_penalty = original_penalty + + +def test_hybrid_daily_penalty_one_means_no_penalty(): + """daily_penalty=1.0 should be a no-op (no score change).""" + with patch("palinode.core.store.search") as mock_vec: + with patch("palinode.core.store.search_fts") as mock_fts: + with patch("palinode.core.store.get_db"): + original_penalty = config.search.daily_penalty + try: + config.search.daily_penalty = 1.0 + + mock_vec.return_value = [ + {"file_path": "daily/2026-04-12.md", "section_id": "root", + "content": "session notes", "score": 0.95}, + {"file_path": "projects/palinode.md", "section_id": "root", + "content": "palinode overview", "score": 0.85}, + ] + mock_fts.return_value = [] + + results = store.search_hybrid( + "test", + query_embedding=[0.0] * 1024, + top_k=10, + threshold=0.0, + ) + + # Daily file should still rank first (penalty is 1.0 = no change) + assert len(results) == 2 + assert results[0]["file_path"] == "daily/2026-04-12.md" + finally: + config.search.daily_penalty = original_penalty + + +def test_vector_daily_penalty_demotes_daily_files(): + """Daily files should be penalized in vector-only search too.""" + with patch("palinode.core.store.get_db") as mock_db: + original_penalty = config.search.daily_penalty + try: + config.search.daily_penalty = 0.3 + + mock_cursor = MagicMock() + mock_cursor.fetchall.return_value = [ + {"id": 1, 
"file_path": "daily/2026-04-12.md", "section_id": "root", + "content": "session notes", "category": "daily", "metadata": "{}", + "created_at": "2026-04-12", "distance": 0.3}, + {"id": 2, "file_path": "projects/palinode.md", "section_id": "root", + "content": "palinode overview", "category": "projects", "metadata": "{}", + "created_at": "2026-04-12", "distance": 0.35}, + ] + mock_db.return_value.cursor.return_value = mock_cursor + mock_db.return_value.close = MagicMock() + + results = store.search( + query_embedding=[0.0] * 1024, + top_k=10, + threshold=0.0, + ) + + # After penalty, projects file should rank first + assert len(results) == 2 + assert results[0]["file_path"] == "projects/palinode.md" + assert results[1]["file_path"] == "daily/2026-04-12.md" + finally: + config.search.daily_penalty = original_penalty + + +def test_vector_daily_penalty_include_daily_skips(): + """include_daily=True should skip penalty in vector search.""" + with patch("palinode.core.store.get_db") as mock_db: + original_penalty = config.search.daily_penalty + try: + config.search.daily_penalty = 0.3 + + mock_cursor = MagicMock() + mock_cursor.fetchall.return_value = [ + {"id": 1, "file_path": "daily/2026-04-12.md", "section_id": "root", + "content": "session notes", "category": "daily", "metadata": "{}", + "created_at": "2026-04-12", "distance": 0.3}, + {"id": 2, "file_path": "projects/palinode.md", "section_id": "root", + "content": "palinode overview", "category": "projects", "metadata": "{}", + "created_at": "2026-04-12", "distance": 0.35}, + ] + mock_db.return_value.cursor.return_value = mock_cursor + mock_db.return_value.close = MagicMock() + + results = store.search( + query_embedding=[0.0] * 1024, + top_k=10, + threshold=0.0, + include_daily=True, + ) + + # Daily file should remain first (no penalty) + assert len(results) == 2 + assert results[0]["file_path"] == "daily/2026-04-12.md" + finally: + config.search.daily_penalty = original_penalty + + +def 
test_daily_penalty_config_default(): + """SearchConfig should have daily_penalty default of 0.3.""" + assert config.search.daily_penalty == 0.3 + + +def test_no_daily_files_unaffected(): + """When no daily files in results, penalty logic is a no-op.""" + with patch("palinode.core.store.search") as mock_vec: + with patch("palinode.core.store.search_fts") as mock_fts: + with patch("palinode.core.store.get_db"): + mock_vec.return_value = [ + {"file_path": "projects/palinode.md", "section_id": "root", + "content": "overview", "score": 0.95}, + {"file_path": "decisions/adr-001.md", "section_id": "root", + "content": "decision", "score": 0.85}, + ] + mock_fts.return_value = [] + + results = store.search_hybrid( + "test", + query_embedding=[0.0] * 1024, + top_k=10, + threshold=0.0, + ) + + # Order unchanged, no daily files to penalize + assert len(results) == 2 + assert results[0]["file_path"] == "projects/palinode.md" + assert results[1]["file_path"] == "decisions/adr-001.md" + + +def test_daily_penalty_absolute_path(): + """Daily penalty should work with absolute file paths containing /daily/.""" + with patch("palinode.core.store.search") as mock_vec: + with patch("palinode.core.store.search_fts") as mock_fts: + with patch("palinode.core.store.get_db"): + original_penalty = config.search.daily_penalty + try: + config.search.daily_penalty = 0.3 + + mock_vec.return_value = [ + {"file_path": "/home/user/palinode/daily/2026-04-12.md", + "section_id": "root", "content": "session notes", "score": 0.95}, + {"file_path": "/home/user/palinode/projects/palinode.md", + "section_id": "root", "content": "overview", "score": 0.85}, + ] + mock_fts.return_value = [] + + results = store.search_hybrid( + "test", + query_embedding=[0.0] * 1024, + top_k=10, + threshold=0.0, + ) + + # Absolute path with /daily/ should also be penalized + assert results[0]["file_path"] == "/home/user/palinode/projects/palinode.md" + finally: + config.search.daily_penalty = original_penalty diff --git 
a/tests/test_description.py b/tests/test_description.py new file mode 100644 index 0000000..bb8a011 --- /dev/null +++ b/tests/test_description.py @@ -0,0 +1,71 @@ +"""Tests for auto-generated description field on save (M0).""" +from unittest import mock + +from palinode.api.server import _generate_description, _extract_first_line + + +def test_extract_first_line_basic(): + assert _extract_first_line("Hello world\nSecond line") == "Hello world" + + +def test_extract_first_line_strips_headers(): + assert _extract_first_line("# My Title\nBody text") == "My Title" + assert _extract_first_line("## Sub heading\nBody") == "Sub heading" + + +def test_extract_first_line_skips_blank(): + assert _extract_first_line("\n\n \nActual content") == "Actual content" + + +def test_extract_first_line_truncates(): + long = "A" * 300 + result = _extract_first_line(long, max_chars=150) + assert len(result) == 150 + + +def test_extract_first_line_empty(): + assert _extract_first_line("") == "" + assert _extract_first_line(" \n \n ") == "" + + +def test_generate_description_fallback_on_connection_error(): + """When Ollama is unreachable, falls back to first-line extraction.""" + with mock.patch("palinode.api.server.httpx.post", side_effect=ConnectionError("offline")): + result = _generate_description("Decision to use SQLite for storage.\nMore details here.") + assert result == "Decision to use SQLite for storage." + + +def test_generate_description_uses_llm_when_available(): + """When Ollama responds, uses the LLM description.""" + mock_resp = mock.Mock() + mock_resp.status_code = 200 + mock_resp.raise_for_status = mock.Mock() + mock_resp.json.return_value = {"response": "A decision about database storage."} + + with mock.patch("palinode.api.server.httpx.post", return_value=mock_resp): + result = _generate_description("Decision to use SQLite for storage.\nMore details here.") + assert result == "A decision about database storage." 
+ + +def test_generate_description_truncates_long_llm_response(): + """LLM responses longer than 150 chars get truncated.""" + mock_resp = mock.Mock() + mock_resp.status_code = 200 + mock_resp.raise_for_status = mock.Mock() + mock_resp.json.return_value = {"response": "X" * 200} + + with mock.patch("palinode.api.server.httpx.post", return_value=mock_resp): + result = _generate_description("Some content") + assert len(result) == 150 + + +def test_generate_description_empty_llm_response_falls_back(): + """Empty LLM response triggers first-line fallback.""" + mock_resp = mock.Mock() + mock_resp.status_code = 200 + mock_resp.raise_for_status = mock.Mock() + mock_resp.json.return_value = {"response": ""} + + with mock.patch("palinode.api.server.httpx.post", return_value=mock_resp): + result = _generate_description("# Important Decision\nWe chose option A.") + assert result == "Important Decision" diff --git a/tests/test_entity_normalization.py b/tests/test_entity_normalization.py new file mode 100644 index 0000000..762c60d --- /dev/null +++ b/tests/test_entity_normalization.py @@ -0,0 +1,42 @@ +"""Tests for entity normalization on the save path (M0).""" +from palinode.api.server import _normalize_entities + + +def test_bare_string_gets_category_prefix(): + assert _normalize_entities(["alice"], "people") == ["person/alice"] + + +def test_bare_string_project_category(): + assert _normalize_entities(["palinode"], "projects") == ["project/palinode"] + + +def test_already_prefixed_unchanged(): + assert _normalize_entities(["person/alice"], "people") == ["person/alice"] + assert _normalize_entities(["project/palinode"], "insights") == ["project/palinode"] + + +def test_mixed_bare_and_prefixed(): + result = _normalize_entities(["alice", "project/palinode"], "people") + assert result == ["person/alice", "project/palinode"] + + +def test_unknown_category_defaults_to_project(): + assert _normalize_entities(["foo"], "unknown_cat") == ["project/foo"] + + +def test_empty_list(): + 
assert _normalize_entities([], "people") == [] + + +def test_all_categories(): + cases = [ + ("people", "person"), + ("decisions", "decision"), + ("projects", "project"), + ("insights", "insight"), + ("research", "research"), + ("inbox", "action"), + ] + for category, expected_prefix in cases: + result = _normalize_entities(["test"], category) + assert result == [f"{expected_prefix}/test"], f"Failed for {category}" diff --git a/tests/test_lint.py b/tests/test_lint.py index f0475ea..4f37b01 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -41,16 +41,80 @@ def test_lint_pass(tmp_path, monkeypatch): contra_dup.write_text("---\nid: people-contra-dup\ncategory: people\ntype: Person\nstatus: active\n---\nBody", encoding="utf-8") result = run_lint_pass() - + # Validate missing fields assert any(mf["file"].endswith("missing1.md") and "id" in mf["missing"] for mf in result["missing_fields"]) - - # Validate stale + + # Validate stale assert any(sf["file"].endswith("stale1.md") for sf in result["stale_files"]) - + # Validate orphan assert any(of.endswith("orphan1.md") for of in result["orphaned_files"]) assert not any(of.endswith("stale1.md") for of in result["orphaned_files"]) # referenced! 
- + # Validate contradictions assert any(ct["entity"] == "people/contra" for ct in result["contradictions"]) + + # M0: Validate new keys exist + assert "missing_entities" in result + assert "missing_descriptions" in result + assert "core_count" in result + + +def test_lint_missing_entities(tmp_path, monkeypatch): + """Files without entities should be flagged.""" + monkeypatch.setattr(config, "memory_dir", str(tmp_path)) + people_dir = tmp_path / "people" + people_dir.mkdir() + + # File with no entities + (people_dir / "lonely.md").write_text( + "---\nid: people-lonely\ncategory: people\ntype: Person\n---\nBody" + ) + # File with entities + (people_dir / "linked.md").write_text( + "---\nid: people-linked\ncategory: people\ntype: Person\nentities:\n - project/foo\n---\nBody" + ) + + result = run_lint_pass() + assert any("lonely.md" in f for f in result["missing_entities"]) + assert not any("linked.md" in f for f in result["missing_entities"]) + + +def test_lint_missing_descriptions(tmp_path, monkeypatch): + """Files without description should be flagged.""" + monkeypatch.setattr(config, "memory_dir", str(tmp_path)) + insights_dir = tmp_path / "insights" + insights_dir.mkdir() + + # No description + (insights_dir / "nodesc.md").write_text( + "---\nid: insights-nodesc\ncategory: insights\ntype: Insight\n---\nBody" + ) + # Has description + (insights_dir / "hasdesc.md").write_text( + "---\nid: insights-hasdesc\ncategory: insights\ntype: Insight\ndescription: A useful insight\n---\nBody" + ) + + result = run_lint_pass() + assert any("nodesc.md" in f for f in result["missing_descriptions"]) + assert not any("hasdesc.md" in f for f in result["missing_descriptions"]) + + +def test_lint_core_count(tmp_path, monkeypatch): + """Core count should tally files with core: true.""" + monkeypatch.setattr(config, "memory_dir", str(tmp_path)) + projects_dir = tmp_path / "projects" + projects_dir.mkdir() + + # Core file + (projects_dir / "important.md").write_text( + "---\nid: 
projects-important\ncategory: projects\ntype: ProjectSnapshot\ncore: true\n---\nBody" + ) + # Non-core file + (projects_dir / "normal.md").write_text( + "---\nid: projects-normal\ncategory: projects\ntype: ProjectSnapshot\n---\nBody" + ) + + result = run_lint_pass() + assert result["core_count"] == 1 diff --git a/tests/test_migrate_openclaw.py b/tests/test_migrate_openclaw.py new file mode 100644 index 0000000..8178e9b --- /dev/null +++ b/tests/test_migrate_openclaw.py @@ -0,0 +1,254 @@ +"""Tests for palinode.migration.openclaw — OpenClaw MEMORY.md import.""" +from __future__ import annotations + +import os +import textwrap +from pathlib import Path +from unittest.mock import patch + +import pytest + +from palinode.migration.openclaw import ( + _detect_type, + _slugify, + parse_memory_md, + run_migration, +) + +# ── Fixtures ───────────────────────────────────────────────────────────────── + +SAMPLE_MEMORY_MD = textwrap.dedent("""\ + # Memory + + Some preamble before any sections. + + ## Alice the Engineer + Alice is a senior engineer on the platform team. She owns the deploy pipeline. + She prefers async communication. + + ## Chose PostgreSQL over MySQL + We decided to use PostgreSQL because it has better JSON support + and the team already knew it. Rationale: cost + familiarity. + + ## Project Alpha roadmap + Project Alpha covers the new billing sprint. Current milestone: v1.2. + Tasks include migrating the payment service. + + ## Useful debugging trick + When the API hangs, run `strace -p ` to see which syscall is blocking. 
+""") + + +@pytest.fixture() +def memory_md_file(tmp_path: Path) -> Path: + p = tmp_path / "MEMORY.md" + p.write_text(SAMPLE_MEMORY_MD, encoding="utf-8") + return p + + +@pytest.fixture() +def fake_memory_dir(tmp_path: Path) -> Path: + mem = tmp_path / "palinode" + mem.mkdir() + # Minimal git repo so the git commit call doesn't crash + os.system(f"git init -q {mem} 2>/dev/null") + os.system(f"git -C {mem} config user.email 'test@test.com' 2>/dev/null") + os.system(f"git -C {mem} config user.name 'Test' 2>/dev/null") + return mem + + +# ── parse_memory_md ─────────────────────────────────────────────────────────── + +def test_parse_sections(memory_md_file: Path) -> None: + sections = parse_memory_md(str(memory_md_file)) + assert len(sections) == 4 + headings = [s["heading"] for s in sections] + assert "Alice the Engineer" in headings + assert "Chose PostgreSQL over MySQL" in headings + assert "Project Alpha roadmap" in headings + assert "Useful debugging trick" in headings + + +def test_parse_body_content(memory_md_file: Path) -> None: + sections = parse_memory_md(str(memory_md_file)) + alice = next(s for s in sections if s["heading"] == "Alice the Engineer") + assert "senior engineer" in alice["body"] + assert "deploy pipeline" in alice["body"] + + +def test_parse_empty_file(tmp_path: Path) -> None: + p = tmp_path / "empty.md" + p.write_text("", encoding="utf-8") + sections = parse_memory_md(str(p)) + assert sections == [] + + +def test_parse_no_sections(tmp_path: Path) -> None: + p = tmp_path / "flat.md" + p.write_text("Just some flat text with no headings.", encoding="utf-8") + sections = parse_memory_md(str(p)) + assert sections == [] + + +# ── _detect_type ───────────────────────────────────────────────────────────── + +def test_type_person_from_heading() -> None: + assert _detect_type("Who is Bob", "Bob is a developer") == "person" + + +def test_type_person_from_body() -> None: + assert _detect_type("Bob", "Bob is a person on the team") == "person" + + +def 
test_type_decision_decided() -> None: + assert _detect_type("Database choice", "We decided to use Postgres") == "decision" + + +def test_type_decision_because() -> None: + assert _detect_type("Framework", "Chose FastAPI because it is fast") == "decision" + + +def test_type_project() -> None: + assert _detect_type("Alpha Sprint", "Project roadmap for the sprint") == "project" + + +def test_type_insight_fallback() -> None: + assert _detect_type("Debugging tip", "Run strace to see syscalls") == "insight" + + +def test_type_person_wins_over_decision() -> None: + # Person keyword takes priority even when decision words also present + result = _detect_type("Who decided", "This person decided to quit") + assert result == "person" + + +# ── _slugify ───────────────────────────────────────────────────────────────── + +def test_slugify_basic() -> None: + assert _slugify("Hello World") == "hello-world" + + +def test_slugify_special_chars() -> None: + slug = _slugify("Chose PostgreSQL over MySQL!") + assert slug == "chose-postgresql-over-mysql" + + +def test_slugify_truncates() -> None: + long_heading = "a" * 100 + assert len(_slugify(long_heading)) <= 60 + + +# ── run_migration — dry_run ─────────────────────────────────────────────────── + +def test_dry_run_creates_no_files( + memory_md_file: Path, fake_memory_dir: Path +) -> None: + with patch("palinode.migration.openclaw.config") as mock_cfg: + mock_cfg.memory_dir = str(fake_memory_dir) + result = run_migration(str(memory_md_file), dry_run=True) + + assert result["dry_run"] is True + assert result["sections_found"] == 4 + assert len(result["files_created"]) == 4 + assert result["log_file"] is None + + # Nothing written to disk + for subdir in ("people", "decisions", "projects", "insights"): + assert not (fake_memory_dir / subdir).exists() + + +def test_dry_run_reports_correct_types( + memory_md_file: Path, fake_memory_dir: Path +) -> None: + with patch("palinode.migration.openclaw.config") as mock_cfg: + 
mock_cfg.memory_dir = str(fake_memory_dir) + result = run_migration(str(memory_md_file), dry_run=True) + + paths = result["files_created"] + subdirs = {p.split("/")[0] for p in paths} + assert "people" in subdirs + assert "decisions" in subdirs + assert "projects" in subdirs + assert "insights" in subdirs + + +# ── run_migration — real write ──────────────────────────────────────────────── + +def test_migration_writes_files( + memory_md_file: Path, fake_memory_dir: Path +) -> None: + with patch("palinode.migration.openclaw.config") as mock_cfg: + mock_cfg.memory_dir = str(fake_memory_dir) + result = run_migration(str(memory_md_file), dry_run=False) + + assert result["sections_found"] == 4 + assert len(result["files_created"]) == 4 + assert len(result["files_skipped"]) == 0 + + for rel_path in result["files_created"]: + abs_path = fake_memory_dir / rel_path + assert abs_path.exists(), f"Expected {abs_path} to exist" + content = abs_path.read_text(encoding="utf-8") + assert content.startswith("---\n") + assert "source: openclaw-migration" in content + + +def test_migration_creates_log_file( + memory_md_file: Path, fake_memory_dir: Path +) -> None: + with patch("palinode.migration.openclaw.config") as mock_cfg: + mock_cfg.memory_dir = str(fake_memory_dir) + result = run_migration(str(memory_md_file), dry_run=False) + + assert result["log_file"] is not None + log_path = fake_memory_dir / result["log_file"] + assert log_path.exists() + log_content = log_path.read_text(encoding="utf-8") + assert "OpenClaw Migration" in log_content + assert "Files created: 4" in log_content + + +def test_migration_frontmatter_fields( + memory_md_file: Path, fake_memory_dir: Path +) -> None: + import yaml as _yaml + + with patch("palinode.migration.openclaw.config") as mock_cfg: + mock_cfg.memory_dir = str(fake_memory_dir) + result = run_migration(str(memory_md_file), dry_run=False) + + for rel_path in result["files_created"]: + abs_path = fake_memory_dir / rel_path + content = 
abs_path.read_text(encoding="utf-8") + # Strip the --- delimiters and parse + fm_block = content.split("---\n")[1] + fm = _yaml.safe_load(fm_block) + assert "id" in fm + assert "category" in fm + assert "name" in fm + assert "last_updated" in fm + assert fm["source"] == "openclaw-migration" + + +# ── Deduplication ───────────────────────────────────────────────────────────── + +def test_deduplication_skips_identical_content( + memory_md_file: Path, fake_memory_dir: Path +) -> None: + with patch("palinode.migration.openclaw.config") as mock_cfg: + mock_cfg.memory_dir = str(fake_memory_dir) + result1 = run_migration(str(memory_md_file), dry_run=False) + result2 = run_migration(str(memory_md_file), dry_run=False) + + # Second run: all should be skipped, none created + assert len(result2["files_created"]) == 0 + assert len(result2["files_skipped"]) == 4 + + +# ── Path validation ─────────────────────────────────────────────────────────── + +def test_rejects_null_byte_in_path() -> None: + from palinode.migration.openclaw import _validate_source_path + + with pytest.raises(ValueError, match="Null bytes"): + _validate_source_path("/some/path\x00evil") diff --git a/tests/test_parser.py b/tests/test_parser.py index daf2e13..09d3e1f 100644 --- a/tests/test_parser.py +++ b/tests/test_parser.py @@ -1,4 +1,4 @@ -from palinode.core.parser import parse_markdown +from palinode.core.parser import parse_markdown, _build_canonical_question_prefix def test_parse_frontmatter(): content = """--- @@ -43,3 +43,94 @@ def test_parse_splits_sections_if_long(): assert len(sections) >= 2 assert any(s["section_id"] == "section-1" for s in sections) assert any(s["section_id"] == "section-2" for s in sections) + + +# ── canonical_question tests ────────────────────────────────────────────── + +def test_build_canonical_question_prefix_string(): + prefix = _build_canonical_question_prefix({"canonical_question": "What is auth?"}) + assert prefix == "Q: What is auth?\n\n" + +def 
test_build_canonical_question_prefix_list(): + prefix = _build_canonical_question_prefix({ + "canonical_question": ["What is auth?", "How does login work?"] + }) + assert prefix == "Q: What is auth?\nQ: How does login work?\n\n" + +def test_build_canonical_question_prefix_absent(): + assert _build_canonical_question_prefix({}) == "" + assert _build_canonical_question_prefix({"id": "x"}) == "" + +def test_build_canonical_question_prefix_empty_string(): + assert _build_canonical_question_prefix({"canonical_question": ""}) == "" + +def test_build_canonical_question_prefix_empty_list(): + assert _build_canonical_question_prefix({"canonical_question": []}) == "" + +def test_build_canonical_question_prefix_non_string_type(): + assert _build_canonical_question_prefix({"canonical_question": 42}) == "" + + +def test_canonical_question_prepended_short_doc(): + content = """--- +id: auth-decision +canonical_question: What did we decide about authentication? +--- +We chose JWT tokens for stateless auth. +""" + meta, sections = parse_markdown(content) + assert len(sections) == 1 + assert sections[0]["content"].startswith("Q: What did we decide about authentication?\n\n") + assert "JWT tokens" in sections[0]["content"] + + +def test_canonical_question_list_prepended_short_doc(): + content = """--- +id: auth-decision +canonical_question: + - What did we decide about authentication? + - How does login work? +--- +We chose JWT tokens for stateless auth. +""" + meta, sections = parse_markdown(content) + assert sections[0]["content"].startswith("Q: What did we decide about authentication?\n") + assert "Q: How does login work?" in sections[0]["content"] + assert "JWT tokens" in sections[0]["content"] + + +def test_canonical_question_prepended_long_doc(): + """For long docs with sections, canonical_question is prepended to the first chunk only.""" + preamble = "a" * 2000 + content = f"""--- +id: test-long +canonical_question: Why is the preamble so long? 
+--- +{preamble} + +## Section 1 + +Content 1 + +## Section 2 + +Content 2 +""" + meta, sections = parse_markdown(content) + # First chunk (root/preamble) should have the prefix + first = sections[0] + assert first["content"].startswith("Q: Why is the preamble so long?\n\n") + # Later sections should NOT have the prefix + for sec in sections[1:]: + assert not sec["content"].startswith("Q:") + + +def test_no_canonical_question_leaves_content_unchanged(): + content = """--- +id: plain +--- +Just some content. +""" + meta, sections = parse_markdown(content) + assert not sections[0]["content"].startswith("Q:") + assert "Just some content." in sections[0]["content"] diff --git a/tests/test_scope_chain.py b/tests/test_scope_chain.py new file mode 100644 index 0000000..bcf7001 --- /dev/null +++ b/tests/test_scope_chain.py @@ -0,0 +1,138 @@ +"""Tests for ADR-009 Layer 1 scope chain resolution. + +This slice covers pure resolution: ScopeChain shape, ordering, empty +behavior, env var precedence, and backwards compatibility with the +pre-scope default. 
+""" +from __future__ import annotations + +import pytest + +import palinode.core.config as config_module +from palinode.core.scope import ScopeChain, resolve_scope_chain + + +# ---------- ScopeChain shape and serialization ---------- + + +def test_scope_chain_default_is_empty(): + chain = ScopeChain() + assert chain.is_empty() + assert chain.as_list() == [] + + +def test_scope_chain_narrow_to_broad_order(): + chain = ScopeChain( + session="abc", + agent="researcher", + harness="claude-code", + project="palinode", + member="paul", + org="phasespace", + ) + assert chain.as_list() == [ + "session/abc", + "agent/researcher", + "harness/claude-code", + "project/palinode", + "member/paul", + "org/phasespace", + ] + + +def test_scope_chain_drops_unset_levels(): + chain = ScopeChain(project="palinode", member="paul") + assert chain.as_list() == ["project/palinode", "member/paul"] + + +def test_scope_chain_is_frozen(): + chain = ScopeChain(project="palinode") + with pytest.raises((AttributeError, Exception)): + chain.project = "other" # type: ignore[misc] + + +# ---------- resolve_scope_chain from config ---------- + + +def _fresh_config(monkeypatch, env: dict[str, str] | None = None) -> config_module.Config: + """Build a fresh config that reads the current env vars. + + load_config() reads env vars on every call, so no module reload is needed. + Reloading would replace the module-level ``config`` singleton and break + any other module that holds a reference to the original instance. 
+ """ + for key in ( + "PALINODE_ORG", + "PALINODE_MEMBER", + "PALINODE_HARNESS", + "PALINODE_AGENT", + ): + monkeypatch.delenv(key, raising=False) + for key, value in (env or {}).items(): + monkeypatch.setenv(key, value) + return config_module.load_config() + + +def test_resolve_scope_chain_defaults_empty(monkeypatch): + cfg = _fresh_config(monkeypatch) + chain = resolve_scope_chain(cfg) + assert chain.is_empty() + + +def test_resolve_scope_chain_reads_env_vars(monkeypatch): + cfg = _fresh_config( + monkeypatch, + env={ + "PALINODE_ORG": "phasespace", + "PALINODE_MEMBER": "paul", + "PALINODE_HARNESS": "claude-code", + }, + ) + chain = resolve_scope_chain(cfg, project="palinode", session_id="s1") + assert chain.as_list() == [ + "session/s1", + "harness/claude-code", + "project/palinode", + "member/paul", + "org/phasespace", + ] + + +def test_resolve_scope_chain_multi_agent(monkeypatch): + cfg = _fresh_config( + monkeypatch, + env={ + "PALINODE_MEMBER": "paul", + "PALINODE_AGENT": "researcher", + }, + ) + chain = resolve_scope_chain(cfg, project="palinode") + assert "agent/researcher" in chain.as_list() + assert chain.as_list().index("agent/researcher") < chain.as_list().index("project/palinode") + + +def test_resolve_scope_chain_project_passed_by_caller(monkeypatch): + """The project level comes from the caller (ADR-008 detection), not config env.""" + cfg = _fresh_config(monkeypatch, env={"PALINODE_MEMBER": "paul"}) + a = resolve_scope_chain(cfg, project="palinode") + b = resolve_scope_chain(cfg, project="other-project") + assert "project/palinode" in a.as_list() + assert "project/other-project" in b.as_list() + assert "project/palinode" not in b.as_list() + + +def test_scope_config_defaults_are_none(monkeypatch): + cfg = _fresh_config(monkeypatch) + assert cfg.scope.org is None + assert cfg.scope.member is None + assert cfg.scope.harness is None + assert cfg.scope.agent is None + assert cfg.scope.enabled is False + assert cfg.scope.prime_mode == "classic" + + 
+def test_backwards_compat_no_scope_config_in_yaml(monkeypatch): + """Existing installs with no scope: block in their YAML must still load.""" + cfg = _fresh_config(monkeypatch) + assert hasattr(cfg, "scope") + assert resolve_scope_chain(cfg).is_empty() diff --git a/tests/test_search_dedup.py b/tests/test_search_dedup.py new file mode 100644 index 0000000..f8d0b3e --- /dev/null +++ b/tests/test_search_dedup.py @@ -0,0 +1,150 @@ +"""Tests for #91: deduplicate search results by file (score-gap based).""" +import pytest +from unittest.mock import patch, MagicMock +from palinode.core import store +from palinode.core.config import config + + +def test_dedup_suppresses_low_scoring_chunks(): + """Chunks far below the file's best score should be suppressed. + + RRF compresses rank-based scores into a narrow band, so we use a tight + gap (0.01) and wide rank separation to create a meaningful score delta. + README intro appears at rank 1 in both vec+fts (strong signal), while + README faq only appears deep in one list (weak signal). 
+ """ + with patch("palinode.core.store.search") as mock_vec: + with patch("palinode.core.store.search_fts") as mock_fts: + with patch("palinode.core.store.get_db"): + original_gap = config.search.dedup_score_gap + try: + # Tight gap so that only chunks with near-identical RRF scores survive + config.search.dedup_score_gap = 0.01 + + # README intro ranks #1 in both lists → high combined RRF + # README faq ranks last in vec only → much lower RRF + # guide.md ranks #2 in both → second highest + mock_vec.return_value = [ + {"file_path": "README.md", "section_id": "intro", "content": "intro", "score": 0.95}, + {"file_path": "guide.md", "section_id": "root", "content": "guide", "score": 0.90}, + {"file_path": "other1.md", "section_id": "root", "content": "o1", "score": 0.80}, + {"file_path": "other2.md", "section_id": "root", "content": "o2", "score": 0.70}, + {"file_path": "other3.md", "section_id": "root", "content": "o3", "score": 0.60}, + {"file_path": "other4.md", "section_id": "root", "content": "o4", "score": 0.50}, + {"file_path": "other5.md", "section_id": "root", "content": "o5", "score": 0.40}, + {"file_path": "other6.md", "section_id": "root", "content": "o6", "score": 0.30}, + {"file_path": "README.md", "section_id": "faq", "content": "faq", "score": 0.20}, + ] + mock_fts.return_value = [ + {"file_path": "README.md", "section_id": "intro", "content": "intro", "score": 0.90}, + {"file_path": "guide.md", "section_id": "root", "content": "guide", "score": 0.80}, + ] + + results = store.search_hybrid("setup", query_embedding=[0.0]*1024, top_k=10, threshold=0.0) + + readme_results = [r for r in results if r["file_path"] == "README.md"] + # faq chunk (rank 9 vec-only) should be suppressed vs intro (rank 1 in both) + assert len(readme_results) == 1 + assert readme_results[0]["section_id"] == "intro" + assert any(r["file_path"] == "guide.md" for r in results) + finally: + config.search.dedup_score_gap = original_gap + + +def test_dedup_keeps_close_scoring_chunks(): 
+ """Chunks within the score gap of the file's best should be kept.""" + with patch("palinode.core.store.search") as mock_vec: + with patch("palinode.core.store.search_fts") as mock_fts: + with patch("palinode.core.store.get_db"): + # Two chunks from same file, both ranked high in vector results + # After RRF normalization, they'll be close in score + mock_vec.return_value = [ + {"file_path": "notes.md", "section_id": "s1", "content": "chunk 1", "score": 0.95}, + {"file_path": "notes.md", "section_id": "s2", "content": "chunk 2", "score": 0.93}, + ] + mock_fts.return_value = [ + {"file_path": "notes.md", "section_id": "s1", "content": "chunk 1", "score": 0.90}, + {"file_path": "notes.md", "section_id": "s2", "content": "chunk 2", "score": 0.88}, + ] + + results = store.search_hybrid("test", query_embedding=[0.0]*1024, top_k=10, threshold=0.0) + + # Both chunks should survive — their scores are very close + assert len(results) == 2 + + +def test_dedup_respects_top_k_after_filtering(): + """top_k should limit total results after dedup.""" + with patch("palinode.core.store.search") as mock_vec: + with patch("palinode.core.store.search_fts") as mock_fts: + with patch("palinode.core.store.get_db"): + mock_vec.return_value = [ + {"file_path": "a.md", "section_id": "root", "content": "a", "score": 0.9}, + {"file_path": "b.md", "section_id": "root", "content": "b", "score": 0.8}, + {"file_path": "c.md", "section_id": "root", "content": "c", "score": 0.7}, + {"file_path": "d.md", "section_id": "root", "content": "d", "score": 0.6}, + ] + mock_fts.return_value = [] + + results = store.search_hybrid("test", query_embedding=[0.0]*1024, top_k=2, threshold=0.0) + + assert len(results) == 2 + assert results[0]["file_path"] == "a.md" + assert results[1]["file_path"] == "b.md" + + +def test_dedup_single_chunk_files_unchanged(): + """Files with only one chunk each should pass through unchanged.""" + with patch("palinode.core.store.search") as mock_vec: + with 
patch("palinode.core.store.search_fts") as mock_fts: + with patch("palinode.core.store.get_db"): + mock_vec.return_value = [ + {"file_path": "a.md", "content": "a", "score": 0.9}, + {"file_path": "b.md", "content": "b", "score": 0.8}, + {"file_path": "c.md", "content": "c", "score": 0.7}, + ] + mock_fts.return_value = [ + {"file_path": "b.md", "content": "b", "score": 0.9}, + {"file_path": "a.md", "content": "a", "score": 0.7}, + ] + + results = store.search_hybrid("test", query_embedding=[0.0]*1024, top_k=10, threshold=0.0) + + assert len(results) == 3 + fps = {r["file_path"] for r in results} + assert fps == {"a.md", "b.md", "c.md"} + + +def test_dedup_configurable_gap(): + """The score gap threshold should be configurable.""" + with patch("palinode.core.store.search") as mock_vec: + with patch("palinode.core.store.search_fts") as mock_fts: + with patch("palinode.core.store.get_db"): + # Two chunks: rank 1 and rank 2 in vector results only + # RRF scores: rank1 = 1/61, rank2 = 1/62 + # Normalized: rank1 = 1.0, rank2 = 61/62 ≈ 0.984 + # Gap = 0.016 — within any reasonable threshold + mock_vec.return_value = [ + {"file_path": "f.md", "section_id": "s1", "content": "best", "score": 0.95}, + {"file_path": "f.md", "section_id": "s2", "content": "second", "score": 0.90}, + ] + mock_fts.return_value = [] + + # With gap=0.0, only exact ties kept → only best chunk + original_gap = config.search.dedup_score_gap + try: + config.search.dedup_score_gap = 0.0 + results_strict = store.search_hybrid( + "test", query_embedding=[0.0]*1024, top_k=10, threshold=0.0 + ) + + # With gap=1.0, everything kept + config.search.dedup_score_gap = 1.0 + results_loose = store.search_hybrid( + "test", query_embedding=[0.0]*1024, top_k=10, threshold=0.0 + ) + finally: + config.search.dedup_score_gap = original_gap + + assert len(results_strict) == 1 + assert len(results_loose) == 2 diff --git a/tests/test_session_end.py b/tests/test_session_end.py new file mode 100644 index 0000000..95224ff --- 
/dev/null +++ b/tests/test_session_end.py @@ -0,0 +1,68 @@ +"""Tests for session-end dual-write: daily append + individual file (M0).""" +import os +from unittest import mock + +from palinode.core.config import config + + +def test_session_end_creates_daily_and_individual(tmp_path, monkeypatch): + """session_end_api should write to daily/ AND create an individual memory file.""" + memory_dir = str(tmp_path) + monkeypatch.setattr(config, "memory_dir", memory_dir) + + # Disable git auto-commit for this test + monkeypatch.setattr(config.git, "auto_commit", False) + + # Mock the description generator to avoid Ollama dependency + with mock.patch("palinode.api.server._generate_description", return_value="Test session summary"): + from palinode.api.server import session_end_api, SessionEndRequest + + req = SessionEndRequest( + summary="Implemented entity normalization", + decisions=["Use category-based prefix inference"], + blockers=["Need to test with live Ollama"], + project="palinode", + source="test", + ) + result = session_end_api(req) + + # Daily file should exist + daily_file = result["daily_file"] + daily_path = os.path.join(memory_dir, daily_file) + assert os.path.exists(daily_path), f"Daily file not found: {daily_path}" + daily_content = open(daily_path).read() + assert "Implemented entity normalization" in daily_content + + # Individual file should exist + individual_file = result.get("individual_file") + assert individual_file is not None, "individual_file should be set" + assert os.path.exists(individual_file), f"Individual file not found: {individual_file}" + + # Individual file should have frontmatter with entities + ind_content = open(individual_file).read() + assert "project/palinode" in ind_content + assert "description:" in ind_content + + +def test_session_end_no_project(tmp_path, monkeypatch): + """session_end without project should still create individual file as Insight.""" + memory_dir = str(tmp_path) + monkeypatch.setattr(config, "memory_dir", 
memory_dir) + monkeypatch.setattr(config.git, "auto_commit", False) + + with mock.patch("palinode.api.server._generate_description", return_value="Quick fix"): + from palinode.api.server import session_end_api, SessionEndRequest + + req = SessionEndRequest( + summary="Quick debugging session", + source="test", + ) + result = session_end_api(req) + + individual_file = result.get("individual_file") + assert individual_file is not None + assert os.path.exists(individual_file) + + # Should be in insights/ category (Insight type) + ind_content = open(individual_file).read() + assert "category: insights" in ind_content diff --git a/tests/test_store.py b/tests/test_store.py index 4885ad9..c7ef4b7 100644 --- a/tests/test_store.py +++ b/tests/test_store.py @@ -36,12 +36,107 @@ def test_search_hybrid_empty(): res = store.search_hybrid("query", query_embedding=[0.0]*1024, top_k=2) assert len(res) == 0 +def test_search_returns_raw_score(): + """search() should include raw_score equal to score (cosine similarity).""" + with patch("palinode.core.store.get_db") as mock_db: + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_cursor.fetchall.return_value = [ + { + "file_path": "test.md", + "section_id": "root", + "content": "hello world", + "category": "insight", + "metadata": '{}', + "created_at": "2025-01-01", + "last_updated": "2025-01-01", + "distance": 0.5, # L2 distance → cosine = 1 - (0.5^2 / 2) = 0.875 + } + ] + mock_conn.cursor.return_value = mock_cursor + mock_db.return_value = mock_conn + + res = store.search(query_embedding=[0.0] * 1024, threshold=0.0) + assert len(res) == 1 + assert "raw_score" in res[0] + assert res[0]["raw_score"] == res[0]["score"] + assert abs(res[0]["raw_score"] - 0.875) < 0.001 + + +def test_search_hybrid_raw_score_from_vector(): + """search_hybrid() should expose the original cosine similarity as raw_score.""" + with patch("palinode.core.store.search") as mock_vec: + with patch("palinode.core.store.search_fts") as mock_fts: + with 
patch("palinode.core.store.get_db"): + mock_vec.return_value = [ + {"file_path": "a.md", "section_id": "root", "content": "text a", "score": 0.9, "raw_score": 0.9}, + {"file_path": "b.md", "section_id": "root", "content": "text b", "score": 0.7, "raw_score": 0.7}, + ] + mock_fts.return_value = [ + {"file_path": "a.md", "section_id": "root", "content": "text a", "score": 0.8}, + ] + + res = store.search_hybrid("query", query_embedding=[0.0] * 1024, top_k=10, threshold=0.0) + + a_results = [r for r in res if r["file_path"] == "a.md"] + b_results = [r for r in res if r["file_path"] == "b.md"] + + # a.md came from vector search → raw_score should be the cosine similarity + assert len(a_results) == 1 + assert a_results[0]["raw_score"] == 0.9 + + # b.md also came from vector search + assert len(b_results) == 1 + assert b_results[0]["raw_score"] == 0.7 + + # RRF score should differ from raw_score (it's normalized) + assert a_results[0]["score"] != a_results[0]["raw_score"] + + +def test_search_hybrid_bm25_only_raw_score_is_none(): + """Results that only came from BM25 (no vector match) should have raw_score=None.""" + with patch("palinode.core.store.search") as mock_vec: + with patch("palinode.core.store.search_fts") as mock_fts: + with patch("palinode.core.store.get_db"): + mock_vec.return_value = [ + {"file_path": "a.md", "section_id": "root", "content": "text a", "score": 0.9, "raw_score": 0.9}, + ] + mock_fts.return_value = [ + {"file_path": "a.md", "section_id": "root", "content": "text a", "score": 0.8}, + {"file_path": "bm25only.md", "section_id": "root", "content": "keyword match", "score": 0.7}, + ] + + res = store.search_hybrid("query", query_embedding=[0.0] * 1024, top_k=10, threshold=0.0) + + bm25_results = [r for r in res if r["file_path"] == "bm25only.md"] + assert len(bm25_results) == 1 + assert bm25_results[0]["raw_score"] is None + + +def test_search_hybrid_raw_score_preserved_through_rrf(): + """raw_score should be the original cosine sim, not affected 
by RRF normalization.""" + with patch("palinode.core.store.search") as mock_vec: + with patch("palinode.core.store.search_fts") as mock_fts: + with patch("palinode.core.store.get_db"): + mock_vec.return_value = [ + {"file_path": "x.md", "section_id": "root", "content": "x", "score": 0.85, "raw_score": 0.85}, + ] + mock_fts.return_value = [] + + res = store.search_hybrid("query", query_embedding=[0.0] * 1024, top_k=10, threshold=0.0) + assert len(res) == 1 + # raw_score should be the original cosine similarity (0.85) + assert res[0]["raw_score"] == 0.85 + # RRF score is normalized to 1.0 (only result → max score) + assert res[0]["score"] == 1.0 + + def test_detect_entities_in_text(): with patch("palinode.core.store.get_db") as mock_db: mock_conn = MagicMock() mock_conn.execute.return_value.fetchall.return_value = [("person/alice",), ("project/alpha",)] mock_db.return_value = mock_conn - + res = store.detect_entities_in_text("Saw alice today regarding project alpha") assert "person/alice" in res assert "project/alpha" in res diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..e690f25 --- /dev/null +++ b/uv.lock @@ -0,0 +1,940 @@ +version = 1 +revision = 3 +requires-python = ">=3.11" + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { 
registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.13.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/14/2c5dd9f512b66549ae92767a9c7b330ae88e1932ca57876909410251fe13/anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc", size = 231622, upload-time = "2026-03-24T12:59:09.671Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/42/e921fccf5015463e32a3cf6ee7f980a6ed0f395ceeaa45060b61d86486c2/anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708", size = 114353, upload-time = "2026-03-24T12:59:08.246Z" }, +] + +[[package]] +name = "attrs" +version = "26.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/8e/82a0fe20a541c03148528be8cac2408564a6c9a0cc7e9171802bc1d26985/attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32", size = 952055, upload-time = "2026-03-19T14:22:25.026Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/64/b4/17d4b0b2a2dc85a6df63d1157e028ed19f90d4cd97c36717afef2bc2f395/attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309", size = 67548, upload-time = "2026-03-19T14:22:23.645Z" }, +] + +[[package]] +name = "certifi" +version = "2026.2.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "cryptography" +version = "46.0.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a4/ba/04b1bd4218cbc58dc90ce967106d51582371b898690f3ae0402876cc4f34/cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759", size = 750542, upload-time = "2026-03-25T23:34:53.396Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/23/9285e15e3bc57325b0a72e592921983a701efc1ee8f91c06c5f0235d86d9/cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8", size = 7176401, upload-time = "2026-03-25T23:33:22.096Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/e61f8f13950ab6195b31913b42d39f0f9afc7d93f76710f299b5ec286ae6/cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30", size = 4275275, upload-time = "2026-03-25T23:33:23.844Z" }, + { url = "https://files.pythonhosted.org/packages/19/69/732a736d12c2631e140be2348b4ad3d226302df63ef64d30dfdb8db7ad1c/cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a", size = 4425320, upload-time = "2026-03-25T23:33:25.703Z" }, + { url = "https://files.pythonhosted.org/packages/d4/12/123be7292674abf76b21ac1fc0e1af50661f0e5b8f0ec8285faac18eb99e/cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175", size = 4278082, upload-time = "2026-03-25T23:33:27.423Z" }, + { url = 
"https://files.pythonhosted.org/packages/5b/ba/d5e27f8d68c24951b0a484924a84c7cdaed7502bac9f18601cd357f8b1d2/cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463", size = 4926514, upload-time = "2026-03-25T23:33:29.206Z" }, + { url = "https://files.pythonhosted.org/packages/34/71/1ea5a7352ae516d5512d17babe7e1b87d9db5150b21f794b1377eac1edc0/cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97", size = 4457766, upload-time = "2026-03-25T23:33:30.834Z" }, + { url = "https://files.pythonhosted.org/packages/01/59/562be1e653accee4fdad92c7a2e88fced26b3fdfce144047519bbebc299e/cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c", size = 3986535, upload-time = "2026-03-25T23:33:33.02Z" }, + { url = "https://files.pythonhosted.org/packages/d6/8b/b1ebfeb788bf4624d36e45ed2662b8bd43a05ff62157093c1539c1288a18/cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507", size = 4277618, upload-time = "2026-03-25T23:33:34.567Z" }, + { url = "https://files.pythonhosted.org/packages/dd/52/a005f8eabdb28df57c20f84c44d397a755782d6ff6d455f05baa2785bd91/cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19", size = 4890802, upload-time = "2026-03-25T23:33:37.034Z" }, + { url = "https://files.pythonhosted.org/packages/ec/4d/8e7d7245c79c617d08724e2efa397737715ca0ec830ecb3c91e547302555/cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738", size = 4457425, upload-time = "2026-03-25T23:33:38.904Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/5c/f6c3596a1430cec6f949085f0e1a970638d76f81c3ea56d93d564d04c340/cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c", size = 4405530, upload-time = "2026-03-25T23:33:40.842Z" }, + { url = "https://files.pythonhosted.org/packages/7e/c9/9f9cea13ee2dbde070424e0c4f621c091a91ffcc504ffea5e74f0e1daeff/cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f", size = 4667896, upload-time = "2026-03-25T23:33:42.781Z" }, + { url = "https://files.pythonhosted.org/packages/ad/b5/1895bc0821226f129bc74d00eccfc6a5969e2028f8617c09790bf89c185e/cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2", size = 3026348, upload-time = "2026-03-25T23:33:45.021Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f8/c9bcbf0d3e6ad288b9d9aa0b1dee04b063d19e8c4f871855a03ab3a297ab/cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124", size = 3483896, upload-time = "2026-03-25T23:33:46.649Z" }, + { url = "https://files.pythonhosted.org/packages/01/41/3a578f7fd5c70611c0aacba52cd13cb364a5dee895a5c1d467208a9380b0/cryptography-46.0.6-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:2ef9e69886cbb137c2aef9772c2e7138dc581fad4fcbcf13cc181eb5a3ab6275", size = 7117147, upload-time = "2026-03-25T23:33:48.249Z" }, + { url = "https://files.pythonhosted.org/packages/fa/87/887f35a6fca9dde90cad08e0de0c89263a8e59b2d2ff904fd9fcd8025b6f/cryptography-46.0.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7f417f034f91dcec1cb6c5c35b07cdbb2ef262557f701b4ecd803ee8cefed4f4", size = 4266221, upload-time = "2026-03-25T23:33:49.874Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/a8/0a90c4f0b0871e0e3d1ed126aed101328a8a57fd9fd17f00fb67e82a51ca/cryptography-46.0.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d24c13369e856b94892a89ddf70b332e0b70ad4a5c43cf3e9cb71d6d7ffa1f7b", size = 4408952, upload-time = "2026-03-25T23:33:52.128Z" }, + { url = "https://files.pythonhosted.org/packages/16/0b/b239701eb946523e4e9f329336e4ff32b1247e109cbab32d1a7b61da8ed7/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:aad75154a7ac9039936d50cf431719a2f8d4ed3d3c277ac03f3339ded1a5e707", size = 4270141, upload-time = "2026-03-25T23:33:54.11Z" }, + { url = "https://files.pythonhosted.org/packages/0f/a8/976acdd4f0f30df7b25605f4b9d3d89295351665c2091d18224f7ad5cdbf/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:3c21d92ed15e9cfc6eb64c1f5a0326db22ca9c2566ca46d845119b45b4400361", size = 4904178, upload-time = "2026-03-25T23:33:55.725Z" }, + { url = "https://files.pythonhosted.org/packages/b1/1b/bf0e01a88efd0e59679b69f42d4afd5bced8700bb5e80617b2d63a3741af/cryptography-46.0.6-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:4668298aef7cddeaf5c6ecc244c2302a2b8e40f384255505c22875eebb47888b", size = 4441812, upload-time = "2026-03-25T23:33:57.364Z" }, + { url = "https://files.pythonhosted.org/packages/bb/8b/11df86de2ea389c65aa1806f331cae145f2ed18011f30234cc10ca253de8/cryptography-46.0.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:8ce35b77aaf02f3b59c90b2c8a05c73bac12cea5b4e8f3fbece1f5fddea5f0ca", size = 3963923, upload-time = "2026-03-25T23:33:59.361Z" }, + { url = "https://files.pythonhosted.org/packages/91/e0/207fb177c3a9ef6a8108f234208c3e9e76a6aa8cf20d51932916bd43bda0/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c89eb37fae9216985d8734c1afd172ba4927f5a05cfd9bf0e4863c6d5465b013", size = 4269695, upload-time = "2026-03-25T23:34:00.909Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/5e/19f3260ed1e95bced52ace7501fabcd266df67077eeb382b79c81729d2d3/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:ed418c37d095aeddf5336898a132fba01091f0ac5844e3e8018506f014b6d2c4", size = 4869785, upload-time = "2026-03-25T23:34:02.796Z" }, + { url = "https://files.pythonhosted.org/packages/10/38/cd7864d79aa1d92ef6f1a584281433419b955ad5a5ba8d1eb6c872165bcb/cryptography-46.0.6-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:69cf0056d6947edc6e6760e5f17afe4bea06b56a9ac8a06de9d2bd6b532d4f3a", size = 4441404, upload-time = "2026-03-25T23:34:04.35Z" }, + { url = "https://files.pythonhosted.org/packages/09/0a/4fe7a8d25fed74419f91835cf5829ade6408fd1963c9eae9c4bce390ecbb/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e7304c4f4e9490e11efe56af6713983460ee0780f16c63f219984dab3af9d2d", size = 4397549, upload-time = "2026-03-25T23:34:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/5f/a0/7d738944eac6513cd60a8da98b65951f4a3b279b93479a7e8926d9cd730b/cryptography-46.0.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b928a3ca837c77a10e81a814a693f2295200adb3352395fad024559b7be7a736", size = 4651874, upload-time = "2026-03-25T23:34:07.916Z" }, + { url = "https://files.pythonhosted.org/packages/cb/f1/c2326781ca05208845efca38bf714f76939ae446cd492d7613808badedf1/cryptography-46.0.6-cp314-cp314t-win32.whl", hash = "sha256:97c8115b27e19e592a05c45d0dd89c57f81f841cc9880e353e0d3bf25b2139ed", size = 3001511, upload-time = "2026-03-25T23:34:09.892Z" }, + { url = "https://files.pythonhosted.org/packages/c9/57/fe4a23eb549ac9d903bd4698ffda13383808ef0876cc912bcb2838799ece/cryptography-46.0.6-cp314-cp314t-win_amd64.whl", hash = "sha256:c797e2517cb7880f8297e2c0f43bb910e91381339336f75d2c1c2cbf811b70b4", size = 3471692, upload-time = "2026-03-25T23:34:11.613Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/cc/f330e982852403da79008552de9906804568ae9230da8432f7496ce02b71/cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a", size = 7162776, upload-time = "2026-03-25T23:34:13.308Z" }, + { url = "https://files.pythonhosted.org/packages/49/b3/dc27efd8dcc4bff583b3f01d4a3943cd8b5821777a58b3a6a5f054d61b79/cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8", size = 4270529, upload-time = "2026-03-25T23:34:15.019Z" }, + { url = "https://files.pythonhosted.org/packages/e6/05/e8d0e6eb4f0d83365b3cb0e00eb3c484f7348db0266652ccd84632a3d58d/cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77", size = 4414827, upload-time = "2026-03-25T23:34:16.604Z" }, + { url = "https://files.pythonhosted.org/packages/2f/97/daba0f5d2dc6d855e2dcb70733c812558a7977a55dd4a6722756628c44d1/cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290", size = 4271265, upload-time = "2026-03-25T23:34:18.586Z" }, + { url = "https://files.pythonhosted.org/packages/89/06/fe1fce39a37ac452e58d04b43b0855261dac320a2ebf8f5260dd55b201a9/cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410", size = 4916800, upload-time = "2026-03-25T23:34:20.561Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8a/b14f3101fe9c3592603339eb5d94046c3ce5f7fc76d6512a2d40efd9724e/cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d", size = 4448771, upload-time = "2026-03-25T23:34:22.406Z" }, + { url = 
"https://files.pythonhosted.org/packages/01/b3/0796998056a66d1973fd52ee89dc1bb3b6581960a91ad4ac705f182d398f/cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70", size = 3978333, upload-time = "2026-03-25T23:34:24.281Z" }, + { url = "https://files.pythonhosted.org/packages/c5/3d/db200af5a4ffd08918cd55c08399dc6c9c50b0bc72c00a3246e099d3a849/cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d", size = 4271069, upload-time = "2026-03-25T23:34:25.895Z" }, + { url = "https://files.pythonhosted.org/packages/d7/18/61acfd5b414309d74ee838be321c636fe71815436f53c9f0334bf19064fa/cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa", size = 4878358, upload-time = "2026-03-25T23:34:27.67Z" }, + { url = "https://files.pythonhosted.org/packages/8b/65/5bf43286d566f8171917cae23ac6add941654ccf085d739195a4eacf1674/cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58", size = 4448061, upload-time = "2026-03-25T23:34:29.375Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/7e49c0fa7205cf3597e525d156a6bce5b5c9de1fd7e8cb01120e459f205a/cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb", size = 4399103, upload-time = "2026-03-25T23:34:32.036Z" }, + { url = "https://files.pythonhosted.org/packages/44/46/466269e833f1c4718d6cd496ffe20c56c9c8d013486ff66b4f69c302a68d/cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72", size = 4659255, upload-time = "2026-03-25T23:34:33.679Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/09/ddc5f630cc32287d2c953fc5d32705e63ec73e37308e5120955316f53827/cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c", size = 3010660, upload-time = "2026-03-25T23:34:35.418Z" }, + { url = "https://files.pythonhosted.org/packages/1b/82/ca4893968aeb2709aacfb57a30dec6fa2ab25b10fa9f064b8882ce33f599/cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f", size = 3471160, upload-time = "2026-03-25T23:34:37.191Z" }, + { url = "https://files.pythonhosted.org/packages/2e/84/7ccff00ced5bac74b775ce0beb7d1be4e8637536b522b5df9b73ada42da2/cryptography-46.0.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2ea0f37e9a9cf0df2952893ad145fd9627d326a59daec9b0802480fa3bcd2ead", size = 3475444, upload-time = "2026-03-25T23:34:38.944Z" }, + { url = "https://files.pythonhosted.org/packages/bc/1f/4c926f50df7749f000f20eede0c896769509895e2648db5da0ed55db711d/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a3e84d5ec9ba01f8fd03802b2147ba77f0c8f2617b2aff254cedd551844209c8", size = 4218227, upload-time = "2026-03-25T23:34:40.871Z" }, + { url = "https://files.pythonhosted.org/packages/c6/65/707be3ffbd5f786028665c3223e86e11c4cda86023adbc56bd72b1b6bab5/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:12f0fa16cc247b13c43d56d7b35287ff1569b5b1f4c5e87e92cc4fcc00cd10c0", size = 4381399, upload-time = "2026-03-25T23:34:42.609Z" }, + { url = "https://files.pythonhosted.org/packages/f3/6d/73557ed0ef7d73d04d9aba745d2c8e95218213687ee5e76b7d236a5030fc/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:50575a76e2951fe7dbd1f56d181f8c5ceeeb075e9ff88e7ad997d2f42af06e7b", size = 4217595, upload-time = "2026-03-25T23:34:44.205Z" }, + { url = 
"https://files.pythonhosted.org/packages/9e/c5/e1594c4eec66a567c3ac4400008108a415808be2ce13dcb9a9045c92f1a0/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:90e5f0a7b3be5f40c3a0a0eafb32c681d8d2c181fc2a1bdabe9b3f611d9f6b1a", size = 4380912, upload-time = "2026-03-25T23:34:46.328Z" }, + { url = "https://files.pythonhosted.org/packages/1a/89/843b53614b47f97fe1abc13f9a86efa5ec9e275292c457af1d4a60dc80e0/cryptography-46.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6728c49e3b2c180ef26f8e9f0a883a2c585638db64cf265b49c9ba10652d430e", size = 3409955, upload-time = "2026-03-25T23:34:48.465Z" }, +] + +[[package]] +name = "fastapi" +version = "0.135.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/73/5903c4b13beae98618d64eb9870c3fac4f605523dd0312ca5c80dadbd5b9/fastapi-0.135.2.tar.gz", hash = "sha256:88a832095359755527b7f63bb4c6bc9edb8329a026189eed83d6c1afcf419d56", size = 395833, upload-time = "2026-03-23T14:12:41.697Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/ea/18f6d0457f9efb2fc6fa594857f92810cadb03024975726db6546b3d6fcf/fastapi-0.135.2-py3-none-any.whl", hash = "sha256:0af0447d541867e8db2a6a25c23a8c4bd80e2394ac5529bd87501bbb9e240ca5", size = 117407, upload-time = "2026-03-23T14:12:43.284Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "httpx-sse" +version = "0.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = 
"jsonschema" +version = "4.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583, upload-time = "2026-01-07T13:41:07.246Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = 
"sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "mcp" +version = "1.26.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "pyjwt", extra = ["crypto"] }, + { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/6d/62e76bbb8144d6ed86e202b5edd8a4cb631e7c8130f3f4893c3f90262b10/mcp-1.26.0.tar.gz", hash = "sha256:db6e2ef491eecc1a0d93711a76f28dec2e05999f93afd48795da1c1137142c66", size = 608005, upload-time = "2026-01-24T19:40:32.468Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/d9/eaa1f80170d2b7c5ba23f3b59f766f3a0bb41155fbc32a69adfa1adaaef9/mcp-1.26.0-py3-none-any.whl", hash = "sha256:904a21c33c25aa98ddbeb47273033c435e595bbacfdb177f4bd87f6dceebe1ca", size = 233615, upload-time = "2026-01-24T19:40:30.652Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = 
"2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "palinode" +version = "0.7.0" +source = { editable = "." 
} +dependencies = [ + { name = "click" }, + { name = "fastapi" }, + { name = "httpx" }, + { name = "mcp" }, + { name = "pydantic" }, + { name = "python-frontmatter" }, + { name = "pyyaml" }, + { name = "rich" }, + { name = "sqlite-vec" }, + { name = "uvicorn" }, + { name = "watchdog" }, +] + +[package.optional-dependencies] +dev = [ + { name = "pytest" }, + { name = "pytest-asyncio" }, +] + +[package.metadata] +requires-dist = [ + { name = "click" }, + { name = "fastapi" }, + { name = "httpx" }, + { name = "mcp" }, + { name = "pydantic" }, + { name = "pytest", marker = "extra == 'dev'" }, + { name = "pytest-asyncio", marker = "extra == 'dev'" }, + { name = "python-frontmatter" }, + { name = "pyyaml" }, + { name = "rich" }, + { name = "sqlite-vec" }, + { name = "uvicorn" }, + { name = "watchdog" }, +] +provides-extras = ["dev"] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "pycparser" +version = "3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/72/74a989dd9f2084b3d9530b0915fdda64ac48831c30dbf7c72a41a5232db8/pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6", size = 2105873, upload-time = "2025-11-04T13:39:31.373Z" }, + { url = 
"https://files.pythonhosted.org/packages/12/44/37e403fd9455708b3b942949e1d7febc02167662bf1a7da5b78ee1ea2842/pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b", size = 1899826, upload-time = "2025-11-04T13:39:32.897Z" }, + { url = "https://files.pythonhosted.org/packages/33/7f/1d5cab3ccf44c1935a359d51a8a2a9e1a654b744b5e7f80d41b88d501eec/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a", size = 1917869, upload-time = "2025-11-04T13:39:34.469Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6a/30d94a9674a7fe4f4744052ed6c5e083424510be1e93da5bc47569d11810/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8", size = 2063890, upload-time = "2025-11-04T13:39:36.053Z" }, + { url = "https://files.pythonhosted.org/packages/50/be/76e5d46203fcb2750e542f32e6c371ffa9b8ad17364cf94bb0818dbfb50c/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e", size = 2229740, upload-time = "2025-11-04T13:39:37.753Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ee/fed784df0144793489f87db310a6bbf8118d7b630ed07aa180d6067e653a/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1", size = 2350021, upload-time = "2025-11-04T13:39:40.94Z" }, + { url = "https://files.pythonhosted.org/packages/c8/be/8fed28dd0a180dca19e72c233cbf58efa36df055e5b9d90d64fd1740b828/pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b", size = 2066378, upload-time = 
"2025-11-04T13:39:42.523Z" }, + { url = "https://files.pythonhosted.org/packages/b0/3b/698cf8ae1d536a010e05121b4958b1257f0b5522085e335360e53a6b1c8b/pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b", size = 2175761, upload-time = "2025-11-04T13:39:44.553Z" }, + { url = "https://files.pythonhosted.org/packages/b8/ba/15d537423939553116dea94ce02f9c31be0fa9d0b806d427e0308ec17145/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284", size = 2146303, upload-time = "2025-11-04T13:39:46.238Z" }, + { url = "https://files.pythonhosted.org/packages/58/7f/0de669bf37d206723795f9c90c82966726a2ab06c336deba4735b55af431/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594", size = 2340355, upload-time = "2025-11-04T13:39:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/e5/de/e7482c435b83d7e3c3ee5ee4451f6e8973cff0eb6007d2872ce6383f6398/pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e", size = 2319875, upload-time = "2025-11-04T13:39:49.705Z" }, + { url = "https://files.pythonhosted.org/packages/fe/e6/8c9e81bb6dd7560e33b9053351c29f30c8194b72f2d6932888581f503482/pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b", size = 1987549, upload-time = "2025-11-04T13:39:51.842Z" }, + { url = "https://files.pythonhosted.org/packages/11/66/f14d1d978ea94d1bc21fc98fcf570f9542fe55bfcc40269d4e1a21c19bf7/pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe", size = 2011305, upload-time = "2025-11-04T13:39:53.485Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/d8/0e271434e8efd03186c5386671328154ee349ff0354d83c74f5caaf096ed/pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f", size = 1972902, upload-time = "2025-11-04T13:39:56.488Z" }, + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = 
"2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = 
"https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = 
"2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/11/72/90fda5ee3b97e51c494938a4a44c3a35a9c96c19bba12372fb9c634d6f57/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034", size = 2115441, upload-time = "2025-11-04T13:42:39.557Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/8942f884fa33f50794f119012dc6a1a02ac43a56407adaac20463df8e98f/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c", size = 1930291, upload-time = "2025-11-04T13:42:42.169Z" }, + { url = "https://files.pythonhosted.org/packages/79/c8/ecb9ed9cd942bce09fc888ee960b52654fbdbede4ba6c2d6e0d3b1d8b49c/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2", size = 1948632, upload-time = "2025-11-04T13:42:44.564Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/1b/687711069de7efa6af934e74f601e2a4307365e8fdc404703afc453eab26/pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad", size = 2138905, upload-time = "2025-11-04T13:42:47.156Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9b/1b3f0e9f9305839d7e84912f9e8bfbd191ed1b1ef48083609f0dabde978c/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26", size = 2101980, upload-time = "2025-11-04T13:43:25.97Z" }, + { url = "https://files.pythonhosted.org/packages/a4/ed/d71fefcb4263df0da6a85b5d8a7508360f2f2e9b3bf5814be9c8bccdccc1/pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808", size = 1923865, upload-time = "2025-11-04T13:43:28.763Z" }, + { url = "https://files.pythonhosted.org/packages/ce/3a/626b38db460d675f873e4444b4bb030453bbe7b4ba55df821d026a0493c4/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc", size = 2134256, upload-time = "2025-11-04T13:43:31.71Z" }, + { url = "https://files.pythonhosted.org/packages/83/d9/8412d7f06f616bbc053d30cb4e5f76786af3221462ad5eee1f202021eb4e/pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1", size = 2174762, upload-time = "2025-11-04T13:43:34.744Z" }, + { url = "https://files.pythonhosted.org/packages/55/4c/162d906b8e3ba3a99354e20faa1b49a85206c47de97a639510a0e673f5da/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84", size = 2143141, upload-time = "2025-11-04T13:43:37.701Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f2/f11dd73284122713f5f89fc940f370d035fa8e1e078d446b3313955157fe/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770", size = 2330317, upload-time = "2025-11-04T13:43:40.406Z" }, + { url = "https://files.pythonhosted.org/packages/88/9d/b06ca6acfe4abb296110fb1273a4d848a0bfb2ff65f3ee92127b3244e16b/pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f", size = 2316992, upload-time = "2025-11-04T13:43:43.602Z" }, + { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, +] + +[[package]] +name = "pydantic-settings" +version = "2.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/52/6d/fffca34caecc4a3f97bda81b2098da5e8ab7efc9a66e819074a11955d87e/pydantic_settings-2.13.1.tar.gz", hash = "sha256:b4c11847b15237fb0171e1462bf540e294affb9b86db4d9aa5c01730bdbe4025", size = 223826, upload-time = "2026-02-19T13:45:08.055Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/4b/ccc026168948fec4f7555b9164c724cf4125eac006e176541483d2c959be/pydantic_settings-2.13.1-py3-none-any.whl", hash = "sha256:d56fd801823dbeae7f0975e1f8c8e25c258eb75d278ea7abb5d9cebb01b56237", size = 58929, upload-time = "2026-02-19T13:45:06.034Z" }, +] + +[[package]] +name = "pygments" +version = "2.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, +] 
+ +[[package]] +name = "pyjwt" +version = "2.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c2/27/a3b6e5bf6ff856d2509292e95c8f57f0df7017cf5394921fc4e4ef40308a/pyjwt-2.12.1.tar.gz", hash = "sha256:c74a7a2adf861c04d002db713dd85f84beb242228e671280bf709d765b03672b", size = 102564, upload-time = "2026-03-13T19:27:37.25Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/7a/8dd906bd22e79e47397a61742927f6747fe93242ef86645ee9092e610244/pyjwt-2.12.1-py3-none-any.whl", hash = "sha256:28ca37c070cad8ba8cd9790cd940535d40274d22f80ab87f3ac6a713e6e8454c", size = 29726, upload-time = "2026-03-13T19:27:35.677Z" }, +] + +[package.optional-dependencies] +crypto = [ + { name = "cryptography" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/90/2c/8af215c0f776415f3590cac4f9086ccefd6fd463befeae41cd4d3f193e5a/pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5", size = 50087, upload-time = "2025-11-10T16:07:47.256Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/35/f8b19922b6a25bc0880171a2f1a003eaeb93657475193ab516fd87cac9da/pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5", size = 15075, upload-time = "2025-11-10T16:07:45.537Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" }, +] + +[[package]] +name = "python-frontmatter" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/de/910fa208120314a12f9a88ea63e03707261692af782c99283f1a2c8a5e6f/python-frontmatter-1.1.0.tar.gz", hash = "sha256:7118d2bd56af9149625745c58c9b51fb67e8d1294a0c76796dafdc72c36e5f6d", size = 16256, upload-time = "2024-01-16T18:50:04.052Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/87/3c8da047b3ec5f99511d1b4d7a5bc72d4b98751c7e78492d14dc736319c5/python_frontmatter-1.1.0-py3-none-any.whl", hash = 
"sha256:335465556358d9d0e6c98bbeb69b1c969f2a4a21360587b9873bfc3b213407c1", size = 9834, upload-time = "2024-01-16T18:50:00.911Z" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" }, +] + +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = 
"sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", 
size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 
761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = 
"https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = 
"https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = 
"https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url 
= "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "referencing" +version = "0.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, +] + +[[package]] +name = "rich" +version = "14.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/6e/f964e88b3d2abee2a82c1ac8366da848fce1c6d834dc2132c3fda3970290/rpds_py-0.30.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a2bffea6a4ca9f01b3f8e548302470306689684e61602aa3d141e34da06cf425", size = 370157, upload-time = "2025-11-30T20:21:53.789Z" }, + { url = "https://files.pythonhosted.org/packages/94/ba/24e5ebb7c1c82e74c4e4f33b2112a5573ddc703915b13a073737b59b86e0/rpds_py-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc4f992dfe1e2bc3ebc7444f6c7051b4bc13cd8e33e43511e8ffd13bf407010d", size = 359676, upload-time = "2025-11-30T20:21:55.475Z" }, + { url = "https://files.pythonhosted.org/packages/84/86/04dbba1b087227747d64d80c3b74df946b986c57af0a9f0c98726d4d7a3b/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:422c3cb9856d80b09d30d2eb255d0754b23e090034e1deb4083f8004bd0761e4", size = 389938, upload-time = "2025-11-30T20:21:57.079Z" }, + { url = "https://files.pythonhosted.org/packages/42/bb/1463f0b1722b7f45431bdd468301991d1328b16cffe0b1c2918eba2c4eee/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07ae8a593e1c3c6b82ca3292efbe73c30b61332fd612e05abee07c79359f292f", size = 402932, upload-time = "2025-11-30T20:21:58.47Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/ee/2520700a5c1f2d76631f948b0736cdf9b0acb25abd0ca8e889b5c62ac2e3/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12f90dd7557b6bd57f40abe7747e81e0c0b119bef015ea7726e69fe550e394a4", size = 525830, upload-time = "2025-11-30T20:21:59.699Z" }, + { url = "https://files.pythonhosted.org/packages/e0/ad/bd0331f740f5705cc555a5e17fdf334671262160270962e69a2bdef3bf76/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99b47d6ad9a6da00bec6aabe5a6279ecd3c06a329d4aa4771034a21e335c3a97", size = 412033, upload-time = "2025-11-30T20:22:00.991Z" }, + { url = "https://files.pythonhosted.org/packages/f8/1e/372195d326549bb51f0ba0f2ecb9874579906b97e08880e7a65c3bef1a99/rpds_py-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33f559f3104504506a44bb666b93a33f5d33133765b0c216a5bf2f1e1503af89", size = 390828, upload-time = "2025-11-30T20:22:02.723Z" }, + { url = "https://files.pythonhosted.org/packages/ab/2b/d88bb33294e3e0c76bc8f351a3721212713629ffca1700fa94979cb3eae8/rpds_py-0.30.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:946fe926af6e44f3697abbc305ea168c2c31d3e3ef1058cf68f379bf0335a78d", size = 404683, upload-time = "2025-11-30T20:22:04.367Z" }, + { url = "https://files.pythonhosted.org/packages/50/32/c759a8d42bcb5289c1fac697cd92f6fe01a018dd937e62ae77e0e7f15702/rpds_py-0.30.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:495aeca4b93d465efde585977365187149e75383ad2684f81519f504f5c13038", size = 421583, upload-time = "2025-11-30T20:22:05.814Z" }, + { url = "https://files.pythonhosted.org/packages/2b/81/e729761dbd55ddf5d84ec4ff1f47857f4374b0f19bdabfcf929164da3e24/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9a0ca5da0386dee0655b4ccdf46119df60e0f10da268d04fe7cc87886872ba7", size = 572496, upload-time = "2025-11-30T20:22:07.713Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/f6/69066a924c3557c9c30baa6ec3a0aa07526305684c6f86c696b08860726c/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8d6d1cc13664ec13c1b84241204ff3b12f9bb82464b8ad6e7a5d3486975c2eed", size = 598669, upload-time = "2025-11-30T20:22:09.312Z" }, + { url = "https://files.pythonhosted.org/packages/5f/48/905896b1eb8a05630d20333d1d8ffd162394127b74ce0b0784ae04498d32/rpds_py-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3896fa1be39912cf0757753826bc8bdc8ca331a28a7c4ae46b7a21280b06bb85", size = 561011, upload-time = "2025-11-30T20:22:11.309Z" }, + { url = "https://files.pythonhosted.org/packages/22/16/cd3027c7e279d22e5eb431dd3c0fbc677bed58797fe7581e148f3f68818b/rpds_py-0.30.0-cp311-cp311-win32.whl", hash = "sha256:55f66022632205940f1827effeff17c4fa7ae1953d2b74a8581baaefb7d16f8c", size = 221406, upload-time = "2025-11-30T20:22:13.101Z" }, + { url = "https://files.pythonhosted.org/packages/fa/5b/e7b7aa136f28462b344e652ee010d4de26ee9fd16f1bfd5811f5153ccf89/rpds_py-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:a51033ff701fca756439d641c0ad09a41d9242fa69121c7d8769604a0a629825", size = 236024, upload-time = "2025-11-30T20:22:14.853Z" }, + { url = "https://files.pythonhosted.org/packages/14/a6/364bba985e4c13658edb156640608f2c9e1d3ea3c81b27aa9d889fff0e31/rpds_py-0.30.0-cp311-cp311-win_arm64.whl", hash = "sha256:47b0ef6231c58f506ef0b74d44e330405caa8428e770fec25329ed2cb971a229", size = 229069, upload-time = "2025-11-30T20:22:16.577Z" }, + { url = "https://files.pythonhosted.org/packages/03/e7/98a2f4ac921d82f33e03f3835f5bf3a4a40aa1bfdc57975e74a97b2b4bdd/rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad", size = 375086, upload-time = "2025-11-30T20:22:17.93Z" }, + { url = "https://files.pythonhosted.org/packages/4d/a1/bca7fd3d452b272e13335db8d6b0b3ecde0f90ad6f16f3328c6fb150c889/rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05", size = 359053, upload-time = "2025-11-30T20:22:19.297Z" }, + { url = "https://files.pythonhosted.org/packages/65/1c/ae157e83a6357eceff62ba7e52113e3ec4834a84cfe07fa4b0757a7d105f/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28", size = 390763, upload-time = "2025-11-30T20:22:21.661Z" }, + { url = "https://files.pythonhosted.org/packages/d4/36/eb2eb8515e2ad24c0bd43c3ee9cd74c33f7ca6430755ccdb240fd3144c44/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd", size = 408951, upload-time = "2025-11-30T20:22:23.408Z" }, + { url = "https://files.pythonhosted.org/packages/d6/65/ad8dc1784a331fabbd740ef6f71ce2198c7ed0890dab595adb9ea2d775a1/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f", size = 514622, upload-time = "2025-11-30T20:22:25.16Z" }, + { url = "https://files.pythonhosted.org/packages/63/8e/0cfa7ae158e15e143fe03993b5bcd743a59f541f5952e1546b1ac1b5fd45/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1", size = 414492, upload-time = "2025-11-30T20:22:26.505Z" }, + { url = "https://files.pythonhosted.org/packages/60/1b/6f8f29f3f995c7ffdde46a626ddccd7c63aefc0efae881dc13b6e5d5bb16/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23", size = 394080, upload-time = "2025-11-30T20:22:27.934Z" }, + { url = "https://files.pythonhosted.org/packages/6d/d5/a266341051a7a3ca2f4b750a3aa4abc986378431fc2da508c5034d081b70/rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = 
"sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6", size = 408680, upload-time = "2025-11-30T20:22:29.341Z" }, + { url = "https://files.pythonhosted.org/packages/10/3b/71b725851df9ab7a7a4e33cf36d241933da66040d195a84781f49c50490c/rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51", size = 423589, upload-time = "2025-11-30T20:22:31.469Z" }, + { url = "https://files.pythonhosted.org/packages/00/2b/e59e58c544dc9bd8bd8384ecdb8ea91f6727f0e37a7131baeff8d6f51661/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5", size = 573289, upload-time = "2025-11-30T20:22:32.997Z" }, + { url = "https://files.pythonhosted.org/packages/da/3e/a18e6f5b460893172a7d6a680e86d3b6bc87a54c1f0b03446a3c8c7b588f/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e", size = 599737, upload-time = "2025-11-30T20:22:34.419Z" }, + { url = "https://files.pythonhosted.org/packages/5c/e2/714694e4b87b85a18e2c243614974413c60aa107fd815b8cbc42b873d1d7/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394", size = 563120, upload-time = "2025-11-30T20:22:35.903Z" }, + { url = "https://files.pythonhosted.org/packages/6f/ab/d5d5e3bcedb0a77f4f613706b750e50a5a3ba1c15ccd3665ecc636c968fd/rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf", size = 223782, upload-time = "2025-11-30T20:22:37.271Z" }, + { url = "https://files.pythonhosted.org/packages/39/3b/f786af9957306fdc38a74cef405b7b93180f481fb48453a114bb6465744a/rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b", size = 240463, upload-time = "2025-11-30T20:22:39.021Z" }, 
+ { url = "https://files.pythonhosted.org/packages/f3/d2/b91dc748126c1559042cfe41990deb92c4ee3e2b415f6b5234969ffaf0cc/rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e", size = 230868, upload-time = "2025-11-30T20:22:40.493Z" }, + { url = "https://files.pythonhosted.org/packages/ed/dc/d61221eb88ff410de3c49143407f6f3147acf2538c86f2ab7ce65ae7d5f9/rpds_py-0.30.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f83424d738204d9770830d35290ff3273fbb02b41f919870479fab14b9d303b2", size = 374887, upload-time = "2025-11-30T20:22:41.812Z" }, + { url = "https://files.pythonhosted.org/packages/fd/32/55fb50ae104061dbc564ef15cc43c013dc4a9f4527a1f4d99baddf56fe5f/rpds_py-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7536cd91353c5273434b4e003cbda89034d67e7710eab8761fd918ec6c69cf8", size = 358904, upload-time = "2025-11-30T20:22:43.479Z" }, + { url = "https://files.pythonhosted.org/packages/58/70/faed8186300e3b9bdd138d0273109784eea2396c68458ed580f885dfe7ad/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2771c6c15973347f50fece41fc447c054b7ac2ae0502388ce3b6738cd366e3d4", size = 389945, upload-time = "2025-11-30T20:22:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/bd/a8/073cac3ed2c6387df38f71296d002ab43496a96b92c823e76f46b8af0543/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0a59119fc6e3f460315fe9d08149f8102aa322299deaa5cab5b40092345c2136", size = 407783, upload-time = "2025-11-30T20:22:46.103Z" }, + { url = "https://files.pythonhosted.org/packages/77/57/5999eb8c58671f1c11eba084115e77a8899d6e694d2a18f69f0ba471ec8b/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76fec018282b4ead0364022e3c54b60bf368b9d926877957a8624b58419169b7", size = 515021, upload-time = "2025-11-30T20:22:47.458Z" }, + { url = 
"https://files.pythonhosted.org/packages/e0/af/5ab4833eadc36c0a8ed2bc5c0de0493c04f6c06de223170bd0798ff98ced/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bef75a5525db97318e8cd061542b5a79812d711ea03dbc1f6f8dbb0c5f0d2", size = 414589, upload-time = "2025-11-30T20:22:48.872Z" }, + { url = "https://files.pythonhosted.org/packages/b7/de/f7192e12b21b9e9a68a6d0f249b4af3fdcdff8418be0767a627564afa1f1/rpds_py-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9027da1ce107104c50c81383cae773ef5c24d296dd11c99e2629dbd7967a20c6", size = 394025, upload-time = "2025-11-30T20:22:50.196Z" }, + { url = "https://files.pythonhosted.org/packages/91/c4/fc70cd0249496493500e7cc2de87504f5aa6509de1e88623431fec76d4b6/rpds_py-0.30.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:9cf69cdda1f5968a30a359aba2f7f9aa648a9ce4b580d6826437f2b291cfc86e", size = 408895, upload-time = "2025-11-30T20:22:51.87Z" }, + { url = "https://files.pythonhosted.org/packages/58/95/d9275b05ab96556fefff73a385813eb66032e4c99f411d0795372d9abcea/rpds_py-0.30.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a4796a717bf12b9da9d3ad002519a86063dcac8988b030e405704ef7d74d2d9d", size = 422799, upload-time = "2025-11-30T20:22:53.341Z" }, + { url = "https://files.pythonhosted.org/packages/06/c1/3088fc04b6624eb12a57eb814f0d4997a44b0d208d6cace713033ff1a6ba/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d4c2aa7c50ad4728a094ebd5eb46c452e9cb7edbfdb18f9e1221f597a73e1e7", size = 572731, upload-time = "2025-11-30T20:22:54.778Z" }, + { url = "https://files.pythonhosted.org/packages/d8/42/c612a833183b39774e8ac8fecae81263a68b9583ee343db33ab571a7ce55/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba81a9203d07805435eb06f536d95a266c21e5b2dfbf6517748ca40c98d19e31", size = 599027, upload-time = "2025-11-30T20:22:56.212Z" }, + { url = 
"https://files.pythonhosted.org/packages/5f/60/525a50f45b01d70005403ae0e25f43c0384369ad24ffe46e8d9068b50086/rpds_py-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:945dccface01af02675628334f7cf49c2af4c1c904748efc5cf7bbdf0b579f95", size = 563020, upload-time = "2025-11-30T20:22:58.2Z" }, + { url = "https://files.pythonhosted.org/packages/0b/5d/47c4655e9bcd5ca907148535c10e7d489044243cc9941c16ed7cd53be91d/rpds_py-0.30.0-cp313-cp313-win32.whl", hash = "sha256:b40fb160a2db369a194cb27943582b38f79fc4887291417685f3ad693c5a1d5d", size = 223139, upload-time = "2025-11-30T20:23:00.209Z" }, + { url = "https://files.pythonhosted.org/packages/f2/e1/485132437d20aa4d3e1d8b3fb5a5e65aa8139f1e097080c2a8443201742c/rpds_py-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:806f36b1b605e2d6a72716f321f20036b9489d29c51c91f4dd29a3e3afb73b15", size = 240224, upload-time = "2025-11-30T20:23:02.008Z" }, + { url = "https://files.pythonhosted.org/packages/24/95/ffd128ed1146a153d928617b0ef673960130be0009c77d8fbf0abe306713/rpds_py-0.30.0-cp313-cp313-win_arm64.whl", hash = "sha256:d96c2086587c7c30d44f31f42eae4eac89b60dabbac18c7669be3700f13c3ce1", size = 230645, upload-time = "2025-11-30T20:23:03.43Z" }, + { url = "https://files.pythonhosted.org/packages/ff/1b/b10de890a0def2a319a2626334a7f0ae388215eb60914dbac8a3bae54435/rpds_py-0.30.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:eb0b93f2e5c2189ee831ee43f156ed34e2a89a78a66b98cadad955972548be5a", size = 364443, upload-time = "2025-11-30T20:23:04.878Z" }, + { url = "https://files.pythonhosted.org/packages/0d/bf/27e39f5971dc4f305a4fb9c672ca06f290f7c4e261c568f3dea16a410d47/rpds_py-0.30.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:922e10f31f303c7c920da8981051ff6d8c1a56207dbdf330d9047f6d30b70e5e", size = 353375, upload-time = "2025-11-30T20:23:06.342Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/58/442ada3bba6e8e6615fc00483135c14a7538d2ffac30e2d933ccf6852232/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdc62c8286ba9bf7f47befdcea13ea0e26bf294bda99758fd90535cbaf408000", size = 383850, upload-time = "2025-11-30T20:23:07.825Z" }, + { url = "https://files.pythonhosted.org/packages/14/14/f59b0127409a33c6ef6f5c1ebd5ad8e32d7861c9c7adfa9a624fc3889f6c/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:47f9a91efc418b54fb8190a6b4aa7813a23fb79c51f4bb84e418f5476c38b8db", size = 392812, upload-time = "2025-11-30T20:23:09.228Z" }, + { url = "https://files.pythonhosted.org/packages/b3/66/e0be3e162ac299b3a22527e8913767d869e6cc75c46bd844aa43fb81ab62/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f3587eb9b17f3789ad50824084fa6f81921bbf9a795826570bda82cb3ed91f2", size = 517841, upload-time = "2025-11-30T20:23:11.186Z" }, + { url = "https://files.pythonhosted.org/packages/3d/55/fa3b9cf31d0c963ecf1ba777f7cf4b2a2c976795ac430d24a1f43d25a6ba/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39c02563fc592411c2c61d26b6c5fe1e51eaa44a75aa2c8735ca88b0d9599daa", size = 408149, upload-time = "2025-11-30T20:23:12.864Z" }, + { url = "https://files.pythonhosted.org/packages/60/ca/780cf3b1a32b18c0f05c441958d3758f02544f1d613abf9488cd78876378/rpds_py-0.30.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51a1234d8febafdfd33a42d97da7a43f5dcb120c1060e352a3fbc0c6d36e2083", size = 383843, upload-time = "2025-11-30T20:23:14.638Z" }, + { url = "https://files.pythonhosted.org/packages/82/86/d5f2e04f2aa6247c613da0c1dd87fcd08fa17107e858193566048a1e2f0a/rpds_py-0.30.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:eb2c4071ab598733724c08221091e8d80e89064cd472819285a9ab0f24bcedb9", size = 396507, upload-time = "2025-11-30T20:23:16.105Z" }, + { url = 
"https://files.pythonhosted.org/packages/4b/9a/453255d2f769fe44e07ea9785c8347edaf867f7026872e76c1ad9f7bed92/rpds_py-0.30.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6bdfdb946967d816e6adf9a3d8201bfad269c67efe6cefd7093ef959683c8de0", size = 414949, upload-time = "2025-11-30T20:23:17.539Z" }, + { url = "https://files.pythonhosted.org/packages/a3/31/622a86cdc0c45d6df0e9ccb6becdba5074735e7033c20e401a6d9d0e2ca0/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c77afbd5f5250bf27bf516c7c4a016813eb2d3e116139aed0096940c5982da94", size = 565790, upload-time = "2025-11-30T20:23:19.029Z" }, + { url = "https://files.pythonhosted.org/packages/1c/5d/15bbf0fb4a3f58a3b1c67855ec1efcc4ceaef4e86644665fff03e1b66d8d/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:61046904275472a76c8c90c9ccee9013d70a6d0f73eecefd38c1ae7c39045a08", size = 590217, upload-time = "2025-11-30T20:23:20.885Z" }, + { url = "https://files.pythonhosted.org/packages/6d/61/21b8c41f68e60c8cc3b2e25644f0e3681926020f11d06ab0b78e3c6bbff1/rpds_py-0.30.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4c5f36a861bc4b7da6516dbdf302c55313afa09b81931e8280361a4f6c9a2d27", size = 555806, upload-time = "2025-11-30T20:23:22.488Z" }, + { url = "https://files.pythonhosted.org/packages/f9/39/7e067bb06c31de48de3eb200f9fc7c58982a4d3db44b07e73963e10d3be9/rpds_py-0.30.0-cp313-cp313t-win32.whl", hash = "sha256:3d4a69de7a3e50ffc214ae16d79d8fbb0922972da0356dcf4d0fdca2878559c6", size = 211341, upload-time = "2025-11-30T20:23:24.449Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4d/222ef0b46443cf4cf46764d9c630f3fe4abaa7245be9417e56e9f52b8f65/rpds_py-0.30.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f14fc5df50a716f7ece6a80b6c78bb35ea2ca47c499e422aa4463455dd96d56d", size = 225768, upload-time = "2025-11-30T20:23:25.908Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/81/dad16382ebbd3d0e0328776d8fd7ca94220e4fa0798d1dc5e7da48cb3201/rpds_py-0.30.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:68f19c879420aa08f61203801423f6cd5ac5f0ac4ac82a2368a9fcd6a9a075e0", size = 362099, upload-time = "2025-11-30T20:23:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/2b/60/19f7884db5d5603edf3c6bce35408f45ad3e97e10007df0e17dd57af18f8/rpds_py-0.30.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ec7c4490c672c1a0389d319b3a9cfcd098dcdc4783991553c332a15acf7249be", size = 353192, upload-time = "2025-11-30T20:23:29.151Z" }, + { url = "https://files.pythonhosted.org/packages/bf/c4/76eb0e1e72d1a9c4703c69607cec123c29028bff28ce41588792417098ac/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f251c812357a3fed308d684a5079ddfb9d933860fc6de89f2b7ab00da481e65f", size = 384080, upload-time = "2025-11-30T20:23:30.785Z" }, + { url = "https://files.pythonhosted.org/packages/72/87/87ea665e92f3298d1b26d78814721dc39ed8d2c74b86e83348d6b48a6f31/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac98b175585ecf4c0348fd7b29c3864bda53b805c773cbf7bfdaffc8070c976f", size = 394841, upload-time = "2025-11-30T20:23:32.209Z" }, + { url = "https://files.pythonhosted.org/packages/77/ad/7783a89ca0587c15dcbf139b4a8364a872a25f861bdb88ed99f9b0dec985/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3e62880792319dbeb7eb866547f2e35973289e7d5696c6e295476448f5b63c87", size = 516670, upload-time = "2025-11-30T20:23:33.742Z" }, + { url = "https://files.pythonhosted.org/packages/5b/3c/2882bdac942bd2172f3da574eab16f309ae10a3925644e969536553cb4ee/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e7fc54e0900ab35d041b0601431b0a0eb495f0851a0639b6ef90f7741b39a18", size = 408005, upload-time = "2025-11-30T20:23:35.253Z" }, + { url = 
"https://files.pythonhosted.org/packages/ce/81/9a91c0111ce1758c92516a3e44776920b579d9a7c09b2b06b642d4de3f0f/rpds_py-0.30.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e77dc9822d3ad616c3d5759ea5631a75e5809d5a28707744ef79d7a1bcfcad", size = 382112, upload-time = "2025-11-30T20:23:36.842Z" }, + { url = "https://files.pythonhosted.org/packages/cf/8e/1da49d4a107027e5fbc64daeab96a0706361a2918da10cb41769244b805d/rpds_py-0.30.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:b4dc1a6ff022ff85ecafef7979a2c6eb423430e05f1165d6688234e62ba99a07", size = 399049, upload-time = "2025-11-30T20:23:38.343Z" }, + { url = "https://files.pythonhosted.org/packages/df/5a/7ee239b1aa48a127570ec03becbb29c9d5a9eb092febbd1699d567cae859/rpds_py-0.30.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4559c972db3a360808309e06a74628b95eaccbf961c335c8fe0d590cf587456f", size = 415661, upload-time = "2025-11-30T20:23:40.263Z" }, + { url = "https://files.pythonhosted.org/packages/70/ea/caa143cf6b772f823bc7929a45da1fa83569ee49b11d18d0ada7f5ee6fd6/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:0ed177ed9bded28f8deb6ab40c183cd1192aa0de40c12f38be4d59cd33cb5c65", size = 565606, upload-time = "2025-11-30T20:23:42.186Z" }, + { url = "https://files.pythonhosted.org/packages/64/91/ac20ba2d69303f961ad8cf55bf7dbdb4763f627291ba3d0d7d67333cced9/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:ad1fa8db769b76ea911cb4e10f049d80bf518c104f15b3edb2371cc65375c46f", size = 591126, upload-time = "2025-11-30T20:23:44.086Z" }, + { url = "https://files.pythonhosted.org/packages/21/20/7ff5f3c8b00c8a95f75985128c26ba44503fb35b8e0259d812766ea966c7/rpds_py-0.30.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:46e83c697b1f1c72b50e5ee5adb4353eef7406fb3f2043d64c33f20ad1c2fc53", size = 553371, upload-time = "2025-11-30T20:23:46.004Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/c7/81dadd7b27c8ee391c132a6b192111ca58d866577ce2d9b0ca157552cce0/rpds_py-0.30.0-cp314-cp314-win32.whl", hash = "sha256:ee454b2a007d57363c2dfd5b6ca4a5d7e2c518938f8ed3b706e37e5d470801ed", size = 215298, upload-time = "2025-11-30T20:23:47.696Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d2/1aaac33287e8cfb07aab2e6b8ac1deca62f6f65411344f1433c55e6f3eb8/rpds_py-0.30.0-cp314-cp314-win_amd64.whl", hash = "sha256:95f0802447ac2d10bcc69f6dc28fe95fdf17940367b21d34e34c737870758950", size = 228604, upload-time = "2025-11-30T20:23:49.501Z" }, + { url = "https://files.pythonhosted.org/packages/e8/95/ab005315818cc519ad074cb7784dae60d939163108bd2b394e60dc7b5461/rpds_py-0.30.0-cp314-cp314-win_arm64.whl", hash = "sha256:613aa4771c99f03346e54c3f038e4cc574ac09a3ddfb0e8878487335e96dead6", size = 222391, upload-time = "2025-11-30T20:23:50.96Z" }, + { url = "https://files.pythonhosted.org/packages/9e/68/154fe0194d83b973cdedcdcc88947a2752411165930182ae41d983dcefa6/rpds_py-0.30.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7e6ecfcb62edfd632e56983964e6884851786443739dbfe3582947e87274f7cb", size = 364868, upload-time = "2025-11-30T20:23:52.494Z" }, + { url = "https://files.pythonhosted.org/packages/83/69/8bbc8b07ec854d92a8b75668c24d2abcb1719ebf890f5604c61c9369a16f/rpds_py-0.30.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a1d0bc22a7cdc173fedebb73ef81e07faef93692b8c1ad3733b67e31e1b6e1b8", size = 353747, upload-time = "2025-11-30T20:23:54.036Z" }, + { url = "https://files.pythonhosted.org/packages/ab/00/ba2e50183dbd9abcce9497fa5149c62b4ff3e22d338a30d690f9af970561/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d08f00679177226c4cb8c5265012eea897c8ca3b93f429e546600c971bcbae7", size = 383795, upload-time = "2025-11-30T20:23:55.556Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/6f/86f0272b84926bcb0e4c972262f54223e8ecc556b3224d281e6598fc9268/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5965af57d5848192c13534f90f9dd16464f3c37aaf166cc1da1cae1fd5a34898", size = 393330, upload-time = "2025-11-30T20:23:57.033Z" }, + { url = "https://files.pythonhosted.org/packages/cb/e9/0e02bb2e6dc63d212641da45df2b0bf29699d01715913e0d0f017ee29438/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a4e86e34e9ab6b667c27f3211ca48f73dba7cd3d90f8d5b11be56e5dbc3fb4e", size = 518194, upload-time = "2025-11-30T20:23:58.637Z" }, + { url = "https://files.pythonhosted.org/packages/ee/ca/be7bca14cf21513bdf9c0606aba17d1f389ea2b6987035eb4f62bd923f25/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d3e6b26f2c785d65cc25ef1e5267ccbe1b069c5c21b8cc724efee290554419", size = 408340, upload-time = "2025-11-30T20:24:00.2Z" }, + { url = "https://files.pythonhosted.org/packages/c2/c7/736e00ebf39ed81d75544c0da6ef7b0998f8201b369acf842f9a90dc8fce/rpds_py-0.30.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:626a7433c34566535b6e56a1b39a7b17ba961e97ce3b80ec62e6f1312c025551", size = 383765, upload-time = "2025-11-30T20:24:01.759Z" }, + { url = "https://files.pythonhosted.org/packages/4a/3f/da50dfde9956aaf365c4adc9533b100008ed31aea635f2b8d7b627e25b49/rpds_py-0.30.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:acd7eb3f4471577b9b5a41baf02a978e8bdeb08b4b355273994f8b87032000a8", size = 396834, upload-time = "2025-11-30T20:24:03.687Z" }, + { url = "https://files.pythonhosted.org/packages/4e/00/34bcc2565b6020eab2623349efbdec810676ad571995911f1abdae62a3a0/rpds_py-0.30.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe5fa731a1fa8a0a56b0977413f8cacac1768dad38d16b3a296712709476fbd5", size = 415470, upload-time = "2025-11-30T20:24:05.232Z" }, + { url = 
"https://files.pythonhosted.org/packages/8c/28/882e72b5b3e6f718d5453bd4d0d9cf8df36fddeb4ddbbab17869d5868616/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:74a3243a411126362712ee1524dfc90c650a503502f135d54d1b352bd01f2404", size = 565630, upload-time = "2025-11-30T20:24:06.878Z" }, + { url = "https://files.pythonhosted.org/packages/3b/97/04a65539c17692de5b85c6e293520fd01317fd878ea1995f0367d4532fb1/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3e8eeb0544f2eb0d2581774be4c3410356eba189529a6b3e36bbbf9696175856", size = 591148, upload-time = "2025-11-30T20:24:08.445Z" }, + { url = "https://files.pythonhosted.org/packages/85/70/92482ccffb96f5441aab93e26c4d66489eb599efdcf96fad90c14bbfb976/rpds_py-0.30.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:dbd936cde57abfee19ab3213cf9c26be06d60750e60a8e4dd85d1ab12c8b1f40", size = 556030, upload-time = "2025-11-30T20:24:10.956Z" }, + { url = "https://files.pythonhosted.org/packages/20/53/7c7e784abfa500a2b6b583b147ee4bb5a2b3747a9166bab52fec4b5b5e7d/rpds_py-0.30.0-cp314-cp314t-win32.whl", hash = "sha256:dc824125c72246d924f7f796b4f63c1e9dc810c7d9e2355864b3c3a73d59ade0", size = 211570, upload-time = "2025-11-30T20:24:12.735Z" }, + { url = "https://files.pythonhosted.org/packages/d0/02/fa464cdfbe6b26e0600b62c528b72d8608f5cc49f96b8d6e38c95d60c676/rpds_py-0.30.0-cp314-cp314t-win_amd64.whl", hash = "sha256:27f4b0e92de5bfbc6f86e43959e6edd1425c33b5e69aab0984a72047f2bcf1e3", size = 226532, upload-time = "2025-11-30T20:24:14.634Z" }, + { url = "https://files.pythonhosted.org/packages/69/71/3f34339ee70521864411f8b6992e7ab13ac30d8e4e3309e07c7361767d91/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c2262bdba0ad4fc6fb5545660673925c2d2a5d9e2e0fb603aad545427be0fc58", size = 372292, upload-time = "2025-11-30T20:24:16.537Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/09/f183df9b8f2d66720d2ef71075c59f7e1b336bec7ee4c48f0a2b06857653/rpds_py-0.30.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:ee6af14263f25eedc3bb918a3c04245106a42dfd4f5c2285ea6f997b1fc3f89a", size = 362128, upload-time = "2025-11-30T20:24:18.086Z" }, + { url = "https://files.pythonhosted.org/packages/7a/68/5c2594e937253457342e078f0cc1ded3dd7b2ad59afdbf2d354869110a02/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3adbb8179ce342d235c31ab8ec511e66c73faa27a47e076ccc92421add53e2bb", size = 391542, upload-time = "2025-11-30T20:24:20.092Z" }, + { url = "https://files.pythonhosted.org/packages/49/5c/31ef1afd70b4b4fbdb2800249f34c57c64beb687495b10aec0365f53dfc4/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:250fa00e9543ac9b97ac258bd37367ff5256666122c2d0f2bc97577c60a1818c", size = 404004, upload-time = "2025-11-30T20:24:22.231Z" }, + { url = "https://files.pythonhosted.org/packages/e3/63/0cfbea38d05756f3440ce6534d51a491d26176ac045e2707adc99bb6e60a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9854cf4f488b3d57b9aaeb105f06d78e5529d3145b1e4a41750167e8c213c6d3", size = 527063, upload-time = "2025-11-30T20:24:24.302Z" }, + { url = "https://files.pythonhosted.org/packages/42/e6/01e1f72a2456678b0f618fc9a1a13f882061690893c192fcad9f2926553a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:993914b8e560023bc0a8bf742c5f303551992dcb85e247b1e5c7f4a7d145bda5", size = 413099, upload-time = "2025-11-30T20:24:25.916Z" }, + { url = "https://files.pythonhosted.org/packages/b8/25/8df56677f209003dcbb180765520c544525e3ef21ea72279c98b9aa7c7fb/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58edca431fb9b29950807e301826586e5bbf24163677732429770a697ffe6738", size = 392177, upload-time = 
"2025-11-30T20:24:27.834Z" }, + { url = "https://files.pythonhosted.org/packages/4a/b4/0a771378c5f16f8115f796d1f437950158679bcd2a7c68cf251cfb00ed5b/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:dea5b552272a944763b34394d04577cf0f9bd013207bc32323b5a89a53cf9c2f", size = 406015, upload-time = "2025-11-30T20:24:29.457Z" }, + { url = "https://files.pythonhosted.org/packages/36/d8/456dbba0af75049dc6f63ff295a2f92766b9d521fa00de67a2bd6427d57a/rpds_py-0.30.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ba3af48635eb83d03f6c9735dfb21785303e73d22ad03d489e88adae6eab8877", size = 423736, upload-time = "2025-11-30T20:24:31.22Z" }, + { url = "https://files.pythonhosted.org/packages/13/64/b4d76f227d5c45a7e0b796c674fd81b0a6c4fbd48dc29271857d8219571c/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:dff13836529b921e22f15cb099751209a60009731a68519630a24d61f0b1b30a", size = 573981, upload-time = "2025-11-30T20:24:32.934Z" }, + { url = "https://files.pythonhosted.org/packages/20/91/092bacadeda3edf92bf743cc96a7be133e13a39cdbfd7b5082e7ab638406/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:1b151685b23929ab7beec71080a8889d4d6d9fa9a983d213f07121205d48e2c4", size = 599782, upload-time = "2025-11-30T20:24:35.169Z" }, + { url = "https://files.pythonhosted.org/packages/d1/b7/b95708304cd49b7b6f82fdd039f1748b66ec2b21d6a45180910802f1abf1/rpds_py-0.30.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ac37f9f516c51e5753f27dfdef11a88330f04de2d564be3991384b2f3535d02e", size = 562191, upload-time = "2025-11-30T20:24:36.853Z" }, +] + +[[package]] +name = "sqlite-vec" +version = "0.1.7" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/50/7ad59cfd3003a2110cc366e526293de4c2520486f5ddaa8dc78b265f8d3e/sqlite_vec-0.1.7-py3-none-macosx_10_6_x86_64.whl", hash = 
"sha256:c34a136caecff4ae17d4c0cc268fcda89764ee870039caa21431e8e3fb2f4d48", size = 131171, upload-time = "2026-03-17T07:42:50.438Z" }, + { url = "https://files.pythonhosted.org/packages/e6/c9/1cd2f59b539096cd2ce6b540247b2dfe3c47ba04d9368b5e8e3dc86498d4/sqlite_vec-0.1.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6d272593d1b45ec7ea289b160ee6e5fafbaa6e1f5ba15f1305c012b0bda43653", size = 165434, upload-time = "2026-03-17T07:42:51.555Z" }, + { url = "https://files.pythonhosted.org/packages/75/91/30c3c382140dcc7bc6e3a07eac7ca610a2b5b70eb9bc7066dc3e7f748d58/sqlite_vec-0.1.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d27746d8e254a390bd15574aed899a0b9bb915b5321eb130a9c09722898cc03", size = 160076, upload-time = "2026-03-17T07:42:52.451Z" }, + { url = "https://files.pythonhosted.org/packages/59/56/6ff304d917ee79da769708dad0aed5fd34c72cbd0ae5e38bcc56cdc652a4/sqlite_vec-0.1.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux1_x86_64.whl", hash = "sha256:ad654283cb9c059852ce2d82018c757b06a705ada568f8b126022a131189818e", size = 163388, upload-time = "2026-03-17T07:42:53.516Z" }, + { url = "https://files.pythonhosted.org/packages/8b/27/fb1b6e3f9072854fe405f7aa99c46d4b465e84c9cec2ff7778edf29ecbbd/sqlite_vec-0.1.7-py3-none-win_amd64.whl", hash = "sha256:0c67877a87cb49426237b950237e82dbeb77778ab2ba89bea859f391fd169382", size = 292804, upload-time = "2026-03-17T07:42:54.325Z" }, +] + +[[package]] +name = "sse-starlette" +version = "3.3.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "starlette" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/26/8c/f9290339ef6d79badbc010f067cd769d6601ec11a57d78569c683fb4dd87/sse_starlette-3.3.4.tar.gz", hash = "sha256:aaf92fc067af8a5427192895ac028e947b484ac01edbc3caf00e7e7137c7bef1", size = 32427, upload-time = "2026-03-29T09:00:23.307Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f8/7f/3de5402f39890ac5660b86bcf5c03f9d855dad5c4ed764866d7b592b46fd/sse_starlette-3.3.4-py3-none-any.whl", hash = "sha256:84bb06e58939a8b38d8341f1bc9792f06c2b53f48c608dd207582b664fc8f3c1", size = 14330, upload-time = "2026-03-29T09:00:21.846Z" }, +] + +[[package]] +name = "starlette" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/81/69/17425771797c36cded50b7fe44e850315d039f28b15901ab44839e70b593/starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149", size = 2655289, upload-time = "2026-03-22T18:29:46.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/c9/584bc9651441b4ba60cc4d557d8a547b5aff901af35bda3a4ee30c819b82/starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b", size = 72651, upload-time = "2026-03-22T18:29:45.111Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = 
"typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.42.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e3/ad/4a96c425be6fb67e0621e62d86c402b4a17ab2be7f7c055d9bd2f638b9e2/uvicorn-0.42.0.tar.gz", hash = "sha256:9b1f190ce15a2dd22e7758651d9b6d12df09a13d51ba5bf4fc33c383a48e1775", size = 85393, upload-time = "2026-03-16T06:19:50.077Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/89/f8827ccff89c1586027a105e5630ff6139a64da2515e24dafe860bd9ae4d/uvicorn-0.42.0-py3-none-any.whl", hash = "sha256:96c30f5c7abe6f74ae8900a70e92b85ad6613b745d4879eb9b16ccad15645359", size = 68830, upload-time = "2026-03-16T06:19:48.325Z" }, +] + +[[package]] +name = "watchdog" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/24/d9be5cd6642a6aa68352ded4b4b10fb0d7889cb7f45814fb92cecd35f101/watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c", size = 96393, upload-time = "2024-11-01T14:06:31.756Z" }, + { url = "https://files.pythonhosted.org/packages/63/7a/6013b0d8dbc56adca7fdd4f0beed381c59f6752341b12fa0886fa7afc78b/watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2", size = 88392, upload-time = "2024-11-01T14:06:32.99Z" }, + { url = "https://files.pythonhosted.org/packages/d1/40/b75381494851556de56281e053700e46bff5b37bf4c7267e858640af5a7f/watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c", size = 89019, upload-time = "2024-11-01T14:06:34.963Z" }, + { url = "https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" }, + { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" }, + { url = "https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" }, + { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" 
}, + { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = "2024-11-01T14:06:45.084Z" }, + { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" }, + { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" }, + { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" }, + { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" }, + { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" }, + { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" }, + { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" }, + { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, +] From 4dc02963c1b17e542979fe1a8aa513e2b1c669f2 Mon Sep 17 00:00:00 2001 From: Paul Kyle Date: Sun, 19 Apr 2026 22:08:46 -0700 Subject: [PATCH 2/2] Keep only OpenClaw migration support public 
--- .gitignore | 2 +- docs/GIT-MEMORY.md | 6 +- docs/HOW-MEMORY-WORKS.md | 17 +- palinode/api/server.py | 15 -- palinode/cli/__init__.py | 3 +- palinode/cli/_api.py | 5 - palinode/cli/manage.py | 10 -- palinode/core/git_tools.py | 2 +- palinode/migration/__init__.py | 6 + palinode/migration/openclaw.py | 308 +++++++++++++++++++++++++++++++++ 10 files changed, 325 insertions(+), 49 deletions(-) create mode 100644 palinode/migration/__init__.py create mode 100644 palinode/migration/openclaw.py diff --git a/.gitignore b/.gitignore index 4e5e3b5..db30d69 100644 --- a/.gitignore +++ b/.gitignore @@ -10,7 +10,7 @@ insights/ daily/ archive/ inbox/ -migration/ +/migration/ research/ # ============================================================ diff --git a/docs/GIT-MEMORY.md b/docs/GIT-MEMORY.md index 0eea69b..1c10f17 100644 --- a/docs/GIT-MEMORY.md +++ b/docs/GIT-MEMORY.md @@ -21,15 +21,15 @@ palinode diff --days 7 Find out *when* and *why* a specific fact was recorded. Palinode's blame shows **two dates**: the git commit date (when the file was last touched) and the frontmatter origin date (when the memory was first captured). -This is critical for backfilled memories: a fact captured by Mem0 on February 11th and migrated to Palinode on March 29th shows both dates: +This is critical for imported memories: a fact captured in an external system on February 11th and migrated to Palinode on March 29th shows both dates: ```bash palinode blame projects/my-app-milestones.md --search "deploy" ``` ``` ## Blame: projects/my-app-milestones.md -Origin: 2026-02-11 | Source: mem0-backfill -Note: Git shows 2026-03-29 (migration date). True origin is 2026-02-11 (from mem0-backfill). +Origin: 2026-02-11 | Source: openclaw-migration +Note: Git shows 2026-03-29 (migration date). True origin is 2026-02-11 (from openclaw-migration). 
^dcdbf5f (2026-03-29) - [2026-02-15] M5 Phase 1 complete: all 9 modules deployed ``` diff --git a/docs/HOW-MEMORY-WORKS.md b/docs/HOW-MEMORY-WORKS.md index 17a02ea..27f0faf 100644 --- a/docs/HOW-MEMORY-WORKS.md +++ b/docs/HOW-MEMORY-WORKS.md @@ -378,8 +378,8 @@ Apr 13 — Manual edit: Alice corrected a fact via palinode_save (commit def567 ```text ## Blame: projects/my-app-milestones.md -Origin: 2026-02-11 | Source: mem0-backfill -Note: Git shows 2026-03-29 (migration date). True origin is 2026-02-11 (from mem0-backfill). +Origin: 2026-02-11 | Source: openclaw-migration +Note: Git shows 2026-03-29 (migration date). True origin is 2026-02-11 (from openclaw-migration). ^dcdbf5f (2026-03-29) - [2026-02-11] M5 Phase 1 complete: all 9 modules deployed ^dcdbf5f (2026-03-29) - [2026-02-15] M2 closed: auth + notification systems @@ -387,7 +387,7 @@ abc1234 (2026-04-06) - M6 Phase 1 spec ready: routing fixed def5678 (2026-04-13) - M6 Phase 2: real-time sync live ```text -For backfilled memories, git blame shows when the file was migrated. The frontmatter `created_at` field preserves the true origin date from the source system (Mem0, QC MCP, etc.). Palinode surfaces both so you always know: +For imported memories, git blame shows when the file was migrated. The frontmatter `created_at` field preserves the true origin date from the source system. Palinode surfaces both so you always know: - **When the fact was first captured** (frontmatter `created_at`) - **When this file was last modified** (git blame date) @@ -397,16 +397,9 @@ For memories captured natively by Palinode (not backfilled), both dates match. **`git log --follow`** shows the complete history of a file — every consolidation, every manual edit, every backfill. 
-### Backfill Provenance +### Import Provenance -Palinode has already absorbed memories from two external systems: - -| Source | Memories | Classified By | Status | -| --- | --- | --- | --- | -| **Mem0** (Qdrant) | 4,637 → 3,645 (after dedup + skip) | Qwen 72B | ✅ Done | -| **QC MCP** (PostgreSQL) | 14,000+ contexts | TBD | Planned | - -Backfilled memories enter `palinode-data` with `source: "mem0-backfill"` in their frontmatter. As consolidation updates them, each change gets its own commit — gradually building provenance that Mem0 never had. +Imported memories enter `palinode-data` with a source marker in frontmatter, such as `source: "openclaw-migration"`. As consolidation updates them, each change gets its own commit — building a readable provenance trail on top of the imported material. ### Why This Matters diff --git a/palinode/api/server.py b/palinode/api/server.py index 6f495c3..833e3e8 100644 --- a/palinode/api/server.py +++ b/palinode/api/server.py @@ -1408,21 +1408,6 @@ def migrate_openclaw_api(req: MigrateOpenClawRequest) -> dict: raise HTTPException(status_code=500, detail=str(exc)) from exc -@app.post("/migrate/mem0") -def migrate_mem0_api() -> dict[str, str]: - """Run the Mem0 backfill pipeline. - - One-time migration: exports from Qdrant, deduplicates, classifies, - and generates Palinode markdown files. - """ - from palinode.migration.run_mem0_backfill import main as run_backfill - try: - run_backfill() - return {"status": "success", "message": "Mem0 backfill complete. 
Review files and reindex."} - except Exception as e: - raise _safe_500(e, "Backfill failed") - - def main() -> None: """Invokes Uvicorn CLI runner.""" import uvicorn diff --git a/palinode/cli/__init__.py b/palinode/cli/__init__.py index 43babf0..4b24353 100644 --- a/palinode/cli/__init__.py +++ b/palinode/cli/__init__.py @@ -9,7 +9,7 @@ from palinode.cli.consolidate import consolidate from palinode.cli.trigger import trigger from palinode.cli.doctor import doctor -from palinode.cli.manage import reindex, rebuild_fts, split_layers, bootstrap_ids, migrate_mem0 +from palinode.cli.manage import reindex, rebuild_fts, split_layers, bootstrap_ids from palinode.cli.git import blame, history, rollback, push from palinode.cli.query import entities from palinode.cli.session_end import session_end @@ -61,7 +61,6 @@ def banner() -> None: main.add_command(rebuild_fts) main.add_command(split_layers) main.add_command(bootstrap_ids) -main.add_command(migrate_mem0) # Git main.add_command(blame) diff --git a/palinode/cli/_api.py b/palinode/cli/_api.py index a16e017..9faa4c1 100644 --- a/palinode/cli/_api.py +++ b/palinode/cli/_api.py @@ -116,11 +116,6 @@ def migrate_openclaw(self, path: str, dry_run: bool = False): response.raise_for_status() return response.json() - def migrate_mem0(self): - response = self.client.post("/migrate/mem0", timeout=600.0) - response.raise_for_status() - return response.json() - def blame(self, file_path: str, search: str = None): params: dict = {} if search: diff --git a/palinode/cli/manage.py b/palinode/cli/manage.py index 78348db..6a78dfc 100644 --- a/palinode/cli/manage.py +++ b/palinode/cli/manage.py @@ -41,13 +41,3 @@ def bootstrap_ids(fmt): print_result(result, fmt=OutputFormat(fmt) if fmt else get_default_format()) except Exception as e: console.print(f"[red]Error bootstrapping IDs: {str(e)}[/red]") - -@click.command(name="migrate-mem0") -@click.option("--format", "fmt", type=click.Choice(["json", "text"]), help="Output format") -def 
migrate_mem0(fmt): - """Backfill from Mem0/Qdrant.""" - try: - result = api_client.migrate_mem0() - print_result(result, fmt=OutputFormat(fmt) if fmt else get_default_format()) - except Exception as e: - console.print(f"[red]Error migrating: {str(e)}[/red]") diff --git a/palinode/core/git_tools.py b/palinode/core/git_tools.py index 0f28c67..b000945 100644 --- a/palinode/core/git_tools.py +++ b/palinode/core/git_tools.py @@ -121,7 +121,7 @@ def blame(file_path: str, search: str | None = None) -> str: shows the true origin date. Output format: - [git: 2026-03-29, origin: 2026-02-11, source: mem0-backfill] content... + [git: 2026-03-29, origin: 2026-02-11, source: openclaw-migration] content... [git: 2026-04-06, origin: 2026-04-06, source: consolidation] content... Args: diff --git a/palinode/migration/__init__.py b/palinode/migration/__init__.py new file mode 100644 index 0000000..e14bf07 --- /dev/null +++ b/palinode/migration/__init__.py @@ -0,0 +1,6 @@ +""" +Palinode Migration Tools + +OpenClaw import support for converting MEMORY.md sections into +Palinode's typed markdown format. +""" diff --git a/palinode/migration/openclaw.py b/palinode/migration/openclaw.py new file mode 100644 index 0000000..ef1c3ef --- /dev/null +++ b/palinode/migration/openclaw.py @@ -0,0 +1,308 @@ +""" +OpenClaw Memory Migration + +Parses MEMORY.md (OpenClaw's flat memory format) into structured Palinode +markdown files, one file per ## section, with heuristic type detection. 
+ +Type heuristics (in priority order): + person — section mentions people names / "who" + decision — section contains "decided", "chose", "because" + project — section mentions projects / tasks + insight — everything else +""" +from __future__ import annotations + +import hashlib +import logging +import os +import re +import subprocess +from datetime import UTC, datetime +from collections.abc import Callable +from typing import Any + +import yaml + +from palinode.core.config import config +from palinode.core.parser import slugify as _base_slugify + +logger = logging.getLogger("palinode.migration.openclaw") + +# ── Type-detection keywords ─────────────────────────────────────────────────── + +_PERSON_KEYWORDS = re.compile( + r"\b(who|person|people|colleague|friend|user|team|member|contact)\b", + re.IGNORECASE, +) +_DECISION_KEYWORDS = re.compile( + r"\b(decided|decide|chose|choose|because|rationale|reasoning|resolution)\b", + re.IGNORECASE, +) +_PROJECT_KEYWORDS = re.compile( + r"\b(project|task|sprint|milestone|backlog|epic|ticket|issue|feature|roadmap)\b", + re.IGNORECASE, +) + +# Subdirectory for each type +_TYPE_DIR: dict[str, str] = { + "person": "people", + "decision": "decisions", + "project": "projects", + "insight": "insights", +} + + +def _detect_type(heading: str, body: str) -> str: + """Return a memory type string based on heading + body heuristics. + + Uses a scoring approach: each keyword match adds a point for that type. + The heading is weighted 3× more than the body to reflect its signal + strength. Ties are broken by priority: person > decision > project. 
+ """ + scores: dict[str, int] = {"person": 0, "decision": 0, "project": 0} + for text, weight in ((heading, 3), (body, 1)): + scores["person"] += len(_PERSON_KEYWORDS.findall(text)) * weight + scores["decision"] += len(_DECISION_KEYWORDS.findall(text)) * weight + scores["project"] += len(_PROJECT_KEYWORDS.findall(text)) * weight + + # Pick highest-scoring type; priority order breaks ties + best = max(("person", "decision", "project"), key=lambda t: scores[t]) + if scores[best] == 0: + return "insight" + return best + + +def _slugify(text: str) -> str: + """Convert a heading to a filesystem-safe slug (max 60 chars).""" + slug = _base_slugify(text) + return slug[:60] or "section" + + +def _validate_source_path(path: str) -> str: + """Return a resolved, validated absolute path to the MEMORY.md source. + + Raises ValueError for null bytes, path traversal, and symlinks that + escape the resolved path. + """ + if "\x00" in path: + raise ValueError("Null bytes are not allowed in path") + resolved = os.path.realpath(os.path.abspath(path)) + # Reject if any path component is ".." (belt-and-suspenders) + if ".." in path.split(os.sep): + raise ValueError("Path traversal is not allowed") + return resolved + + +def _sha256(content: str) -> str: + return hashlib.sha256(content.encode(), usedforsecurity=False).hexdigest() + + +def _existing_hashes(memory_dir: str) -> set[str]: + """Collect SHA-256 hashes of all existing .md files for dedup.""" + hashes: set[str] = set() + for root, _dirs, files in os.walk(memory_dir): + for fname in files: + if not fname.endswith(".md"): + continue + fpath = os.path.join(root, fname) + try: + with open(fpath, "r", encoding="utf-8") as f: + hashes.add(_sha256(f.read())) + except OSError: + pass + return hashes + + +def _parse_raw(raw: str) -> list[dict[str, Any]]: + """Parse raw MEMORY.md text into section dicts (no I/O).""" + sections: list[dict[str, Any]] = [] + # parts[0] is any content before the first ## heading — skip it. 
+ parts = re.split(r"^##\s+", raw, flags=re.MULTILINE) + for part in parts[1:]: + if not part.strip(): + continue + lines = part.splitlines() + heading = lines[0].strip() + body = "\n".join(lines[1:]).strip() + if not heading: + continue + sections.append( + { + "heading": heading, + "body": body, + "type": _detect_type(heading, body), + "slug": _slugify(heading), + } + ) + return sections + + +def parse_memory_md(source_path: str) -> list[dict[str, Any]]: + """Parse a MEMORY.md file into a list of section dicts. + + Each dict contains: + heading: str — the ## heading text + body: str — the section body (stripped) + type: str — detected memory type + slug: str — filesystem-safe slug derived from heading + """ + validated = _validate_source_path(source_path) + with open(validated, "r", encoding="utf-8") as f: + raw = f.read() + return _parse_raw(raw) + + +def _build_file_content( + section: dict[str, Any], + now_iso: str, + source_path: str, +) -> tuple[str, str]: + """Return (relative_path, file_content) for a section. + + relative_path is relative to memory_dir. 
+ """ + mem_type = section["type"] + subdir = _TYPE_DIR[mem_type] + slug = section["slug"] + rel_path = f"{subdir}/{slug}.md" + + frontmatter = { + "id": f"{subdir}-{slug}", + "category": subdir, + "name": section["heading"], + "last_updated": now_iso, + "source": "openclaw-migration", + "source_file": os.path.basename(source_path), + } + fm_str = yaml.dump(frontmatter, default_flow_style=False, allow_unicode=True) + content = f"---\n{fm_str}---\n\n# {section['heading']}\n\n{section['body']}\n" + return rel_path, content + + +def _git_commit(memory_dir: str, staged_files: list[str], log_file: str | None) -> None: + """Stage and commit migrated files in the memory repo.""" + files_to_add = [f for f in staged_files + ([log_file] if log_file else []) if f] + if not files_to_add: + return + subprocess.run( + ["git", "add", "--"] + files_to_add, + cwd=memory_dir, + check=False, + capture_output=True, + ) + date_str = datetime.now(UTC).strftime("%Y-%m-%d") + msg = ( + f"palinode migrate openclaw: import {len(staged_files)} sections " + f"from MEMORY.md ({date_str})" + ) + subprocess.run( + ["git", "commit", "-m", msg], + cwd=memory_dir, + check=False, + capture_output=True, + ) + + +def run_migration( + source_path: str, + dry_run: bool = False, + review_callback: Callable[[list[dict[str, Any]]], list[dict[str, Any]]] | None = None, +) -> dict[str, Any]: + """Import a MEMORY.md file into Palinode. + + Args: + source_path: Path to the MEMORY.md file to import. + dry_run: If True, parse and report without writing any files. + review_callback: Optional function that receives the parsed section list + and returns a (possibly modified) list. Sections can have their + ``type`` changed or be removed entirely. Called after parsing, + before any file I/O. 
+ + Returns: + dict with keys: + sections_found: int + files_created: list[str] — relative paths written + files_skipped: list[str] — relative paths skipped (dedup) + log_file: str | None — relative path of the migration log + dry_run: bool + """ + memory_dir = os.path.realpath(config.memory_dir) + validated_source = _validate_source_path(source_path) + + with open(validated_source, "r", encoding="utf-8") as f: + sections = _parse_raw(f.read()) + + if review_callback is not None: + sections = review_callback(sections) + + now_iso = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%SZ") + + existing_hashes: set[str] = set() if dry_run else _existing_hashes(memory_dir) + + files_created: list[str] = [] + files_skipped: list[str] = [] + written_abs: list[str] = [] + + for section in sections: + rel_path, content = _build_file_content(section, now_iso, validated_source) + content_hash = _sha256(content) + + if content_hash in existing_hashes: + files_skipped.append(rel_path) + continue + + if dry_run: + files_created.append(rel_path) + continue + + abs_path = os.path.join(memory_dir, rel_path) + os.makedirs(os.path.dirname(abs_path), exist_ok=True) + with open(abs_path, "w", encoding="utf-8") as f: + f.write(content) + + existing_hashes.add(content_hash) + files_created.append(rel_path) + written_abs.append(abs_path) + logger.info(f"Created {rel_path}") + + # Write migration log + log_rel: str | None = None + log_abs: str | None = None + if not dry_run and (files_created or files_skipped): + date_str = datetime.now(UTC).strftime("%Y-%m-%d") + log_rel = f"migrations/openclaw-{date_str}.md" + log_abs = os.path.join(memory_dir, log_rel) + os.makedirs(os.path.dirname(log_abs), exist_ok=True) + + log_lines = [ + f"# OpenClaw Migration — {date_str}", + "", + f"Source: `{os.path.basename(validated_source)}`", + f"Sections found: {len(sections)}", + f"Files created: {len(files_created)}", + f"Files skipped (dedup): {len(files_skipped)}", + "", + "## Created", + "", + ] + for fp in 
files_created: + log_lines.append(f"- `{fp}`") + if files_skipped: + log_lines.extend(["", "## Skipped (duplicate content)", ""]) + for fp in files_skipped: + log_lines.append(f"- `{fp}`") + log_lines.append("") + + with open(log_abs, "w", encoding="utf-8") as f: + f.write("\n".join(log_lines)) + + if not dry_run and written_abs: + _git_commit(memory_dir, written_abs, log_abs) + + return { + "sections_found": len(sections), + "files_created": files_created, + "files_skipped": files_skipped, + "log_file": log_rel, + "dry_run": dry_run, + }