2 changes: 2 additions & 0 deletions README.md
@@ -396,6 +396,8 @@ list_directory(dir_name, depth) - Browse directory contents with filtering
**Search & Discovery:**
```
search(query, page, page_size) - Search across your knowledge base
search_notes(query, page, page_size, search_type, types, entity_types, after_date, metadata_filters, tags, status, project) - Search with filters
search_by_metadata(filters, limit, offset, project) - Structured frontmatter search
```

**Project Management:**
17 changes: 15 additions & 2 deletions docs/ARCHITECTURE.md
@@ -214,15 +214,28 @@ Example tool using typed client:

```python
@mcp.tool()
-async def search_notes(query: str, project: str | None = None) -> SearchResponse:
+async def search_notes(
+    query: str,
+    project: str | None = None,
+    metadata_filters: dict | None = None,
+    tags: list[str] | None = None,
+    status: str | None = None,
+) -> SearchResponse:
    async with get_client() as client:
        active_project = await get_active_project(client, project)

        # Import client inside function to avoid circular imports
        from basic_memory.mcp.clients import SearchClient
+        from basic_memory.schemas.search import SearchQuery

+        search_query = SearchQuery(
+            text=query,
+            metadata_filters=metadata_filters,
+            tags=tags,
+            status=status,
+        )
        search_client = SearchClient(client, active_project.external_id)
-        return await search_client.search(query)
+        return await search_client.search(search_query.model_dump())
```
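
For reference, a minimal sketch of the `SearchQuery` shape this example assumes; the actual model in `basic_memory.schemas.search` may define more fields and validation:

```python
# Assumed shape only -- mirrors the fields used above, not the full schema.
from pydantic import BaseModel


class SearchQuery(BaseModel):
    text: str
    metadata_filters: dict | None = None
    tags: list[str] | None = None
    status: str | None = None
```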

## Sync Coordination
50 changes: 49 additions & 1 deletion docs/ai-assistant-guide-extended.md
@@ -1038,6 +1038,35 @@ recent_decisions = await search_notes(
)
```

**Structured frontmatter filters**:

```python
# Filter by tags and status
results = await search_notes(
query="authentication",
tags=["security"],
status="in-progress",
project="main"
)

# Complex metadata filters (supports $in, $gt, $gte, $lt, $lte, $between; range operators sketched below)
results = await search_notes(
query="api design",
metadata_filters={
"type": "spec",
"priority": {"$in": ["high", "critical"]},
"tags": ["architecture"]
},
project="main"
)

# Metadata-only search
results = await search_by_metadata(
filters={"type": "spec", "status": "in-progress"},
project="main"
)
```
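
The examples above only exercise `$in`; the comparison and range operators can be combined the same way. A hedged sketch follows (the field names and the two-element list shape for `$between` are assumptions, not confirmed by the documentation above):

```python
# Hypothetical fields; operator value shapes are assumed.
results = await search_notes(
    query="performance",
    metadata_filters={
        "priority": {"$gte": 2},                                 # numeric comparison
        "due_date": {"$between": ["2026-01-01", "2026-03-31"]},  # inclusive range (assumed)
    },
    project="main"
)
```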

### Search Types

**Text search (default)**:
@@ -2861,7 +2890,7 @@ contents = await list_directory(

### Search & Discovery

-**search_notes(query, page, page_size, search_type, types, entity_types, after_date, project)**
+**search_notes(query, page, page_size, search_type, types, entity_types, after_date, metadata_filters, tags, status, project)**
- Search across knowledge base
- Parameters:
- `query` (required): Search query
@@ -2871,6 +2900,9 @@
- `types` (optional): Entity type filter
- `entity_types` (optional): Observation category filter
- `after_date` (optional): Date filter (ISO format)
- `metadata_filters` (optional): Structured frontmatter filters (dict)
- `tags` (optional): Frontmatter tags filter (list)
- `status` (optional): Frontmatter status filter (string)
- `project` (required unless default_project_mode): Target project
- Returns: Matching entities with scores
- Example:
@@ -2883,6 +2915,22 @@ results = await search_notes(
)
```

**search_by_metadata(filters, limit, offset, project)**
- Metadata-only search using structured frontmatter
- Parameters:
- `filters` (required): Dict mapping frontmatter fields to values (supports $in, $gt/$gte/$lt/$lte, $between; see the sketch after the example below)
- `limit` (optional): Max results (default: 20)
- `offset` (optional): Pagination offset (default: 0)
- `project` (required unless default_project_mode): Target project
- Returns: Matching entities
- Example:
```python
results = await search_by_metadata(
filters={"type": "spec", "status": "in-progress"},
project="main"
)
```
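
As noted for `filters`, comparison operators can be mixed with plain equality; a hedged sketch with pagination (the `created` field and the `$between` value shape are assumptions):

```python
# Hypothetical frontmatter field; $between assumed to take a [low, high] pair.
results = await search_by_metadata(
    filters={
        "type": "spec",
        "created": {"$between": ["2026-01-01", "2026-01-31"]}
    },
    limit=10,
    offset=10,  # second page of 10 results
    project="main"
)
```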

### Project Management

**list_memory_projects()**
@@ -0,0 +1,152 @@
"""Add structured metadata indexes for entity frontmatter

Revision ID: d7e8f9a0b1c2
Revises: g9a0b3c4d5e6
Create Date: 2026-01-31 12:00:00.000000

"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op
from sqlalchemy import text


def column_exists(connection, table: str, column: str) -> bool:
"""Check if a column exists in a table (idempotent migration support)."""
if connection.dialect.name == "postgresql":
result = connection.execute(
text(
"SELECT 1 FROM information_schema.columns "
"WHERE table_name = :table AND column_name = :column"
),
{"table": table, "column": column},
)
return result.fetchone() is not None
# SQLite
result = connection.execute(text(f"PRAGMA table_info({table})"))
columns = [row[1] for row in result]
return column in columns


def index_exists(connection, index_name: str) -> bool:
"""Check if an index exists (idempotent migration support)."""
if connection.dialect.name == "postgresql":
result = connection.execute(
text("SELECT 1 FROM pg_indexes WHERE indexname = :index_name"),
{"index_name": index_name},
)
return result.fetchone() is not None
# SQLite
result = connection.execute(
text("SELECT 1 FROM sqlite_master WHERE type='index' AND name = :index_name"),
{"index_name": index_name},
)
return result.fetchone() is not None


# revision identifiers, used by Alembic.
revision: str = "d7e8f9a0b1c2"
down_revision: Union[str, None] = "6830751f5fb6"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
"""Add JSONB/GiN indexes for Postgres and generated columns for SQLite."""
connection = op.get_bind()
dialect = connection.dialect.name

if dialect == "postgresql":
# Ensure JSONB for efficient indexing
result = connection.execute(
text(
"SELECT data_type FROM information_schema.columns "
"WHERE table_name = 'entity' AND column_name = 'entity_metadata'"
)
).fetchone()
if result and result[0] != "jsonb":
op.execute(
"ALTER TABLE entity ALTER COLUMN entity_metadata "
"TYPE jsonb USING entity_metadata::jsonb"
)

# General JSONB GIN index
op.execute(
"CREATE INDEX IF NOT EXISTS idx_entity_metadata_gin "
"ON entity USING GIN (entity_metadata jsonb_path_ops)"
)

# Common field indexes
op.execute(
"CREATE INDEX IF NOT EXISTS idx_entity_tags_json "
"ON entity USING GIN ((entity_metadata -> 'tags'))"
)
op.execute(
"CREATE INDEX IF NOT EXISTS idx_entity_frontmatter_type "
"ON entity ((entity_metadata ->> 'type'))"
)
op.execute(
"CREATE INDEX IF NOT EXISTS idx_entity_frontmatter_status "
"ON entity ((entity_metadata ->> 'status'))"
)
return

# SQLite: add generated columns for common frontmatter fields
if not column_exists(connection, "entity", "tags_json"):
op.add_column(
"entity",
sa.Column(
"tags_json",
sa.Text(),
sa.Computed("json_extract(entity_metadata, '$.tags')", persisted=True),
),
)
if not column_exists(connection, "entity", "frontmatter_status"):
op.add_column(
"entity",
sa.Column(
"frontmatter_status",
sa.Text(),
sa.Computed("json_extract(entity_metadata, '$.status')", persisted=True),
),
)
if not column_exists(connection, "entity", "frontmatter_type"):
op.add_column(
"entity",
sa.Column(
"frontmatter_type",
sa.Text(),
sa.Computed("json_extract(entity_metadata, '$.type')", persisted=True),
),
)

# Index generated columns
if not index_exists(connection, "idx_entity_tags_json"):
op.create_index("idx_entity_tags_json", "entity", ["tags_json"])
if not index_exists(connection, "idx_entity_frontmatter_status"):
op.create_index("idx_entity_frontmatter_status", "entity", ["frontmatter_status"])
if not index_exists(connection, "idx_entity_frontmatter_type"):
op.create_index("idx_entity_frontmatter_type", "entity", ["frontmatter_type"])


def downgrade() -> None:
"""Best-effort downgrade (drop indexes, revert JSONB on Postgres)."""
connection = op.get_bind()
dialect = connection.dialect.name

if dialect == "postgresql":
op.execute("DROP INDEX IF EXISTS idx_entity_frontmatter_status")
op.execute("DROP INDEX IF EXISTS idx_entity_frontmatter_type")
op.execute("DROP INDEX IF EXISTS idx_entity_tags_json")
op.execute("DROP INDEX IF EXISTS idx_entity_metadata_gin")
op.execute(
"ALTER TABLE entity ALTER COLUMN entity_metadata TYPE json USING entity_metadata::json"
)
return

# SQLite: drop indexes (dropping generated columns requires table rebuild)
op.execute("DROP INDEX IF EXISTS idx_entity_frontmatter_status")
op.execute("DROP INDEX IF EXISTS idx_entity_frontmatter_type")
op.execute("DROP INDEX IF EXISTS idx_entity_tags_json")
28 changes: 4 additions & 24 deletions src/basic_memory/api/app.py
@@ -8,17 +8,6 @@

from basic_memory import __version__ as version
from basic_memory.api.container import ApiContainer, set_container
-from basic_memory.api.routers import (
-    directory_router,
-    importer_router,
-    knowledge,
-    management,
-    memory,
-    project,
-    resource,
-    search,
-    prompt_router,
-)
from basic_memory.api.v2.routers import (
knowledge_router as v2_knowledge,
project_router as v2_project,
@@ -90,19 +79,10 @@ async def lifespan(app: FastAPI): # pragma: no cover
app.include_router(v2_importer, prefix="/v2/projects/{project_id}")
app.include_router(v2_project, prefix="/v2")

-# Include v1 routers (/{project} is a catch-all, must come after specific prefixes)
-app.include_router(knowledge.router, prefix="/{project}")
-app.include_router(memory.router, prefix="/{project}")
-app.include_router(resource.router, prefix="/{project}")
-app.include_router(search.router, prefix="/{project}")
-app.include_router(project.project_router, prefix="/{project}")
-app.include_router(directory_router.router, prefix="/{project}")
-app.include_router(prompt_router.router, prefix="/{project}")
-app.include_router(importer_router.router, prefix="/{project}")
-
-# Project resource router works across projects
-app.include_router(project.project_resource_router)
-app.include_router(management.router)
+# Legacy web app proxy paths (compat with /proxy/projects/projects)
+app.include_router(v2_project, prefix="/proxy/projects")
+
+# V2 routers are the only public API surface


@app.exception_handler(Exception)
11 changes: 0 additions & 11 deletions src/basic_memory/api/routers/__init__.py

This file was deleted.
