Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .env.example
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ AZURE_OPENAI_API_VERSION=

# Additional info for Knowledge
AZURE_AI_SEARCH_ENDPOINT=https://<your-search-service-name>.search.windows.net/
AZURE_AI_SEARCH_API_VERSION=2025-03-01-preview
AZURE_AI_SEARCH_API_VERSION=2025-05-01-preview
SEARCH_AUTHENTICATION_METHOD=api-search-key
AZURE_AI_SEARCH_API_KEY=your_api_key

Expand Down
3 changes: 2 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ A Model Context Protocol server for Azure AI Foundry, providing a unified set of
| Category | Tool | Description |
|---|---|---|
| **Index** | `list_index_names` | Retrieve all names of indexes from the AI Search Service |
| | `list_indexes_with_descriptions` | Retrieve the names and descriptions of all indexes from the AI Search Service |
| | `list_index_schemas` | Retrieve all index schemas from the AI Search Service |
| | `retrieve_index_schema` | Retrieve the schema for a specific index from the AI Search Service |
| | `create_index` | Creates a new index |
Expand Down Expand Up @@ -172,7 +173,7 @@ See [example .env file](./clients/python/pydantic-ai/.env.example) for a sample
| -------------- | ----------------------------- | ---------------------------------- | ------------------------------------------------ |
| **Model** | `GITHUB_TOKEN` | No | GitHub token for testing models for free with rate limits. |
| **Knowledge** | `AZURE_AI_SEARCH_ENDPOINT` | Always | The endpoint URL for your Azure AI Search service. It should look like this: `https://<your-search-service-name>.search.windows.net/`. |
| | `AZURE_AI_SEARCH_API_VERSION` | No | API Version to use. Defaults to `2025-03-01-preview`. |
| | `AZURE_AI_SEARCH_API_VERSION` | No | API Version to use. Defaults to `2025-05-01-preview`. |
| | `SEARCH_AUTHENTICATION_METHOD`| Always | `service-principal` or `api-search-key`. |
| | `AZURE_TENANT_ID` | Yes when using `service-principal` | The ID of your Azure Active Directory tenant. |
| | `AZURE_CLIENT_ID` | Yes when using `service-principal` | The ID of your Service Principal (app registration) |
Expand Down
3 changes: 2 additions & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@ dependencies = [
"azure-mgmt-cognitiveservices>=13.0.0",
"azure-identity>=1.0",
"jinja2~=3.0",
"azure-search-documents>=11.5.2",
"azure-search-documents>=11.7.0b1",
"python-dotenv>=1.0.1",
"azure-cli>=2.60.0",
"azure-ai-evaluation>=1.9.0",
"azure-ai-projects>=1.0.0b11"
Expand Down
15 changes: 15 additions & 0 deletions src/mcp_foundry/mcp_foundry_knowledge/data_access_objects/dao.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,21 @@ def retrieve_index_names(self) -> list[str]:

return results

def retrieve_indexes_with_descriptions(self) -> list[MutableMapping[str, Any]]:
    """
    List every search index as a small summary record.

    Returns:
        list[MutableMapping[str, Any]]: One mapping per index with exactly two
        keys, 'name' and 'description'. The 'description' value is None when
        the index has no description configured.
    """
    # The service client yields SearchIndex objects; we only surface the
    # two identifying attributes callers need for a lightweight listing.
    listed_indexes = self.client.list_indexes()

    return [
        dict(name=idx.name, description=idx.description)
        for idx in listed_indexes
    ]

def retrieve_index_schemas(self) -> list[MutableMapping[str, Any]]:
"""
Retrieves the full schema definition for each search index.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ class ScoringProfileSchema(BaseModel):

class SearchIndexSchema(BaseModel):
name: str
description: Optional[str] = None
fields: List[SearchFieldSchema]
suggesters: Optional[List[SuggesterSchema]] = None
scoring_profiles: Optional[List[ScoringProfileSchema]] = None
Expand All @@ -64,6 +65,7 @@ def convert_pydantic_model_to_search_index(schema: SearchIndexSchema) -> SearchI

return SearchIndex(
name=schema.name,
description=schema.description,
fields=fields,
suggesters=suggesters or None,
scoring_profiles=schema.scoring_profiles,
Expand Down Expand Up @@ -92,4 +94,3 @@ def convert_to_field_mappings(models: List[FieldMappingModel]) -> List[FieldMapp
)
for model in models
]

12 changes: 12 additions & 0 deletions src/mcp_foundry/mcp_foundry_knowledge/tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,18 @@ async def list_index_names() -> list[str]:
dao = SearchIndexDao()
return dao.retrieve_index_names()

@mcp.tool(description="Retrieves the names and descriptions of all indexes")
async def list_indexes_with_descriptions() -> list[dict[str, str | None]]:
    """
    List all search indexes with their optional descriptions.

    Returns:
        list[dict[str, str | None]]: One dictionary per index holding the
        'name' and 'description' keys; 'description' is None when the index
        was created without one.
    """
    # Delegate directly to the DAO; it owns the Azure AI Search client setup.
    return SearchIndexDao().retrieve_indexes_with_descriptions()

@mcp.tool(description="Retrieves the schemas for all indexes ")
async def list_index_schemas() -> list[OperationResult]:
"""
Expand Down
52 changes: 52 additions & 0 deletions tests/test_knowledge_dao.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
import pytest

from mcp_foundry.mcp_foundry_knowledge.data_access_objects.dao import SearchIndexDao
from mcp_foundry.mcp_foundry_knowledge.data_access_objects.models import (
SearchIndexSchema,
SearchFieldSchema,
convert_pydantic_model_to_search_index,
)


def test_retrieve_indexes_with_descriptions(monkeypatch):
    """DAO should surface name/description pairs straight from the client listing."""

    class StubIndex:
        def __init__(self, name, description):
            self.name = name
            self.description = description

    class StubClient:
        # Generator on purpose: the DAO must cope with any iterable, not just lists.
        def list_indexes(self):
            yield StubIndex("idx1", "Description 1")
            yield StubIndex("idx2", None)

    def stub_init(self):
        self.client = StubClient()

    # Bypass the real __init__ so no Azure credentials/network are needed.
    monkeypatch.setattr(SearchIndexDao, "__init__", stub_init, raising=False)

    expected = [
        {"name": "idx1", "description": "Description 1"},
        {"name": "idx2", "description": None},
    ]
    assert SearchIndexDao().retrieve_indexes_with_descriptions() == expected


def test_convert_pydantic_model_to_search_index_description_passthrough():
    """Converter must carry the schema's description onto the resulting SearchIndex."""
    source_schema = SearchIndexSchema(
        name="test-index",
        description="My test index",
        fields=[SearchFieldSchema(name="id", type="Edm.String", key=True)],
    )

    converted = convert_pydantic_model_to_search_index(source_schema)

    assert converted.name == "test-index"
    assert getattr(converted, "description", None) == "My test index"
    assert hasattr(converted, "fields") and len(converted.fields) == 1

Loading