Thursday, June 19, 2025

 Continuing the previous posts, this one shows how to deploy an agent that searches our drone images and detected objects in response to user queries: 

 

from azure.search.documents.indexes import SearchIndexClient  

 

from azure.search.documents.indexes.models import (
    KnowledgeAgent,
    KnowledgeAgentAzureOpenAIModel,
    KnowledgeAgentRequestLimits,
    KnowledgeAgentTargetIndex,
    AzureOpenAIVectorizerParameters,
)

from azure.ai.agents.models import AzureAISearchTool, AzureAISearchQueryType, MessageRole, ListSortOrder 

 

from azure.ai.agents import AgentsClient 

from dotenv import load_dotenv 

from azure.identity import DefaultAzureCredential, get_bearer_token_provider 

from azure.core.credentials import AzureKeyCredential 

import os 

 

load_dotenv(override=True) 
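# (override=True lets values from the .env file replace variables that are already set in the shell environment.)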

 

project_endpoint = os.environ["AZURE_PROJECT_ENDPOINT"] 

project_api_key = os.environ["AZURE_PROJECT_API_KEY"] 

agent_model = os.getenv("AZURE_AGENT_MODEL", "gpt-4o-mini") 

search_endpoint = os.environ["AZURE_SEARCH_SERVICE_ENDPOINT"] 

api_version = os.getenv("AZURE_SEARCH_API_VERSION") 

search_api_key = os.getenv("AZURE_SEARCH_ADMIN_KEY") 

credential = AzureKeyCredential(search_api_key) 

token_provider = get_bearer_token_provider(DefaultAzureCredential(), "https://search.azure.com/.default") 

index_name = os.getenv("AZURE_SEARCH_02_INDEX_NAME", "index02") 

azure_openai_endpoint = os.environ["AZURE_OPENAI_ENDPOINT"] 

azure_openai_api_key = os.getenv("AZURE_OPENAI_API_KEY") 

azure_openai_gpt_deployment = os.getenv("AZURE_OPENAI_GPT_DEPLOYMENT", "gpt-4o-mini") 

azure_openai_gpt_model = os.getenv("AZURE_OPENAI_GPT_MODEL", "gpt-4o-mini") 

azure_openai_embedding_deployment = os.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT", "text-embedding-ada-002") 

azure_openai_embedding_model = os.getenv("AZURE_OPENAI_EMBEDDING_MODEL", "text-embedding-ada-002") 

agent_name = os.getenv("AZURE_SEARCH_AGENT_NAME", "objects-search-agent") 

api_version = "2025-05-01-Preview" 

agent_max_output_tokens = 10000
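# For reference, the .env is expected to supply values along these lines; the
# placeholders are illustrative only, not actual resource names:
#   AZURE_PROJECT_ENDPOINT=<Azure AI Foundry project endpoint>
#   AZURE_PROJECT_API_KEY=<project key; unused when DefaultAzureCredential signs in>
#   AZURE_SEARCH_SERVICE_ENDPOINT=https://<search-service>.search.windows.net
#   AZURE_SEARCH_ADMIN_KEY=<search admin key>
#   AZURE_OPENAI_ENDPOINT=https://<openai-resource>.openai.azure.com
#   AI_AZURE_AI_CONNECTION_ID=<id of the project's Azure AI Search connection>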

 

# Create the clients: AgentsClient talks to the Azure AI Foundry project that
# hosts the agent (note: the project endpoint, not the Azure OpenAI endpoint),
# and SearchIndexClient talks to the Azure AI Search service that holds the index.
agents_client = AgentsClient(endpoint=project_endpoint, credential=DefaultAzureCredential())

index_client = SearchIndexClient(endpoint=search_endpoint, credential=AzureKeyCredential(search_api_key))  
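# The knowledge-agent classes imported above are not exercised in this script;
# they belong to the agentic-retrieval setup from the earlier posts. Against the
# preview azure-search-documents SDK, that setup looks roughly like the sketch
# below (kept commented out here, and not a definitive call sequence):
#
# knowledge_agent = KnowledgeAgent(
#     name=agent_name,
#     models=[
#         KnowledgeAgentAzureOpenAIModel(
#             azure_open_ai_parameters=AzureOpenAIVectorizerParameters(
#                 resource_url=azure_openai_endpoint,
#                 deployment_name=azure_openai_gpt_deployment,
#                 model_name=azure_openai_gpt_model,
#             )
#         )
#     ],
#     target_indexes=[KnowledgeAgentTargetIndex(index_name=index_name)],
#     request_limits=KnowledgeAgentRequestLimits(max_output_size=agent_max_output_tokens),
# )
# index_client.create_or_update_agent(knowledge_agent)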

instructions = """ 

You are an AI assistant that answers questions about the stored and indexed drone images and objects. 

The data source is an Azure AI Search index whose schema has a JSON description field, a vector field, and an id field; this id field must be cited in your answer.

If you do not find a match for the query, respond with "I don't know"; otherwise, cite references using the value of the id field.

""" 

 

# The connection id identifies the Azure AI Search connection defined on the
# Azure AI Foundry project (a connection resource id); the search endpoint URL
# below is only a fallback placeholder.
connection_id = os.getenv("AI_AZURE_AI_CONNECTION_ID", "https://srch-vision-01.search.windows.net")

# Initialize the AzureAISearchTool with the search index connection id.
# The resulting search_tool can then be used within the Azure AI project,
# typically as part of an agent or flow, to perform search operations against
# the specified index; for example, the agent invokes it whenever it needs to
# retrieve information from the search index.
# Optional parameters include query_type, filter, and top_k.

search_tool = AzureAISearchTool( 

    index_connection_id=connection_id, 

    index_name=index_name, 

    query_type=AzureAISearchQueryType.VECTOR_SEMANTIC_HYBRID, 

    filter="",  # Optional filter expression 

    top_k=3  # Number of results to return 

) 
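# The empty filter above means no server-side filtering; if the index exposed,
# say, a (hypothetical) "label" field, filter="label eq 'car'" would restrict
# retrieval to car detections before the top_k results are returned.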


agent = agents_client.create_agent( 

    model=agent_model,  # the chat model deployment the agent runs on

    name=agent_name, 

    instructions=instructions, 

    tools=search_tool.definitions, 

    tool_resources=search_tool.resources 

) 
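# tools carries the Azure AI Search tool definition and tool_resources binds the
# index connection to it, so the service can run index queries on the agent's
# behalf during runs.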

 

# Create a thread for the conversation 

thread = agents_client.threads.create() 

 

# Send a user message (the query text) 

query_text = "How many red cars can be found?" 

message = agents_client.messages.create( 

    thread_id=thread.id, 

    role=MessageRole.USER, 

    content=query_text, 

) 

# Run the agent to process the query 

run = agents_client.runs.create_and_process(thread_id=thread.id, agent_id=agent.id) 
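# create_and_process starts the run and polls it until it reaches a terminal
# state (completed, failed, cancelled); the Azure AI Search tool is executed
# service-side while the run is in progress.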

 

# Check run status 

if run.status == "failed": 

    print(f"Run failed: {run.last_error}") 

 

# Retrieve and print all messages in the thread (including agent's answer) 

messages = agents_client.messages.list(thread_id=thread.id, order=ListSortOrder.ASCENDING) 

for message in messages: 

    print(f"Role: {message.role}, Content: {message.content}")