File Search

File Search enables agents to search through uploaded files to find relevant information. This tool is especially useful for building agents that answer questions about documents, analyze file content, and extract information.

Warning

The availability of File Search depends on the underlying agent provider. For provider-specific support, see the Provider Overview.

The following example shows how to create an agent with the File Search tool:

using System;
using Azure.AI.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;

// Requires: dotnet add package Microsoft.Agents.AI.OpenAI --prerelease
var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT")
    ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");
var deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-4o-mini";

// Create an agent with the file search hosted tool
// Provide vector store IDs containing your uploaded documents
AIAgent agent = new AzureOpenAIClient(new Uri(endpoint), new AzureCliCredential())
    .GetChatClient(deploymentName)
    .AsAIAgent(
        instructions: "You are a helpful assistant that searches through files to find information.",
        tools: [new FileSearchToolDefinition(vectorStoreIds: ["<your-vector-store-id>"])]);

Console.WriteLine(await agent.RunAsync("What does the document say about today's weather?"));
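
The C# example above expects a vector store that already contains your uploaded documents. The following is a minimal sketch of that prerequisite step, assuming the experimental file and vector store clients exposed by the OpenAI .NET SDK (exact type and method names can differ between SDK versions); the printed ID can then replace the <your-vector-store-id> placeholder.

// Minimal sketch of the prerequisite step: upload a document and index it in a vector store.
// Assumes the experimental OpenAI .NET SDK clients; adjust to the SDK version you use.
#pragma warning disable OPENAI001 // File and vector store APIs are marked experimental in the OpenAI .NET SDK.
using System;
using Azure.AI.OpenAI;
using Azure.Identity;
using OpenAI.Files;
using OpenAI.VectorStores;

var endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT")
    ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");

var azureClient = new AzureOpenAIClient(new Uri(endpoint), new AzureCliCredential());
var fileClient = azureClient.GetOpenAIFileClient();
var vectorStoreClient = azureClient.GetVectorStoreClient();

// Upload a sample document that the agent will be able to search.
var file = (await fileClient.UploadFileAsync(
    BinaryData.FromString("The weather today is sunny with a high of 75F.").ToStream(),
    "todays_weather.txt",
    FileUploadPurpose.Assistants)).Value;

// Create a vector store containing the uploaded file and wait for indexing to finish.
var operation = await vectorStoreClient.CreateVectorStoreAsync(
    waitUntilCompleted: true,
    new VectorStoreCreationOptions { Name = "knowledge_base", FileIds = { file.Id } });

// Use this ID as <your-vector-store-id> in the example above.
Console.WriteLine($"Vector store ID: {operation.VectorStoreId}");

When the documents are no longer needed, production code would typically delete the vector store and the uploaded file again, as the Python sample below does in its delete_vector_store helper.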

The following example shows how to create an agent with the File Search tool and sample documents:

Define sample documents and run the agent

# Copyright (c) Microsoft. All rights reserved.

import asyncio

from agent_framework import Agent
from agent_framework.openai import OpenAIResponsesClient

"""
OpenAI Responses Client with File Search Example

This sample demonstrates using get_file_search_tool() with OpenAI Responses Client
for direct document-based question answering and information retrieval.
"""

# Helper functions


async def create_vector_store(client: OpenAIResponsesClient) -> tuple[str, str]:
    """Create a vector store with sample documents."""
    file = await client.client.files.create(
        file=("todays_weather.txt", b"The weather today is sunny with a high of 75F."), purpose="user_data"
    )
    vector_store = await client.client.vector_stores.create(
        name="knowledge_base",
        expires_after={"anchor": "last_active_at", "days": 1},
    )
    result = await client.client.vector_stores.files.create_and_poll(vector_store_id=vector_store.id, file_id=file.id)
    if result.last_error is not None:
        raise Exception(f"Vector store file processing failed with status: {result.last_error.message}")

    # Return plain IDs so they can be passed to the file search tool and cleaned up later.
    return file.id, vector_store.id


async def delete_vector_store(client: OpenAIResponsesClient, file_id: str, vector_store_id: str) -> None:
    """Delete the vector store after using it."""
    await client.client.vector_stores.delete(vector_store_id=vector_store_id)
    await client.client.files.delete(file_id=file_id)


async def main() -> None:
    client = OpenAIResponsesClient()

    message = "What is the weather today? Do a file search to find the answer."

    stream = False
    print(f"User: {message}")
    file_id, vector_store_id = await create_vector_store(client)

    agent = Agent(
        client=client,
        instructions="You are a helpful assistant that can search through files to find information.",
        tools=[client.get_file_search_tool(vector_store_ids=[vector_store_id])],
    )

    if stream:
        print("Assistant: ", end="")
        async for chunk in agent.run(message, stream=True):
            if chunk.text:
                print(chunk.text, end="")
        print("")
    else:
        response = await agent.run(message)
        print(f"Assistant: {response}")
    await delete_vector_store(client, file_id, vector_store_id)


if __name__ == "__main__":
    asyncio.run(main())


Next Steps